[csw-devel] SF.net SVN: gar:[11880] csw/mgar/gar/v2
wahwah at users.sourceforge.net
Fri Dec 10 21:55:59 CET 2010
Revision: 11880
http://gar.svn.sourceforge.net/gar/?rev=11880&view=rev
Author: wahwah
Date: 2010-12-10 20:55:58 +0000 (Fri, 10 Dec 2010)
Log Message:
-----------
Revert "checkpkg: File collision detection"
This reverts commit 41873226c2f5805d914b6fb39c05efbdcafcc084.
Modified Paths:
--------------
csw/mgar/gar/v2/bin/checkpkg
csw/mgar/gar/v2/bin/mkpackage
csw/mgar/gar/v2/gar.pkg.mk
csw/mgar/gar/v2/lib/python/README
csw/mgar/gar/v2/lib/python/catalog.py
csw/mgar/gar/v2/lib/python/checkpkg.py
csw/mgar/gar/v2/lib/python/checkpkg_test.py
csw/mgar/gar/v2/lib/python/configuration.py
csw/mgar/gar/v2/lib/python/database.py
csw/mgar/gar/v2/lib/python/dependency_checks.py
csw/mgar/gar/v2/lib/python/dependency_checks_test.py
csw/mgar/gar/v2/lib/python/inspective_package.py
csw/mgar/gar/v2/lib/python/models.py
csw/mgar/gar/v2/lib/python/opencsw.py
csw/mgar/gar/v2/lib/python/package.py
csw/mgar/gar/v2/lib/python/package_checks.py
csw/mgar/gar/v2/lib/python/package_checks_test.py
csw/mgar/gar/v2/lib/python/package_stats.py
csw/mgar/gar/v2/lib/python/package_stats_test.py
csw/mgar/gar/v2/lib/python/pkgdb.py
csw/mgar/gar/v2/lib/python/sharedlib_utils.py
csw/mgar/gar/v2/lib/python/sharedlib_utils_test.py
csw/mgar/gar/v2/lib/python/tag.py
csw/mgar/gar/v2/lib/python/tag_test.py
csw/mgar/gar/v2/lib/python/testdata/neon_stats.py
csw/mgar/gar/v2/lib/python/testdata/tree_stats.py
csw/mgar/gar/v2/tests/run_tests.py
Added Paths:
-----------
csw/mgar/gar/v2/bin/analyze_module_results.py
csw/mgar/gar/v2/bin/checkpkg_collect_stats.py
csw/mgar/gar/v2/bin/checkpkg_run_modules.py
Removed Paths:
-------------
csw/mgar/gar/v2/lib/python/checkpkg2.py
csw/mgar/gar/v2/lib/python/checkpkg_lib.py
csw/mgar/gar/v2/lib/python/checkpkg_lib_test.py
csw/mgar/gar/v2/lib/python/common_constants.py
csw/mgar/gar/v2/lib/python/ldd_emul.py
csw/mgar/gar/v2/lib/python/ldd_emul_test.py
csw/mgar/gar/v2/lib/python/models_test.py
csw/mgar/gar/v2/lib/python/mute_progressbar.py
csw/mgar/gar/v2/lib/python/pkgdb_test.py
csw/mgar/gar/v2/lib/python/shell.py
csw/mgar/gar/v2/lib/python/system_pkgmap.py
csw/mgar/gar/v2/lib/python/system_pkgmap_test.py
csw/mgar/gar/v2/lib/python/test_base.py
Property Changed:
----------------
csw/mgar/gar/v2/bin/checkpkg
csw/mgar/gar/v2/lib/python/package_stats_test.py
Added: csw/mgar/gar/v2/bin/analyze_module_results.py
===================================================================
--- csw/mgar/gar/v2/bin/analyze_module_results.py (rev 0)
+++ csw/mgar/gar/v2/bin/analyze_module_results.py 2010-12-10 20:55:58 UTC (rev 11880)
@@ -0,0 +1,73 @@
+#!/opt/csw/bin/python2.6
+# $Id$
+
+import itertools
+import operator
+import optparse
+import os
+import pprint
+import progressbar
+import sys
+import textwrap
+
+# The following bit of code sets the correct path to Python libraries
+# distributed with GAR.
+path_list = [os.path.dirname(__file__),
+ "..", "lib", "python"]
+sys.path.append(os.path.join(*path_list))
+import checkpkg
+import overrides
+import package_stats
+
+BEFORE_OVERRIDES = """If any of the reported errors were false positives, you
+can override them pasting the lines below to the GAR recipe."""
+
+AFTER_OVERRIDES = """Please note that checkpkg isn't suggesting you should
+simply add these overrides do the Makefile. It only informs what the overrides
+could look like. You need to understand what are the reported issues about and
+use your best judgement to decide whether to fix the underlying problems or
+override them. For more information, scroll up and read the detailed
+messages."""
+
+UNAPPLIED_OVERRIDES = """WARNING: Some overrides did not match any errors.
+They can be removed, as they don't take any effect anyway. If you're getting
+errors at the same time, maybe you didn't specify the overrides correctly."""
+
+def main():
+ parser = optparse.OptionParser()
+ parser.add_option("-c", "--catalog_file", dest="catalog",
+ help="Optional catalog file")
+ parser.add_option("-q", "--quiet", dest="quiet",
+ default=False, action="store_true",
+                    help=("Display fewer messages"))
+ options, args = parser.parse_args()
+ filenames = args
+
+  # This might be a bottleneck. Perhaps a list of md5 sums could be given to
+  # this script instead.
+
+ # It might be a good idea to store the error tags in the database and
+ # eliminate the need to access the directory with the error tag files.
+
+ pkgstats = package_stats.StatsListFromCatalog(filenames, options.catalog)
+ overrides_list = [pkg.GetSavedOverrides() for pkg in pkgstats]
+  # The empty-list initializers keep reduce() from raising a TypeError when
+  # the argument list is empty.
+  override_list = reduce(operator.add, overrides_list, [])
+  error_tags = reduce(operator.add,
+                      [stat.GetSavedErrorTags() for stat in pkgstats], [])
+ (tags_after_overrides,
+ unapplied_overrides) = overrides.ApplyOverrides(error_tags, override_list)
+ if not options.quiet:
+ if tags_after_overrides:
+ print textwrap.fill(BEFORE_OVERRIDES, 80)
+ for checkpkg_tag in tags_after_overrides:
+ print checkpkg_tag.ToGarSyntax()
+ print textwrap.fill(AFTER_OVERRIDES, 80)
+ if unapplied_overrides:
+ print textwrap.fill(UNAPPLIED_OVERRIDES, 80)
+ for override in unapplied_overrides:
+ print "* Unused %s" % override
+ exit_code = bool(tags_after_overrides)
+ sys.exit(exit_code)
+
+
+if __name__ == '__main__':
+ main()
Property changes on: csw/mgar/gar/v2/bin/analyze_module_results.py
___________________________________________________________________
Added: svn:executable
+ *
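
For illustration, the override-matching step used above could be approximated
as follows. This is a sketch only: the real logic lives in
lib/python/overrides.py, and matching on the (pkgname, tag_name, tag_info)
triple is an assumption, not taken from this commit.

    def ApplyOverridesSketch(error_tags, override_list):
      """Returns (remaining_tags, unapplied_overrides), like ApplyOverrides."""
      remaining_tags = []
      used_overrides = []
      for error_tag in error_tags:
        matching = [o for o in override_list
                    if (o.pkgname, o.tag_name, o.tag_info) ==
                       (error_tag.pkgname, error_tag.tag_name,
                        error_tag.tag_info)]
        if matching:
          used_overrides.extend(matching)
        else:
          remaining_tags.append(error_tag)
      unapplied = [o for o in override_list if o not in used_overrides]
      return remaining_tags, unapplied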
Modified: csw/mgar/gar/v2/bin/checkpkg
===================================================================
--- csw/mgar/gar/v2/bin/checkpkg 2010-12-10 16:48:58 UTC (rev 11879)
+++ csw/mgar/gar/v2/bin/checkpkg 2010-12-10 20:55:58 UTC (rev 11880)
@@ -1 +1,322 @@
-link ../lib/python/checkpkg2.py
\ No newline at end of file
+#!/bin/ksh -p
+#
+# $Id$
+#
+# checkpkg 1.51
+#
+# diff to 1.46a
+# - check multiple package files
+# - checkpkg.d plugin support
+# - getopts support for command line options
+# - colors
+# - modular architecture + unit tests
+# - reliable shared library checking
+#
+# This script examines a package that has been put together
+# for submittal to the CSW archive at opencsw.org
+#
+# It examines it for compliance with the packaging standards at
+# http://www.opencsw.org/standards/
+# It DOES NOT CATCH EVERYTHING. However, the package will be
+# tested with this script before acceptance, so you may as well
+# save yourself some time, and run the script yourself!
+#
+# Be sure to occasionally do a "pkg-get update cswutils" so that
+# you know you are tracking the most current version.
+#
+# TODO:
+# - add message handling to the CheckInterface class.
+#
+
+PATH=$PATH:/usr/sbin
+readonly NAME_MAX_LENGTH=${NAME_MAX_LENGTH:-20}
+
+command_basename=`basename $0`
+command_basedir="${0%/${command_basename}}"
+libshdir="${command_basedir}/../lib/sh"
+readonly command_basename command_basedir libshdir
+. "${libshdir}/libcheckpkg.sh"
+
+LOCAL_ARCH=`uname -p`
+CHECKPKG_TMPDIR=${CHECKPKG_TMPDIR:-/var/tmp}
+readonly CHECKPKG_TMPDIR
+
+# Colors only when running interactively
+if [[ -t 1 ]]; then
+ GREEN="\\033[0;32;40m"
+ RED="\\033[1;31;40m"
+ BOLD="\\033[1m"
+ COLOR_RESET="\\033[00m"
+else
+ GREEN=""
+ RED=""
+ BOLD=""
+ COLOR_RESET=""
+fi
+readonly GREEN RED BOLD COLOR_RESET
+
+readonly selfpath="$0"
+readonly selfargs="$@"
+
+cleanup(){
+ if [[ -d "$EXTRACTDIR" ]] ; then
+ rm -rf $EXTRACTDIR
+ fi
+ cleantmparchives
+}
+
+cleantmparchives() {
+ for TMPARCHIVE in $tmparchives; do
+ if [[ "$TMPARCHIVE" != "" ]]; then
+ [ -f "$TMPARCHIVE" ] && rm $TMPARCHIVE
+ fi
+ done
+}
+
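+# Remove the per-set temp files, if any were created. The echo/glob
+# comparison below tests whether the $SETINF* pattern matched any files.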
+cleanupset(){
+ if [ "`echo $SETINF*`" != "$SETINF*" ]; then
+ rm $SETINF*
+ fi
+}
+
+# Print error message, and quit program.
+errmsg(){
+ print ERROR: $* >/dev/fd/2
+ cleanup
+ cleanupset
+ print "To run checkpkg in the debug mode, add the '-d' flag, for example:"
+ # selfargs can be very, very long. Find a way to truncate it.
+ # print "${selfpath} -d ${selfargs}"
+ print "After you modify any overrides, you need to do gmake remerge repackage"
+ print "or gmake platforms-remerge platforms-repackage."
+ exit 1
+}
+
+debugmsg() {
+ if [[ "${DEBUG}" != "" ]]; then
+ print "DEBUG: $*" > /dev/fd/2
+ fi
+}
+
+# TODO: Options to add:
+# - Use an pre-cached (from a catalog file?) list of md5 sums
+# - Don't use the data from /var/sadm/install/contents
+display_help=0
+SKIP_STATS_COLLECTION=0
+MD5_SUMS_CATALOG_FILE=""
+INSTALL_CONTENTS_FILES="/var/sadm/install/contents"
+ANALYZE=1
+PROFILE=0
+QUIET=0
+
+while getopts hsdNM:o:c:Apq opt; do
+ case "${opt}" in
+ c)
+ INSTALL_CONTENTS_FILES="${INSTALL_CONTENTS_FILES} ${OPTARG}"
+ ;;
+ d)
+ DEBUG=1
+ ;;
+ h)
+ display_help=1
+ ;;
+ N)
+ SKIP_STATS_COLLECTION=1
+ ;;
+ M)
+ MD5_SUMS_CATALOG_FILE="${OPTARG}"
+ ;;
+ A)
+ ANALYZE=0
+ ;;
+ p)
+ PROFILE=1
+ ;;
+  q)
+    QUIET=1
+    ;;
+ *)
+ echo "Unknown option '${opt}'"
+ ;;
+ esac
+done
+shift $(( $OPTIND -1 ))
+
+readonly INSTALL_CONTENTS_FILES
+readonly MD5_SUMS_CATALOG_FILE
+readonly SKIP_STATS_COLLECTION
+readonly ANALYZE
+readonly PROFILE
+readonly QUIET
+
+if [[ "${display_help}" -eq 1 ]] ; then
+ print 'Usage: checkpkg [options] pkg1 [pkg2 ....]'
+ print 'Options:'
+ print ' -c <file> use an additional install/contents file'
+ print ' -d display debug messages'
+ print ' -N skip statistics collection'
+ print ' -M <file> use package md5sums from a catalog file'
+  print '  -A          do not analyze the results'
+  print '  -p          enable profiling'
+  print '  -q          display fewer messages'
+ print ''
+ print 'Error tags are saved to the sqlite database.'
+ exit 0
+fi
+
+# a unique filename for the list of package deps and libs we see in a 'set'
+SETINF=$CHECKPKG_TMPDIR/checkpkg.$$.`date +%Y%m%d%H%M%S`
+SETLIBS=$SETINF.libs
+SETDEPS=$SETINF.deps
+pkgnames=""
+tmparchives=""
+
+EXTRACTDIR=$CHECKPKG_TMPDIR/dissect.$$
+
+if [ -d $EXTRACTDIR ] ; then
+  errmsg $EXTRACTDIR already exists
+fi
+
+for f in "$@"
+do
+
+ if [[ ! -f $f ]] ; then
+    errmsg $f does not exist
+ fi
+
+
+[ -d ${EXTRACTDIR} ] || mkdir ${EXTRACTDIR}
+
+########################################
+# Check for some common errors
+#########################################
+
+# TODO: To be ported.
+#
+# # find all executables and dynamic libs,and list their filenames.
+# if [[ "$basedir" != "" ]] ; then
+# print
+# if [[ -f $EXTRACTDIR/elflist ]] ; then
+# print "Checking relocation ability..."
+# xargs strings < $EXTRACTDIR/elflist| grep /opt/csw
+# if [[ $? -eq 0 ]] ; then
+#      errmsg package built as relocatable, but binaries have hardcoded /opt/csw paths in them
+# else
+# print trivial check passed
+# fi
+# else
+# echo No relocation check done for non-binary relocatable package.
+# fi
+# fi
+
+tmparchives="$tmparchives $TMPARCHIVE"
+done
+
+# Plugin section. This exists to support checks written in programming
+# languages other than Python. As of 2010-03-16 there are no checks in there.
+# If it stays empty, i.e. no checks in other languages get written, it could
+# be removed.
+#
+# Plugins should live in checkpkg.d subdirectory in the same directory in which
+# checkpkg is. Each plugin file name should be an executable and begin with
+# "checkpkg-".
+
+test_suite_ok=1
+checkpkg_module_dir="${command_basedir}/../lib/checkpkg.d"
+checkpkg_module_tag="checkpkg-"
+checkpkg_stats_basedir="${HOME}/.checkpkg/stats"
+
+# Cleaning up old *.pyc files which can cause grief. This is because of the
+# move of Python libraries.
+for pyc_file in ${checkpkg_module_dir}/opencsw.pyc \
+ ${checkpkg_module_dir}/checkpkg.pyc; do
+ if [ -f "${pyc_file}" ]; then
+ echo "Removing old pyc file: '${pyc_file}'"
+ rm "${pyc_file}"
+ fi
+done
+
+if [[ "${DEBUG}" != "" ]]; then
+ extra_options="--debug"
+fi
+if [[ "${PROFILE}" -eq 1 ]]; then
+ extra_options="${extra_options} --profile"
+fi
+if [[ "${QUIET}" -eq 1 ]]; then
+ quiet_options="--quiet"
+else
+ quiet_options=""
+fi
+
+if [[ -n "${MD5_SUMS_CATALOG_FILE}" ]]; then
+ catalog_options="--catalog=${MD5_SUMS_CATALOG_FILE}"
+else
+ catalog_options=""
+fi
+
+# /var/sadm/install/contents cache update
+# TODO: Either remove this section or stop the stats collection phase from
+# updating the cache.
+${command_basedir}/update_contents_cache.py ${extra_options}
+if [[ $? -ne 0 ]]; then
+ errmsg "Updating the contents cache has failed."
+fi
+if [[ "${SKIP_STATS_COLLECTION}" -eq 0 ]]; then
+ # Collects package stats to be analyzed later
+ ${command_basedir}/checkpkg_collect_stats.py \
+ ${catalog_options} \
+ ${extra_options} \
+ "$@"
+ if [[ "$?" -ne 0 ]]; then
+ errmsg "Stats collection phase has failed."
+ fi
+fi
+
+# TODO: A performance problem. The following line means that the md5sums are
+# calculated once more.
+if [ "${MD5_SUMS_CATALOG_FILE}" ]; then
+ debugmsg "Reading md5sums from ${MD5_SUMS_CATALOG_FILE}"
+ md5sums=`cat "${MD5_SUMS_CATALOG_FILE}" \
+ | awk '{print $5}' \
+ | ggrep -E '[0-9abcdef]{32}'`
+else
+ debugmsg "Calculating md5 sums of all the package files."
+ md5sums=`gmd5sum "$@" | awk '{print $1}'`
+fi
+debugmsg "All md5 sums: ${md5sums}"
+
+# Running the checks.
+${command_basedir}/checkpkg_run_modules.py \
+ ${extra_options} \
+ -b "${checkpkg_stats_basedir}" \
+ ${quiet_options} \
+ ${md5sums}
+if [[ "$?" -ne 0 ]]; then
+ print "There was a problem analyzing package stats."
+ test_suite_ok=0
+fi
+
+if [[ ${test_suite_ok} -ne 1 ]]; then
+ errmsg "One or more tests have finished with an error."
+fi
+
+if [[ "${ANALYZE}" -eq 1 ]]; then
+# Collecting errors and applying the overrides.
+# This has to use the original files.
+ ${command_basedir}/analyze_module_results.py \
+ ${catalog_options} \
+ ${quiet_options} \
+ "$@"
+ if [[ "$?" -ne 0 ]]; then
+ errmsg "${RED}Checkpkg has reported errors.${COLOR_RESET}"
+ else
+ print "${GREEN}Checkpkg reports no errors.${COLOR_RESET}"
+ fi
+else
+ echo "Skipping result analysis."
+fi
+
+print ""
+
+# Cleaning up after all packages
+cleanup
+
+cleanupset
Property changes on: csw/mgar/gar/v2/bin/checkpkg
___________________________________________________________________
Deleted: svn:special
- *
Added: svn:executable
+ *
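
The md5 extraction above leans on awk and ggrep. An equivalent sketch in
Python, assuming (as the awk '{print $5}' does) that the fifth
whitespace-separated field of a catalog line is the md5 sum:

    import re

    MD5_RE = re.compile(r"^[0-9a-f]{32}$")

    def Md5SumsFromCatalog(catalog_path):
      """Collects md5 sums from a catalog file; illustration only."""
      md5_sums = []
      for line in open(catalog_path):
        fields = line.split()
        if len(fields) >= 5 and MD5_RE.match(fields[4]):
          md5_sums.append(fields[4])
      return md5_sums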
Added: csw/mgar/gar/v2/bin/checkpkg_collect_stats.py
===================================================================
--- csw/mgar/gar/v2/bin/checkpkg_collect_stats.py (rev 0)
+++ csw/mgar/gar/v2/bin/checkpkg_collect_stats.py 2010-12-10 20:55:58 UTC (rev 11880)
@@ -0,0 +1,69 @@
+#!/opt/csw/bin/python2.6
+#
+# $Id$
+#
+# Collects statistics about a package and saves to a directory, for later use
+# by checkpkg modules.
+
+import itertools
+import logging
+import optparse
+import os
+import os.path
+import subprocess
+import sys
+import progressbar
+
+# The following bit of code sets the correct path to Python libraries
+# distributed with GAR.
+path_list = [os.path.dirname(__file__),
+ "..", "lib", "python"]
+sys.path.append(os.path.join(*path_list))
+import checkpkg
+import opencsw
+import package_stats
+
+def main():
+ parser = optparse.OptionParser()
+ parser.add_option("-d", "--debug", dest="debug",
+ default=False, action="store_true",
+ help="Turn on debugging messages")
+ parser.add_option("-c", "--catalog", dest="catalog",
+ help="Catalog file")
+ parser.add_option("-p", "--profile", dest="profile",
+ default=False, action="store_true",
+ help="A disabled option")
+ options, args = parser.parse_args()
+ if options.debug:
+ logging.basicConfig(level=logging.DEBUG)
+ else:
+ logging.basicConfig(level=logging.INFO)
+ logging.debug("Collecting statistics about given package files.")
+ args_display = args
+ if len(args_display) > 5:
+ args_display = args_display[:5] + ["...more..."]
+ file_list = args
+ logging.debug("Processing: %s, please be patient", args_display)
+ stats_list = package_stats.StatsListFromCatalog(
+ file_list, options.catalog, options.debug)
+ # Reversing the item order in the list, so that the pop() method can be used
+ # to get packages, and the order of processing still matches the one in the
+ # catalog file.
+ stats_list.reverse()
+ total_packages = len(stats_list)
+ counter = itertools.count(1)
+ logging.info("Juicing the srv4 package stream files...")
+ bar = progressbar.ProgressBar()
+ bar.maxval = total_packages
+ bar.start()
+ while stats_list:
+ # This way objects will get garbage collected as soon as they are removed
+ # from the list by pop(). The destructor (__del__()) of the srv4 class
+    # removes the temporary directory from the disk. This makes it possible
+    # to process the whole catalog.
+ stats_list.pop().CollectStats()
+ bar.update(counter.next())
+ bar.finish()
+
+if __name__ == '__main__':
+ main()
Property changes on: csw/mgar/gar/v2/bin/checkpkg_collect_stats.py
___________________________________________________________________
Added: svn:executable
+ *
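
The pop()-driven loop above relies on CPython's reference counting: once an
element is popped and processed, no reference to it remains, so __del__ runs
immediately and the temporary directory is removed. A minimal illustration of
the pattern (the class below is hypothetical, not part of this commit):

    class TempResource(object):
      """Stands in for the srv4 object that owns an on-disk temp dir."""
      def __init__(self, name):
        self.name = name
      def __del__(self):
        # The real class removes its temporary directory here.
        print "cleaned up", self.name

    items = [TempResource("a"), TempResource("b"), TempResource("c")]
    items.reverse()  # pop() takes from the end; keep the original order
    while items:
      item = items.pop()
      # ... process item ...
      del item  # drop the last reference; __del__ runs right here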
Added: csw/mgar/gar/v2/bin/checkpkg_run_modules.py
===================================================================
--- csw/mgar/gar/v2/bin/checkpkg_run_modules.py (rev 0)
+++ csw/mgar/gar/v2/bin/checkpkg_run_modules.py 2010-12-10 20:55:58 UTC (rev 11880)
@@ -0,0 +1,56 @@
+#!/opt/csw/bin/python2.6
+# $Id$
+
+"""This script runs all the checks written in Python."""
+
+import datetime
+import logging
+import os
+import os.path
+import sys
+import re
+import cProfile
+
+CHECKPKG_MODULE_NAME = "Second checkpkg API version"
+
+# The following bit of code sets the correct path to Python libraries
+# distributed with GAR.
+path_list = [os.path.dirname(__file__),
+ "..", "lib", "python"]
+sys.path.append(os.path.join(*path_list))
+import checkpkg
+import opencsw
+
+
+def main():
+ options, args = checkpkg.GetOptions()
+ if options.debug:
+ logging.basicConfig(level=logging.DEBUG)
+ else:
+ logging.basicConfig(level=logging.INFO)
+ md5sums = args
+ # CheckpkgManager2 class abstracts away things such as the collection of
+ # results.
+ check_manager = checkpkg.CheckpkgManager2(CHECKPKG_MODULE_NAME,
+ options.stats_basedir,
+ md5sums,
+ options.debug)
+ # Running the checks, reporting and exiting.
+ exit_code, screen_report, tags_report = check_manager.Run()
+ screen_report = unicode(screen_report)
+ if not options.quiet and screen_report:
+ sys.stdout.write(screen_report)
+ else:
+ logging.debug("No screen report.")
+ sys.exit(exit_code)
+
+
+if __name__ == '__main__':
+ if "--profile" in sys.argv:
+ t_str = datetime.datetime.now().strftime("%Y-%m-%d-%H-%M")
+ home = os.environ["HOME"]
+ cprof_file_name = os.path.join(
+ home, ".checkpkg", "run-modules-%s.cprof" % t_str)
+ cProfile.run("main()", sort=1, filename=cprof_file_name)
+ else:
+ main()
Property changes on: csw/mgar/gar/v2/bin/checkpkg_run_modules.py
___________________________________________________________________
Added: svn:executable
+ *
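
When run with --profile, the block above writes a cProfile dump under
~/.checkpkg. One way to inspect it later, using the standard pstats module
(the file name below is an example):

    import pstats

    # Show the ten most expensive calls by cumulative time.
    stats = pstats.Stats("run-modules-2010-12-10-20-55.cprof")
    stats.sort_stats("cumulative")
    stats.print_stats(10)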
Modified: csw/mgar/gar/v2/bin/mkpackage
===================================================================
--- csw/mgar/gar/v2/bin/mkpackage 2010-12-10 16:48:58 UTC (rev 11879)
+++ csw/mgar/gar/v2/bin/mkpackage 2010-12-10 20:55:58 UTC (rev 11880)
@@ -25,9 +25,6 @@
# Tool Version/Revision Information
$TOOLVERSION = "1.4";
($REVISION) = q/$Revision$/ =~ /(\d+)/;
-# This shows a warning:
-# "Use of uninitialized value $REVISION in sprintf at
-# /home/maciej/src/opencsw/pkg/nspr/trunk/gar/bin/mkpackage line 31."
$VERSION = sprintf '%s (r%d)', $TOOLVERSION, $REVISION;
# Discover network support
Modified: csw/mgar/gar/v2/gar.pkg.mk
===================================================================
--- csw/mgar/gar/v2/gar.pkg.mk 2010-12-10 16:48:58 UTC (rev 11879)
+++ csw/mgar/gar/v2/gar.pkg.mk 2010-12-10 20:55:58 UTC (rev 11880)
@@ -52,7 +52,6 @@
GARPKG_v1 = CSWgar-v1
GARPKG_v2 = CSWgar-v2
RUNTIME_DEP_PKGS_$(SRCPACKAGE) ?= $(or $(GARPKG_$(GARSYSTEMVERSION)),$(error GAR version $(GARSYSTEMVERSION) unknown))
-CATALOG_RELEASE ?= current
_PKG_SPECS = $(filter-out $(NOPACKAGE),$(SPKG_SPECS))
@@ -843,11 +842,7 @@
# pkgcheck - check if the package is compliant
#
pkgcheck: $(foreach SPEC,$(_PKG_SPECS),package-$(SPEC))
- $(_DBG)( LC_ALL=C $(GARBIN)/checkpkg \
- --architecture "$(GARCH)" \
- --os-releases "$(SPKG_OSNAME)" \
- --catalog-release "$(CATALOG_RELEASE)" \
- $(foreach SPEC,$(_PKG_SPECS),$(SPKG_EXPORT)/`$(call _PKG_ENV,$(SPEC)) mkpackage --tmpdir $(SPKG_TMPDIR) -qs $(WORKDIR)/$(SPEC).gspec -D pkgfile`.gz ) || exit 2;)
+ $(_DBG)( LC_ALL=C $(GARBIN)/checkpkg $(foreach SPEC,$(_PKG_SPECS),$(SPKG_EXPORT)/`$(call _PKG_ENV,$(SPEC)) mkpackage --tmpdir $(SPKG_TMPDIR) -qs $(WORKDIR)/$(SPEC).gspec -D pkgfile`.gz ) || exit 2;)
@$(MAKECOOKIE)
pkgcheck-p:
Modified: csw/mgar/gar/v2/lib/python/README
===================================================================
--- csw/mgar/gar/v2/lib/python/README 2010-12-10 16:48:58 UTC (rev 11879)
+++ csw/mgar/gar/v2/lib/python/README 2010-12-10 20:55:58 UTC (rev 11880)
@@ -21,11 +21,9 @@
tests as means to run various bits of code. Here's the dependency list
for Ubuntu.
-sudo aptitude install \
- python-cheetah \
- python-hachoir-parser \
- python-magic \
- python-mox \
- python-progressbar \
- python-sqlobject \
+ python-cheetah
+ python-hachoir-parser
+ python-magic
+ python-mox
+ python-progressbar
python-yaml
Modified: csw/mgar/gar/v2/lib/python/catalog.py
===================================================================
--- csw/mgar/gar/v2/lib/python/catalog.py 2010-12-10 16:48:58 UTC (rev 11879)
+++ csw/mgar/gar/v2/lib/python/catalog.py 2010-12-10 20:55:58 UTC (rev 11880)
@@ -123,7 +123,6 @@
def _GetCatalogData(self, fd):
catalog_data = []
for line in fd:
- if line.startswith("#"): continue
try:
parsed = self._ParseCatalogLine(line)
catalog_data.append(parsed)
Modified: csw/mgar/gar/v2/lib/python/checkpkg.py
===================================================================
--- csw/mgar/gar/v2/lib/python/checkpkg.py 2010-12-10 16:48:58 UTC (rev 11879)
+++ csw/mgar/gar/v2/lib/python/checkpkg.py 2010-12-10 20:55:58 UTC (rev 11880)
@@ -3,22 +3,44 @@
# This is the checkpkg library, common for all checkpkg tests written in
# Python.
+import copy
+import cPickle
+import errno
import itertools
import logging
+import operator
import optparse
+import os
import os.path
import re
import pprint
import progressbar
+import socket
+import sqlite3
import sqlobject
+import time
+from sqlobject import sqlbuilder
import subprocess
+import textwrap
+from Cheetah import Template
import database
+import package
import inspective_package
+import package_checks
+import package_stats
import models as m
-import common_constants
+import configuration as c
+import tag
+DEBUG_BREAK_PKGMAP_AFTER = False
+SYSTEM_PKGMAP = "/var/sadm/install/contents"
+NEEDED_SONAMES = "needed sonames"
+RUNPATH = "runpath"
+SONAME = "soname"
+CONFIG_MTIME = "mtime"
+CONFIG_DB_SCHEMA = "db_schema_version"
DO_NOT_REPORT_SURPLUS = set([u"CSWcommon", u"CSWcswclassutils", u"CSWisaexec"])
DO_NOT_REPORT_MISSING = set([])
DO_NOT_REPORT_MISSING_RE = [r"\*?SUNW.*"]
@@ -26,11 +48,17 @@
PSTAMP_RE = r"(?P<username>\w+)@(?P<hostname>[\w\.-]+)-(?P<timestamp>\d+)"
DESCRIPTION_RE = r"^([\S]+) - (.*)$"
BAD_CONTENT_REGEXES = (
- # Slightly obfuscating these by using concatenation of strings.
+ # Slightly obfuscating these by using the default concatenation of
+ # strings.
r'/export' r'/medusa',
r'/opt' r'/build',
)
+SYSTEM_SYMLINKS = (
+ ("/opt/csw/bdb4", ["/opt/csw/bdb42"]),
+ ("/64", ["/amd64", "/sparcv9"]),
+ ("/opt/csw/lib/i386", ["/opt/csw/lib"]),
+)
INSTALL_CONTENTS_AVG_LINE_LENGTH = 102.09710677919261
SYS_DEFAULT_RUNPATH = [
"/usr/lib/$ISALIST",
@@ -39,7 +67,8 @@
"/lib",
]
-MD5_RE = re.compile(r"^[0123456789abcdef]{32}$")
+CONTENT_PKG_RE = r"^\*?(CSW|SUNW)[0-9a-zA-Z\-]?[0-9a-z\-]+$"
+MD5_RE = r"^[0123456789abcdef]{32}$"
REPORT_TMPL = u"""#if $missing_deps or $surplus_deps or $orphan_sonames
Dependency issues of $pkgname:
@@ -61,6 +90,57 @@
#end if
"""
+SCREEN_ERROR_REPORT_TMPL = u"""#if $errors
+#if $debug
+ERROR: One or more errors have been found by $name.
+#end if
+#for $pkgname in $errors
+$pkgname:
+#for $error in $errors[$pkgname]
+#if $debug
+ $repr($error)
+#elif $error.msg
+$textwrap.fill($error.msg, 78, initial_indent="# ", subsequent_indent="# ")
+# -> $repr($error)
+
+#end if
+#end for
+#end for
+#else
+#if $debug
+OK: $repr($name) module found no problems.
+#end if
+#end if
+#if $messages
+#for $msg in $messages
+$textwrap.fill($msg, 78, initial_indent=" * ", subsequent_indent=" ")
+#end for
+#end if
+#if $gar_lines
+
+# Checkpkg suggests adding the following lines to the GAR recipe:
+# This is a summary; see above for details.
+#for $line in $gar_lines
+$line
+#end for
+#end if
+"""
+
+# http://www.cheetahtemplate.org/docs/users_guide_html_multipage/language.directives.closures.html
+TAG_REPORT_TMPL = u"""#if $errors
+# Tags reported by $name module
+#for $pkgname in $errors
+#for $tag in $errors[$pkgname]
+#if $tag.msg
+$textwrap.fill($tag.msg, 70, initial_indent="# ", subsequent_indent="# ")
+#end if
+$pkgname: ${tag.tag_name}#if $tag.tag_info# $tag.tag_info#end if#
+#end for
+#end for
+#end if
+"""
+
+
class Error(Exception):
pass
@@ -77,12 +157,11 @@
pass
-class SetupError(Error):
- pass
-
-
def GetOptions():
parser = optparse.OptionParser()
+ parser.add_option("-b", "--stats-basedir", dest="stats_basedir",
+ help=("The base directory with package statistics "
+ "in yaml format, e.g. ~/.checkpkg/stats"))
parser.add_option("-d", "--debug", dest="debug",
default=False, action="store_true",
help="Turn on debugging messages")
@@ -93,6 +172,8 @@
default=False, action="store_true",
help=("Print less messages"))
(options, args) = parser.parse_args()
+ if not options.stats_basedir:
+ raise ConfigurationError("ERROR: the -b option is missing.")
# Using set() to make the arguments unique.
return options, set(args)
@@ -114,9 +195,859 @@
return m.group("username") if m else None
+class SystemPkgmap(database.DatabaseClient):
+ """A class to hold and manipulate the /var/sadm/install/contents file."""
+
+ STOP_PKGS = ["SUNWbcp", "SUNWowbcp", "SUNWucb"]
+
+ def __init__(self, system_pkgmap_files=None, debug=False):
+ """There is no need to re-parse it each time.
+
+ Read it slowly the first time and cache it for later."""
+ super(SystemPkgmap, self).__init__(debug=debug)
+ self.cache = {}
+ self.pkgs_by_path_cache = {}
+ self.file_mtime = None
+ self.cache_mtime = None
+ self.initialized = False
+ if not system_pkgmap_files:
+ self.system_pkgmap_files = [SYSTEM_PKGMAP]
+ else:
+ self.system_pkgmap_files = system_pkgmap_files
+ self.csw_pkg_re = re.compile(CONTENT_PKG_RE)
+ self.digits_re = re.compile(r"^[0-9]+$")
+
+ def _LazyInitializeDatabase(self):
+ if not self.initialized:
+ self.InitializeDatabase()
+
+ def InitializeRawDb(self):
+ """It's necessary for low level operations."""
+    logging.debug("Connecting to sqlite")
+    self.sqlite_conn = sqlite3.connect(self.GetDatabasePath())
+
+ def InitializeDatabase(self):
+ """Established the connection to the database.
+
+ TODO: Refactor this class to first create CswFile with no primary key and
+ no indexes.
+ """
+ need_to_create_tables = False
+ db_path = self.GetDatabasePath()
+ checkpkg_dir = os.path.join(os.environ["HOME"], self.CHECKPKG_DIR)
+ if not os.path.exists(db_path):
+ logging.info("Building the cache database %s.", self.system_pkgmap_files)
+ logging.info("The cache will be kept in %s.", db_path)
+ if not os.path.exists(checkpkg_dir):
+ logging.debug("Creating %s", checkpkg_dir)
+ os.mkdir(checkpkg_dir)
+ need_to_create_tables = True
+ self.InitializeRawDb()
+ self.InitializeSqlobject()
+ if not self.IsDatabaseGoodSchema():
+ logging.info("Old database schema detected.")
+ self.PurgeDatabase(drop_tables=True)
+ need_to_create_tables = True
+ if need_to_create_tables:
+ self.CreateTables()
+ self.PerformInitialDataImport()
+ if not self.IsDatabaseUpToDate():
+ logging.debug("Rebuilding the package cache, can take a few minutes.")
+ self.ClearTablesForUpdates()
+ self.RefreshDatabase()
+ self.initialized = True
+
+ def RefreshDatabase(self):
+ for pkgmap_path in self.system_pkgmap_files:
+ self._ProcessSystemPkgmap(pkgmap_path)
+ self.PopulatePackagesTable()
+ self.SetDatabaseMtime()
+
+ def PerformInitialDataImport(self):
+ """Imports data into the database.
+
+ Original bit of code from checkpkg:
+ egrep -v 'SUNWbcp|SUNWowbcp|SUNWucb' /var/sadm/install/contents |
+ fgrep -f $EXTRACTDIR/liblist >$EXTRACTDIR/shortcatalog
+ """
+ for pkgmap_path in self.system_pkgmap_files:
+ self._ProcessSystemPkgmap(pkgmap_path)
+ self.SetDatabaseSchemaVersion()
+ self.PopulatePackagesTable()
+ self.SetDatabaseMtime()
+
+ def _ProcessSystemPkgmap(self, pkgmap_path):
+ """Update the database using data from pkgmap.
+
+ The strategy to only update the necessary bits:
+ - for each new row
+ - look it up in the db
+ - if doesn't exist, create it
+ - if exists, check the
+ TODO: continue this description
+ """
+ INSERT_SQL = """
+ INSERT INTO csw_file (basename, path, line)
+ VALUES (?, ?, ?);
+ """
+ sqlite_cursor = self.sqlite_conn.cursor()
+ break_after = DEBUG_BREAK_PKGMAP_AFTER
+ contents_length = os.stat(pkgmap_path).st_size
+ if break_after:
+ estimated_lines = break_after
+ else:
+ estimated_lines = contents_length / INSTALL_CONTENTS_AVG_LINE_LENGTH
+    # The progressbar library doesn't like handling larger numbers.
+ # It displays up to 99% if we feed it a maxval in the range of hundreds of
+ # thousands.
+ progressbar_divisor = int(estimated_lines / 1000)
+ if progressbar_divisor < 1:
+ progressbar_divisor = 1
+ update_period = 1L
+ # To help delete old records
+ system_pkgmap_fd = open(pkgmap_path, "r")
+ stop_re = re.compile("(%s)" % "|".join(self.STOP_PKGS))
+ # Creating a data structure:
+ # soname - {<path1>: <line1>, <path2>: <line2>, ...}
+ logging.debug("Building database cache db of the %s file",
+ pkgmap_path)
+ logging.info("Processing %s, it can take a few minutes", pkgmap_path)
+ count = itertools.count()
+ bar = progressbar.ProgressBar()
+ bar.maxval = estimated_lines / progressbar_divisor
+ bar.start()
+ # I tried dropping the csw_file_basename_idx index to speed up operation,
+ # but after I measured the times, it turned out that it doesn't make any
+    # difference to the total running time.
+ # logging.info("Dropping csw_file_basename_idx")
+ # sqlite_cursor.execute("DROP INDEX csw_file_basename_idx;")
+ for line in system_pkgmap_fd:
+ i = count.next()
+ if not i % update_period and (i / progressbar_divisor) <= bar.maxval:
+ bar.update(i / progressbar_divisor)
+ if stop_re.search(line):
+ continue
+ if line.startswith("#"):
+ continue
+ fields = re.split(c.WS_RE, line)
+ pkgmap_entry_path = fields[0].split("=")[0]
+ pkgmap_entry_dir, pkgmap_entry_base_name = os.path.split(pkgmap_entry_path)
+ # The following SQLObject-driven inserts are 60 times slower than the raw
+ # sqlite API.
+ # pkgmap_entry = m.CswFile(basename=pkgmap_entry_base_name,
+ # path=pkgmap_entry_dir, line=line.strip())
+ # This page has some hints:
+ # http://www.mail-archive.com/sqlobject-discuss@lists.sourceforge.net/msg04641.html
+ # "These are simple straightforward INSERTs without any additional
+ # high-level burden - no SELECT, no caching, nothing. Fire and forget."
+ # sql = self.sqo_conn.sqlrepr(
+ # sqlobject.sqlbuilder.Insert(m.CswFile.sqlmeta.table, values=record))
+ # self.sqo_conn.query(sql)
+ # ...unfortunately, it isn't any faster in practice.
+ # The fastest way is:
+ sqlite_cursor.execute(INSERT_SQL, [pkgmap_entry_base_name,
+ pkgmap_entry_dir,
+ line.strip()])
+ if break_after and i > break_after:
+ logging.warning("Breaking after %s for debugging purposes.", break_after)
+ break
+ bar.finish()
+ self.sqlite_conn.commit()
+ logging.debug("All lines of %s were processed.", pkgmap_path)
+
+ def _ParsePkginfoLine(self, line):
+ fields = re.split(c.WS_RE, line)
+ pkgname = fields[1]
+ pkg_desc = u" ".join(fields[2:])
+ return pkgname, pkg_desc
+
+ def PopulatePackagesTable(self):
+ logging.info("Updating the packages table")
+ args = ["pkginfo"]
+ pkginfo_proc = subprocess.Popen(args, stdout=subprocess.PIPE)
+ stdout, stderr = pkginfo_proc.communicate()
+ ret = pkginfo_proc.wait()
+ lines = stdout.splitlines()
+ bar = progressbar.ProgressBar()
+ bar.maxval = len(lines)
+ bar.start()
+ count = itertools.count()
+ INSERT_SQL = """
+ INSERT INTO pkginst (pkgname, pkg_desc)
+ VALUES (?, ?);
+ """
+ # If self.GetInstalledPackages calls out to the initialization,
+ # the result is an infinite recursion.
+ installed_pkgs = self.GetInstalledPackages(initialize=False)
+ for line in stdout.splitlines():
+ pkgname, pkg_desc = self._ParsePkginfoLine(line)
+ if pkgname not in installed_pkgs:
+ # This is slow:
+ # pkg = m.Pkginst(pkgname=pkgname, pkg_desc=pkg_desc)
+ # This is much faster:
+ self.sqlite_conn.execute(INSERT_SQL, [pkgname, pkg_desc])
+ i = count.next()
+ bar.update(i)
+ # Need to commit, otherwise subsequent SQLObject calls will fail.
+ self.sqlite_conn.commit()
+ bar.finish()
+
+ def SetDatabaseMtime(self):
+ mtime = self.GetFileMtime()
+ res = m.CswConfig.select(m.CswConfig.q.option_key==CONFIG_MTIME)
+ if res.count() == 0:
+ logging.debug("Inserting the mtime (%s) into the database.", mtime)
+ config_record = m.CswConfig(option_key=CONFIG_MTIME, float_value=mtime)
+ else:
+ logging.debug("Updating the mtime (%s) in the database.", mtime)
+ res.getOne().float_value = mtime
+
+ def SetDatabaseSchemaVersion(self):
+ try:
+ config_option = m.CswConfig.select(
+ m.CswConfig.q.option_key==CONFIG_DB_SCHEMA).getOne()
+ config_option.int_value = database.DB_SCHEMA_VERSION
+ except sqlobject.main.SQLObjectNotFound, e:
+ version = m.CswConfig(option_key=CONFIG_DB_SCHEMA,
+ int_value=database.DB_SCHEMA_VERSION)
+
+ def GetPkgmapLineByBasename(self, filename):
+ """Returns pkgmap lines by basename:
+ {
+ path1: line1,
+ path2: line2,
+ }
+ """
+ if filename in self.cache:
+ return self.cache[filename]
+ self._LazyInitializeDatabase()
+ res = m.CswFile.select(m.CswFile.q.basename==filename)
+ lines = {}
+ for obj in res:
+ lines[obj.path] = obj.line
+ if len(lines) == 0:
+ logging.debug("Cache doesn't contain filename %s", filename)
+ self.cache[filename] = lines
+ return lines
+
+ def _InferPackagesFromPkgmapLine(self, line):
+ """Given a pkgmap line, return all packages it contains."""
+ line = line.strip()
+ parts = re.split(c.WS_RE, line)
+ pkgs = []
+ if parts[1] == 'd':
+ parts = parts[6:]
+ while parts:
+ part = parts.pop()
+ if self.digits_re.match(part):
+ break
+ elif "none" == part:
+ break
+ pkgs.append(part)
+ # Make the packages appear in the same order as in the install/contents
+ # file.
+ pkgs.reverse()
+ return pkgs
+
+ def GetPathsAndPkgnamesByBasename(self, filename):
+ """Returns paths and packages by basename.
+
+ e.g.
+ {"/opt/csw/lib": ["CSWfoo", "CSWbar"],
+ "/opt/csw/1/lib": ["CSWfoomore"]}
+ """
+ lines = self.GetPkgmapLineByBasename(filename)
+ pkgs = {}
+ # Infer packages
+ for file_path in lines:
+ pkgs[file_path] = self._InferPackagesFromPkgmapLine(lines[file_path])
+ # self.error_mgr_mock.GetPathsAndPkgnamesByBasename('libc.so.1').AndReturn({
+ # "/usr/lib": (u"SUNWcsl",)})
+ logging.debug("self.error_mgr_mock.GetPathsAndPkgnamesByBasename(%s).AndReturn(%s)",
+ repr(filename), pprint.pformat(pkgs))
+ return pkgs
+
+ def GetPkgByPath(self, full_path):
+ if full_path not in self.pkgs_by_path_cache:
+ self._LazyInitializeDatabase()
+ path, basename = os.path.split(full_path)
+ try:
+ obj = m.CswFile.select(
+ sqlobject.AND(
+ m.CswFile.q.path==path,
+ m.CswFile.q.basename==basename)).getOne()
+ self.pkgs_by_path_cache[full_path] = self._InferPackagesFromPkgmapLine(
+ obj.line)
+ except sqlobject.main.SQLObjectNotFound, e:
+ logging.debug("Couldn't find in the db: %s/%s", path, basename)
+ logging.debug(e)
+ self.pkgs_by_path_cache[full_path] = []
+ logging.debug("self.error_mgr_mock.GetPkgByPath(%s).AndReturn(%s)",
+ repr(full_path), pprint.pformat(self.pkgs_by_path_cache[full_path]))
+ return self.pkgs_by_path_cache[full_path]
+
+ def GetDatabaseMtime(self):
+ if not self.cache_mtime:
+ res = m.CswConfig.select(m.CswConfig.q.option_key==CONFIG_MTIME)
+ if res.count() == 1:
+ self.cache_mtime = res.getOne().float_value
+ elif res.count() < 1:
+ self.cache_mtime = 1
+ logging.debug("GetDatabaseMtime() --> %s", self.cache_mtime)
+ return self.cache_mtime
+
+ def GetFileMtime(self):
+ if not self.file_mtime:
+ stat_data = os.stat(SYSTEM_PKGMAP)
+ self.file_mtime = stat_data.st_mtime
+ return self.file_mtime
+
+ def GetDatabaseSchemaVersion(self):
+ schema_on_disk = 1L
+ if not m.CswConfig.tableExists():
+      return schema_on_disk
+ res = m.CswConfig.select(m.CswConfig.q.option_key == CONFIG_DB_SCHEMA)
+ if res.count() < 1:
+ logging.debug("No db schema value found, assuming %s.",
+ schema_on_disk)
+ elif res.count() == 1:
+ schema_on_disk = res.getOne().int_value
+ return schema_on_disk
+
+ def IsDatabaseUpToDate(self):
+ f_mtime_epoch = self.GetFileMtime()
+ d_mtime_epoch = self.GetDatabaseMtime()
+ f_mtime = time.gmtime(int(f_mtime_epoch))
+ d_mtime = time.gmtime(int(d_mtime_epoch))
+ logging.debug("IsDatabaseUpToDate: f_mtime %s, d_time: %s", f_mtime, d_mtime)
+ # Rounding up to integer seconds. There is a race condition:
+ # pkgadd finishes at 100.1
+ # checkpkg reads /var/sadm/install/contents at 100.2
+ # new pkgadd runs and finishes at 100.3
+ # subsequent checkpkg runs won't pick up the last change.
+ # I don't expect pkgadd to run under 1s.
+ fresh = f_mtime <= d_mtime
+ good_version = self.GetDatabaseSchemaVersion() >= database.DB_SCHEMA_VERSION
+ logging.debug("IsDatabaseUpToDate: good_version=%s, fresh=%s",
+ repr(good_version), repr(fresh))
+ return fresh and good_version
+
+ def ClearTablesForUpdates(self):
+ for table in self.TABLES_THAT_NEED_UPDATES:
+ table.clearTable()
+
+ def PurgeDatabase(self, drop_tables=False):
+ if drop_tables:
+ for table in self.TABLES:
+ if table.tableExists():
+ table.dropTable()
+ else:
+ logging.debug("Truncating all tables")
+ for table in self.TABLES:
+ table.clearTable()
+
+ def GetInstalledPackages(self, initialize=True):
+ """Returns a dictionary of all installed packages."""
+ if initialize:
+ self._LazyInitializeDatabase()
+ res = m.Pkginst.select()
+ return dict([[str(x.pkgname), str(x.pkg_desc)] for x in res])
+
+
+class LddEmulator(object):
+ """A class to emulate ldd(1)
+
+ Used primarily to resolve SONAMEs and detect package dependencies.
+ """
+ def __init__(self):
+ self.runpath_expand_cache = {}
+ self.runpath_origin_expand_cache = {}
+ self.symlink_expand_cache = {}
+ self.symlink64_cache = {}
+ self.runpath_sanitize_cache = {}
+
+ def ExpandRunpath(self, runpath, isalist, binary_path):
+ """Expands a signle runpath element.
+
+ Args:
+ runpath: e.g. "/opt/csw/lib/$ISALIST"
+ isalist: isalist elements
+ binary_path: Necessary to expand $ORIGIN
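+
+    Example (illustrative):
+      ExpandRunpath("/opt/csw/lib/$ISALIST", ["sparcv9"], "opt/csw/bin/foo")
+      returns ["/opt/csw/lib", "/opt/csw/lib/sparcv9"].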
+ """
+ key = (runpath, tuple(isalist))
+ if key not in self.runpath_expand_cache:
+ origin_present = False
+ # Emulating $ISALIST and $ORIGIN expansion
+ if '$ORIGIN' in runpath:
+ origin_present = True
+ if origin_present:
+ key_o = (runpath, tuple(isalist), binary_path)
+ if key_o in self.runpath_origin_expand_cache:
+ return self.runpath_origin_expand_cache[key_o]
+ else:
+ if not binary_path.startswith("/"):
+ binary_path = "/" + binary_path
+ runpath = runpath.replace('$ORIGIN', binary_path)
+ if '$ISALIST' in runpath:
+ expanded_list = [runpath.replace('/$ISALIST', '')]
+ expanded_list += [runpath.replace('$ISALIST', isa) for isa in isalist]
+ else:
+ expanded_list = [runpath]
+ expanded_list = [os.path.abspath(p) for p in expanded_list]
+ if not origin_present:
+ self.runpath_expand_cache[key] = expanded_list
+ else:
+ self.runpath_origin_expand_cache[key_o] = expanded_list
+ return self.runpath_origin_expand_cache[key_o]
+ return self.runpath_expand_cache[key]
+
+ def ExpandSymlink(self, symlink, target, input_path):
+ key = (symlink, target, input_path)
+ if key not in self.symlink_expand_cache:
+ symlink_re = re.compile(r"%s(/|$)" % symlink)
+ if re.search(symlink_re, input_path):
+ result = input_path.replace(symlink, target)
+ else:
+ result = input_path
+ self.symlink_expand_cache[key] = result
+ return self.symlink_expand_cache[key]
+
+ def Emulate64BitSymlinks(self, runpath_list):
+ """Need to emulate the 64 -> amd64, 64 -> sparcv9 symlink
+
+ Since we don't know the architecture, we are adding both amd64 and
+ sparcv9. It should be safe - there are other checks that make sure
+    that the right architectures are in the right directories.
+ """
+ key = tuple(runpath_list)
+ if key not in self.symlink64_cache:
+ symlinked_list = []
+ for runpath in runpath_list:
+ for symlink, expansion_list in SYSTEM_SYMLINKS:
+ for target in expansion_list:
+ expanded = self.ExpandSymlink(symlink, target, runpath)
+ if expanded not in symlinked_list:
+ symlinked_list.append(expanded)
+ self.symlink64_cache[key] = symlinked_list
+ return self.symlink64_cache[key]
+
+ def SanitizeRunpath(self, runpath):
+ if runpath not in self.runpath_sanitize_cache:
+ self.runpath_sanitize_cache[runpath] = os.path.normpath(runpath)
+ return self.runpath_sanitize_cache[runpath]
+
+
+ def ResolveSoname(self, runpath_list, soname, isalist,
+ path_list, binary_path):
+ """Emulates ldd behavior, minimal implementation.
+
+ runpath: e.g. ["/opt/csw/lib/$ISALIST", "/usr/lib"]
+ soname: e.g. "libfoo.so.1"
+ isalist: e.g. ["sparcv9", "sparcv8"]
+ path_list: A list of paths where the soname is present, e.g.
+ ["/opt/csw/lib", "/opt/csw/lib/sparcv9"]
+
+    The function returns one path from path_list, or None if none matches.
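+
+    Example (illustrative): with runpath_list ["/opt/csw/lib/sparcv9",
+    "/usr/lib"] and path_list ["/opt/csw/lib", "/opt/csw/lib/sparcv9"],
+    the returned path is "/opt/csw/lib/sparcv9".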
+ """
+    # Emulating the install-time symlinks: for instance, if the prototype contains
+ # /opt/csw/lib/i386/foo.so.0 and /opt/csw/lib/i386 is a symlink to ".",
+ # the shared library ends up in /opt/csw/lib/foo.so.0 and should be
+ # findable even when RPATH does not contain $ISALIST.
+ original_paths_by_expanded_paths = {}
+ for p in path_list:
+ expanded_p_list = self.Emulate64BitSymlinks([p])
+ # We can't just expand and return; we need to return one of the paths given
+ # in the path_list.
+ for expanded_p in expanded_p_list:
+ original_paths_by_expanded_paths[expanded_p] = p
+ logging.debug(
+ "%s: looking for %s in %s",
+ soname, runpath_list, original_paths_by_expanded_paths.keys())
+ for runpath_expanded in runpath_list:
+ if runpath_expanded in original_paths_by_expanded_paths:
+ # logging.debug("Found %s",
+ # original_paths_by_expanded_paths[runpath_expanded])
+ return original_paths_by_expanded_paths[runpath_expanded]
+
+
+def ParseDumpOutput(dump_output):
+ binary_data = {RUNPATH: [],
+ NEEDED_SONAMES: []}
+ runpath = []
+ rpath = []
+ for line in dump_output.splitlines():
+ fields = re.split(c.WS_RE, line)
+ if len(fields) < 3:
+ continue
+ if fields[1] == "NEEDED":
+ binary_data[NEEDED_SONAMES].append(fields[2])
+ elif fields[1] == "RUNPATH":
+ runpath.extend(fields[2].split(":"))
+ elif fields[1] == "RPATH":
+ rpath.extend(fields[2].split(":"))
+ elif fields[1] == "SONAME":
+ binary_data[SONAME] = fields[2]
+ if runpath:
+ binary_data[RUNPATH].extend(runpath)
+ elif rpath:
+ binary_data[RUNPATH].extend(rpath)
+
+ # Converting runpath to a tuple, which is a hashable data type and can act as
+ # a key in a dict.
+ binary_data[RUNPATH] = tuple(binary_data[RUNPATH])
+ # the NEEDED list must not be modified, converting to a tuple.
+ binary_data[NEEDED_SONAMES] = tuple(binary_data[NEEDED_SONAMES])
+ binary_data["RUNPATH RPATH the same"] = (runpath == rpath)
+ binary_data["RPATH set"] = bool(rpath)
+ binary_data["RUNPATH set"] = bool(runpath)
+ return binary_data
+
+
+class CheckpkgManagerBase(object):
+ """Common functions between the older and newer calling functions."""
+
+ def __init__(self, name, stats_basedir, md5sum_list, debug=False):
+ self.debug = debug
+ self.name = name
+ self.md5sum_list = md5sum_list
+ self.stats_basedir = stats_basedir
+ self.errors = []
+ self.individual_checks = []
+ self.set_checks = []
+ self.packages = []
+
+ def GetPackageStatsList(self):
+ return [package_stats.PackageStats(None, self.stats_basedir, x)
+ for x in self.md5sum_list]
+
+ def FormatReports(self, errors, messages, gar_lines):
+ namespace = {
+ "name": self.name,
+ "errors": errors,
+ "debug": self.debug,
+ "textwrap": textwrap,
+ "messages": messages,
+ "gar_lines": gar_lines,
+ }
+ screen_t = Template.Template(SCREEN_ERROR_REPORT_TMPL, searchList=[namespace])
+ tags_report_t = Template.Template(TAG_REPORT_TMPL, searchList=[namespace])
+ return screen_t, tags_report_t
+
+ def SetErrorsToDict(self, set_errors, a_dict):
+ # These were generated by a set, but are likely to be bound to specific
+ # packages. We'll try to preserve the package assignments.
+ errors = copy.copy(a_dict)
+ for tag in set_errors:
+ if tag.pkgname:
+ if not tag.pkgname in errors:
+ errors[tag.pkgname] = []
+ errors[tag.pkgname].append(tag)
+ else:
+ if "package-set" not in errors:
+ errors["package-set"] = []
+ errors["package-set"].append(tag)
+ return errors
+
+ def GetOptimizedAllStats(self, stats_obj_list):
+ logging.info("Unwrapping candies...")
+ pkgs_data = []
+ counter = itertools.count()
+ length = len(stats_obj_list)
+ bar = progressbar.ProgressBar()
+ bar.maxval = length
+ bar.start()
+ for stats_obj in stats_obj_list:
+ # pkg_data = {}
+ # This bit is tightly tied to the data structures returned by
+ # PackageStats.
+ #
+ # Python strings are already implementing the flyweight pattern. What's
+ # left is lists and dictionaries.
+ i = counter.next()
+ # logging.debug("Loading stats for %s (%s/%s)",
+ # stats_obj.md5sum, i, length)
+ raw_pkg_data = stats_obj.GetAllStats()
+ pkg_data = raw_pkg_data
+ pkgs_data.append(pkg_data)
+ bar.update(i)
+ bar.finish()
+ return pkgs_data
+
+ def Run(self):
+ """Runs all the checks
+
+ Returns a tuple of an exit code and a report.
+ """
+ packages_data = self.GetPackageStatsList()
+ db_stat_objs_by_pkgname = {}
+ obj_id_list = []
+ for pkg in packages_data:
+ db_obj = pkg.GetDbObject()
+ db_stat_objs_by_pkgname[db_obj.pkginst.pkgname] = db_obj
+ obj_id_list.append(db_obj.id)
+ logging.debug("Deleting old %s errors from the database.",
+ db_obj.pkginst.pkgname)
+ conn = sqlobject.sqlhub.processConnection
+    # There's a maximum number of ORs in a single SQL statement, so we're
+    # slicing the long list up into fixed-size segments. 1000 is too many.
+ obj_id_lists = SliceList(obj_id_list, 900)
+ for obj_id_list in obj_id_lists:
+ # WARNING: This is raw SQL, potentially breaking during a transition to
+ # another db. It's here for efficiency.
+ sql = ("DELETE FROM checkpkg_error_tag WHERE %s;"
+ % " OR ".join("srv4_file_id = %s" % x for x in obj_id_list))
+ conn.query(sql)
+ # Need to construct the predicate by hand. Otherwise:
+ # File "/opt/csw/lib/python/site-packages/sqlobject/sqlbuilder.py",
+ # line 829, in OR
+ # return SQLOp("OR", op1, OR(*ops))
+ # RuntimeError: maximum recursion depth exceeded while calling a Python object
+ #
+ # The following also tries to use recursion and fails.
+ # delete_predicate = sqlobject.OR(False)
+ # for pred in delete_predicate_list:
+ # delete_predicate = sqlobject.OR(delete_predicate, pred)
+ # conn.query(
+ # conn.sqlrepr(sqlbuilder.Delete(m.CheckpkgErrorTag.sqlmeta.table,
+ # delete_predicate
+ # )))
+ # res = m.CheckpkgErrorTag.select(m.CheckpkgErrorTag.q.srv4_file==db_obj)
+ # for obj in res:
+ # obj.destroySelf()
+ errors, messages, gar_lines = self.GetAllTags(packages_data)
+ no_errors = len(errors) + 1
+ bar = progressbar.ProgressBar()
+ bar.maxval = no_errors
+ count = itertools.count(1)
+ logging.info("Stuffing the candies under the pillow...")
+ bar.start()
+ for pkgname, es in errors.iteritems():
+ logging.debug("Saving %s errors to the database.", pkgname)
+ for e in es:
+ db_error = m.CheckpkgErrorTag(srv4_file=db_stat_objs_by_pkgname[e.pkgname],
+ pkgname=e.pkgname,
+ tag_name=e.tag_name,
+ tag_info=e.tag_info,
+ msg=e.msg)
+ bar.update(count.next())
+ bar.finish()
+ flat_error_list = reduce(operator.add, errors.values(), [])
+ screen_report, tags_report = self.FormatReports(errors, messages, gar_lines)
+ exit_code = 0
+ return (exit_code, screen_report, tags_report)
+
+
+class CheckInterfaceBase(object):
+ """Proxies interaction with checking functions.
+
+ It wraps access to the /var/sadm/install/contents cache.
+ """
+
+ def __init__(self, system_pkgmap=None, lines_dict=None):
+ self.system_pkgmap = system_pkgmap
+ if not self.system_pkgmap:
+ self.system_pkgmap = SystemPkgmap()
+ self.common_paths = {}
+ if lines_dict:
+ self.lines_dict = lines_dict
+ else:
+ self.lines_dict = {}
+
+ def GetPathsAndPkgnamesByBasename(self, basename):
+ """Proxies calls to self.system_pkgmap."""
+ return self.system_pkgmap.GetPathsAndPkgnamesByBasename(basename)
+
+ def GetPkgByPath(self, path):
+ """Proxies calls to self.system_pkgmap."""
+ return self.system_pkgmap.GetPkgByPath(path)
+
+ def GetInstalledPackages(self, initialize=True):
+ return self.system_pkgmap.GetInstalledPackages(initialize)
+
+ def _GetPathsForArch(self, arch):
+ if not arch in self.lines_dict:
+ file_name = os.path.join(
+ os.path.dirname(__file__), "..", "..", "etc", "commondirs-%s" % arch)
+ logging.debug("opening %s", file_name)
+ f = open(file_name, "r")
+ self.lines_dict[arch] = f.read().splitlines()
+ f.close()
+ return self.lines_dict[arch]
+
+ def GetCommonPaths(self, arch):
+ """Returns a list of paths for architecture, from gar/etc/commondirs*."""
+ # TODO: If this was cached, it could save a significant amount of time.
+ if arch not in ('i386', 'sparc', 'all'):
+ logging.warn("Wrong arch: %s", repr(arch))
+ return []
+ if arch == 'all':
+ archs = ('i386', 'sparc')
+ else:
+ archs = [arch]
+ lines = []
+ for arch in archs:
+ lines.extend(self._GetPathsForArch(arch))
+ return lines
+
+
+class IndividualCheckInterface(CheckInterfaceBase):
+ """To be passed to the checking functions.
+
+ Wraps the creation of tag.CheckpkgTag objects.
+ """
+
+ def __init__(self, pkgname, system_pkgmap=None):
+ super(IndividualCheckInterface, self).__init__(system_pkgmap)
+ self.pkgname = pkgname
+ self.errors = []
+
+ def ReportError(self, tag_name, tag_info=None, msg=None):
+ logging.debug("self.error_mgr_mock.ReportError(%s, %s, %s)",
+ repr(tag_name), repr(tag_info), repr(msg))
+ checkpkg_tag = tag.CheckpkgTag(self.pkgname, tag_name, tag_info, msg=msg)
+ self.errors.append(checkpkg_tag)
+
+
+class SetCheckInterface(CheckInterfaceBase):
+ """To be passed to set checking functions."""
+
+ def __init__(self, system_pkgmap=None):
+ super(SetCheckInterface, self).__init__(system_pkgmap)
+ self.errors = []
+
+ def ReportError(self, pkgname, tag_name, tag_info=None, msg=None):
+ logging.debug("self.error_mgr_mock.ReportError(%s, %s, %s, %s)",
+ repr(pkgname),
+ repr(tag_name), repr(tag_info), repr(msg))
+ checkpkg_tag = tag.CheckpkgTag(pkgname, tag_name, tag_info, msg=msg)
+ self.errors.append(checkpkg_tag)
+
+
+class CheckpkgMessenger(object):
+ """Class responsible for passing messages from checks to the user."""
+ def __init__(self):
+ self.messages = []
+ self.one_time_messages = {}
+ self.gar_lines = []
+
+ def Message(self, m):
+ logging.debug("self.messenger.Message(%s)", repr(m))
+ self.messages.append(m)
+
+ def OneTimeMessage(self, key, m):
+ logging.debug("self.messenger.OneTimeMessage(%s, %s)", repr(key), repr(m))
+ if key not in self.one_time_messages:
+ self.one_time_messages[key] = m
+
+ def SuggestGarLine(self, m):
+ logging.debug("self.messenger.SuggestGarLine(%s)", repr(m))
+ self.gar_lines.append(m)
+
+
+class CheckpkgManager2(CheckpkgManagerBase):
+ """The second incarnation of the checkpkg manager.
+
+ Implements the API to be used by checking functions.
+
+ Its purpose is to reduce the amount of boilerplate code and allow for easier
+ unit test writing.
+ """
+ def _RegisterIndividualCheck(self, function):
+ self.individual_checks.append(function)
+
+ def _RegisterSetCheck(self, function):
+ self.set_checks.append(function)
+
+ def _AutoregisterChecks(self):
+ """Autodetects all defined checks."""
+ logging.debug("CheckpkgManager2._AutoregisterChecks()")
+ checkpkg_module = package_checks
+ members = dir(checkpkg_module)
+ for member_name in members:
+ logging.debug("Examining module member: %s", repr(member_name))
+ member = getattr(checkpkg_module, member_name)
+ if callable(member):
+ if member_name.startswith("Check"):
+ logging.debug("Registering individual check %s", repr(member_name))
+ self._RegisterIndividualCheck(member)
+ elif member_name.startswith("SetCheck"):
+ logging.debug("Registering set check %s", repr(member_name))
+ self._RegisterSetCheck(member)
+
+ def GetAllTags(self, stats_obj_list):
+ errors = {}
+ pkgmap = SystemPkgmap()
+ logging.debug("Loading all package statistics.")
+ pkgs_data = self.GetOptimizedAllStats(stats_obj_list)
+ logging.debug("All package statistics loaded.")
+ messenger = CheckpkgMessenger()
+ # Individual checks
+ count = itertools.count()
+ bar = progressbar.ProgressBar()
+ bar.maxval = len(pkgs_data) * len(self.individual_checks)
+ logging.info("Tasting candies one by one...")
+ bar.start()
+ for pkg_data in pkgs_data:
+ pkgname = pkg_data["basic_stats"]["pkgname"]
+ check_interface = IndividualCheckInterface(pkgname, pkgmap)
+ for function in self.individual_checks:
+ logger = logging.getLogger("%s-%s" % (pkgname, function.__name__))
+ logger.debug("Calling %s", function.__name__)
+ function(pkg_data, check_interface, logger=logger, messenger=messenger)
+ if check_interface.errors:
+ errors[pkgname] = check_interface.errors
+ bar.update(count.next())
+ bar.finish()
+ # Set checks
+ logging.info("Tasting them all at once...")
+ for function in self.set_checks:
+ logger = logging.getLogger(function.__name__)
+ check_interface = SetCheckInterface(pkgmap)
+ logger.debug("Calling %s", function.__name__)
+ function(pkgs_data, check_interface, logger=logger, messenger=messenger)
+ if check_interface.errors:
+ errors = self.SetErrorsToDict(check_interface.errors, errors)
+ messages = messenger.messages + messenger.one_time_messages.values()
+ return errors, messages, messenger.gar_lines
+
+ def Run(self):
+ self._AutoregisterChecks()
+ return super(CheckpkgManager2, self).Run()
+
+
+def GetIsalist():
+ args = ["isalist"]
+ isalist_proc = subprocess.Popen(args, stdout=subprocess.PIPE)
+ stdout, stderr = isalist_proc.communicate()
+ ret = isalist_proc.wait()
+ if ret:
+ logging.error("Calling isalist has failed.")
+ isalist = re.split(r"\s+", stdout.strip())
+ return tuple(isalist)
+
+
+def ErrorTagsFromFile(file_name):
+ fd = open(file_name)
+ error_tags = []
+ for line in fd:
+ if line.startswith("#"):
+ continue
+ pkgname, tag_name, tag_info = tag.ParseTagLine(line)
+ error_tags.append(tag.CheckpkgTag(pkgname, tag_name, tag_info))
+ return error_tags
+
+
+def SliceList(l, size):
+ """Trasforms a list into a list of lists."""
+ idxes = xrange(0, len(l), size)
+ sliced = [l[i:i+size] for i in idxes]
+ return sliced
+
def IsMd5(s):
# For optimization, move the compilation elsewhere.
- return MD5_RE.match(s)
+ md5_re = re.compile(MD5_RE)
+ return md5_re.match(s)
def GetPackageStatsByFilenamesOrMd5s(args, debug=False):
filenames = []
@@ -128,15 +1059,15 @@
filenames.append(arg)
srv4_pkgs = [inspective_package.InspectiveCswSrv4File(x) for x in filenames]
pkgstat_objs = []
- pbar = progressbar.ProgressBar()
- pbar.maxval = len(md5s) + len(srv4_pkgs)
- pbar.start()
+ bar = progressbar.ProgressBar()
+ bar.maxval = len(md5s) + len(srv4_pkgs)
+ bar.start()
counter = itertools.count()
for pkg in srv4_pkgs:
pkgstat_objs.append(package_stats.PackageStats(pkg, debug=debug))
- pbar.update(counter.next())
+ bar.update(counter.next())
for md5 in md5s:
pkgstat_objs.append(package_stats.PackageStats(None, md5sum=md5, debug=debug))
- pbar.update(counter.next())
- pbar.finish()
+ bar.update(counter.next())
+ bar.finish()
return pkgstat_objs
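
Two helpers above are easiest to grasp with a worked example. SliceList()
splits a list into fixed-size chunks:

    SliceList(range(7), 3)  # -> [[0, 1, 2], [3, 4, 5], [6]]

Run() then renders each chunk of srv4 file ids into one bounded DELETE
statement, so the generated SQL looks along the lines of:

    DELETE FROM checkpkg_error_tag
        WHERE srv4_file_id = 1 OR srv4_file_id = 2 OR srv4_file_id = 3;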
Deleted: csw/mgar/gar/v2/lib/python/checkpkg2.py
===================================================================
--- csw/mgar/gar/v2/lib/python/checkpkg2.py 2010-12-10 16:48:58 UTC (rev 11879)
+++ csw/mgar/gar/v2/lib/python/checkpkg2.py 2010-12-10 20:55:58 UTC (rev 11880)
@@ -1,178 +0,0 @@
-#!/usr/bin/env python2.6
-#
-# checkpkg
-#
-
-import logging
-import operator
-import optparse
-import os
-import sys
-import textwrap
-import configuration
-import datetime
-import database
-
-import package_stats
-import checkpkg
-import checkpkg_lib
-import overrides
-import models
-import sqlobject
-
-USAGE = """%prog [ options ] pkg1 [ pkg2 [ ... ] ]"""
-CHECKPKG_MODULE_NAME = "The main checking module."
-BEFORE_OVERRIDES = """If any of the reported errors were false positives, you
-can override them pasting the lines below to the GAR recipe."""
-
-AFTER_OVERRIDES = """Please note that checkpkg isn't suggesting you should
-simply add these overrides do the Makefile. It only informs what the overrides
-could look like. You need to understand what are the reported issues about and
-use your best judgement to decide whether to fix the underlying problems or
-override them. For more information, scroll up and read the detailed
-messages."""
-
-UNAPPLIED_OVERRIDES = """WARNING: Some overrides did not match any errors.
-They can be removed, as they don't take any effect anyway. If you're getting
-errors at the same time, maybe you didn't specify the overrides correctly."""
-
-
-class Error(Exception):
- """Generic error."""
-
-
-class UsageError(Error):
- """Problem with usage, e.g. command line options."""
-
-
-def main():
- configuration.SetUpSqlobjectConnection()
- parser = optparse.OptionParser(USAGE)
- parser.add_option("-d", "--debug",
- dest="debug",
- action="store_true",
- default=False,
- help="Switch on debugging messages")
- parser.add_option("-q", "--quiet",
- dest="quiet",
- action="store_true",
- default=False,
- help="Display less messages")
- parser.add_option("--catalog-release",
- dest="catrel",
- default="unstable",
- help="A catalog release: experimental, unstable, testing, stable.")
- parser.add_option("-r", "--os-releases",
- dest="osrel_commas",
- help=("Comma separated list of ['SunOS5.9', 'SunOS5.10'], "
- "e.g. 'SunOS5.9,SunOS5.10'."))
- parser.add_option("-a", "--architecture",
- dest="arch",
- help="Architecture: i386, sparc.")
- parser.add_option("--profile", dest="profile",
- default=False, action="store_true",
- help="Enable profiling (a developer option).")
- options, args = parser.parse_args()
- assert len(args), "The list of files or md5 sums must not be empty."
- logging_level = logging.INFO
- if options.quiet:
- logging_level = logging.WARNING
- elif options.debug:
- # If both flags are set, debug wins.
- logging_level = logging.DEBUG
- logging.basicConfig(level=logging_level)
- logging.debug("Starting.")
-
- dm = database.DatabaseManager()
- dm.AutoManage()
-
-
- err_msg_list = []
- if not options.osrel_commas:
- err_msg_list.append("Please specify --os-releases.")
- if not options.arch:
- err_msg_list.append("Please specify --architecture.")
- if err_msg_list:
- raise UsageError(" ".join(err_msg_list))
-
- stats_list = []
- collector = package_stats.StatsCollector(
- logger=logging,
- debug=options.debug)
- # We need to separate files and md5 sums.
- md5_sums, file_list = [], []
- for arg in args:
- if checkpkg.MD5_RE.match(arg):
- md5_sums.append(arg)
- else:
- file_list.append(arg)
- if file_list:
- stats_list = collector.CollectStatsFromFiles(file_list, None)
- # We need the md5 sums of these files
- md5_sums.extend([x["basic_stats"]["md5_sum"] for x in stats_list])
- assert md5_sums, "The list of md5 sums must not be empty."
- logging.debug("md5_sums: %s", md5_sums)
- osrel_list = options.osrel_commas.split(",")
- logging.debug("Reading packages data from the database.")
- # This part might need improvements in order to handle a whole
- # catalog. On the other hand, if we already have the whole catalog in
- # the database, we can do it altogether differently.
- # Transforming the result to a list in order to force object
- # retrieval.
- sqo_pkgs = list(models.Srv4FileStats.select(
- sqlobject.IN(models.Srv4FileStats.q.md5_sum, md5_sums)))
- tags_for_all_osrels = []
- sqo_arch = models.Architecture.selectBy(name=options.arch).getOne()
- sqo_catrel = models.CatalogRelease.selectBy(name=options.catrel).getOne()
- for osrel in osrel_list:
- sqo_osrel = models.OsRelease.selectBy(short_name=osrel).getOne()
- check_manager = checkpkg_lib.CheckpkgManager2(
- CHECKPKG_MODULE_NAME,
- sqo_pkgs,
- osrel,
- options.arch,
- options.catrel,
- debug=options.debug,
- show_progress=(not options.quiet))
- # Running the checks, reporting and exiting.
- exit_code, screen_report, tags_report = check_manager.Run()
- screen_report = unicode(screen_report)
- if not options.quiet and screen_report:
- # TODO: Write this to screen only after overrides are applied.
- sys.stdout.write(screen_report)
- else:
- logging.debug("No screen report.")
-
- overrides_list = [list(pkg.GetOverridesResult()) for pkg in sqo_pkgs]
- override_list = reduce(operator.add, overrides_list)
- args = (sqo_osrel, sqo_arch, sqo_catrel)
- tag_lists = [list(pkg.GetErrorTagsResult(*args)) for pkg in sqo_pkgs]
- error_tags = reduce(operator.add, tag_lists)
- # TODO: Set the 'overridden' tag for specific error tags
- (tags_after_overrides,
- unapplied_overrides) = overrides.ApplyOverrides(error_tags, override_list)
- tags_for_all_osrels.extend(tags_after_overrides)
- if not options.quiet:
- if tags_after_overrides:
- print textwrap.fill(BEFORE_OVERRIDES, 80)
- for checkpkg_tag in tags_after_overrides:
- print checkpkg_tag.ToGarSyntax()
- print textwrap.fill(AFTER_OVERRIDES, 80)
- if unapplied_overrides:
- print textwrap.fill(UNAPPLIED_OVERRIDES, 80)
- for override in unapplied_overrides:
- print "* Unused %s" % override
- exit_code = bool(tags_for_all_osrels)
- sys.exit(exit_code)
-
-
-if __name__ == '__main__':
- if "--profile" in sys.argv:
- import cProfile
- t_str = datetime.datetime.now().strftime("%Y-%m-%d-%H-%M")
- home = os.environ["HOME"]
- cprof_file_name = os.path.join(
- home, ".checkpkg", "run-modules-%s.cprof" % t_str)
- cProfile.run("main()", sort=1, filename=cprof_file_name)
- else:
- main()
Deleted: csw/mgar/gar/v2/lib/python/checkpkg_lib.py
===================================================================
--- csw/mgar/gar/v2/lib/python/checkpkg_lib.py 2010-12-10 16:48:58 UTC (rev 11879)
+++ csw/mgar/gar/v2/lib/python/checkpkg_lib.py 2010-12-10 20:55:58 UTC (rev 11880)
@@ -1,600 +0,0 @@
-# A collection of checkpkg-specific classes.
-#
-# This file is supposed to drain the checkpkg.py file until it becomes
-# empty and goes away.
-
-import copy
-from Cheetah import Template
-import logging
-import package_stats
-import package_checks
-import sqlobject
-import itertools
-import progressbar
-import database
-import models as m
-import textwrap
-import os.path
-import tag
-import pprint
-import operator
-import common_constants
-import sharedlib_utils
-import mute_progressbar
-import cPickle
-
-
-class Error(Exception):
- pass
-
-
-class CatalogDatabaseError(Error):
- pass
-
-
-SCREEN_ERROR_REPORT_TMPL = u"""#if $errors
-#if $debug
-ERROR: One or more errors have been found by $name.
-#end if
-#for $pkgname in $errors
-$pkgname:
-#for $error in $errors[$pkgname]
-#if $debug
- $repr($error)
-#elif $error.msg
-$textwrap.fill($error.msg, 78, initial_indent="# ", subsequent_indent="# ")
-# -> $repr($error)
-
-#end if
-#end for
-#end for
-#else
-#if $debug
-OK: $repr($name) module found no problems.
-#end if
-#end if
-#if $messages
-#for $msg in $messages
-$textwrap.fill($msg, 78, initial_indent=" * ", subsequent_indent=" ")
-#end for
-#end if
-#if $gar_lines
-
-# Checkpkg suggests adding the following lines to the GAR recipe:
-# This is a summary; see above for details.
-#for $line in $gar_lines
-$line
-#end for
-#end if
-"""
-
-# http://www.cheetahtemplate.org/docs/users_guide_html_multipage/language.directives.closures.html
-TAG_REPORT_TMPL = u"""#if $errors
-# Tags reported by $name module
-#for $pkgname in $errors
-#for $tag in $errors[$pkgname]
-#if $tag.msg
-$textwrap.fill($tag.msg, 70, initial_indent="# ", subsequent_indent="# ")
-#end if
-$pkgname: ${tag.tag_name}#if $tag.tag_info# $tag.tag_info#end if#
-#end for
-#end for
-#end if
-"""
-
-
-class SqlobjectHelperMixin(object):
-
- def GetSqlobjectTriad(self, osrel, arch, catrel):
- logging.debug("GetSqlobjectTriad(%s, %s, %s)", osrel, arch, catrel)
- sqo_arch = m.Architecture.select(
- m.Architecture.q.name==arch).getOne()
- sqo_osrel = m.OsRelease.select(
- m.OsRelease.q.short_name==osrel).getOne()
- sqo_catrel = m.CatalogRelease.select(
- m.CatalogRelease.q.name==catrel).getOne()
- return sqo_osrel, sqo_arch, sqo_catrel
-
-
-class CheckpkgManagerBase(SqlobjectHelperMixin):
- """Common functions between the older and newer calling functions."""
-
- def __init__(self, name, sqo_pkgs_list, osrel, arch, catrel, debug=False,
- show_progress=False):
- self.debug = debug
- self.name = name
- self.sqo_pkgs_list = sqo_pkgs_list
- self.errors = []
- self.individual_checks = []
- self.set_checks = []
- self.packages = []
- self.osrel = osrel
- self.arch = arch
- self.catrel = catrel
- self.show_progress = show_progress
-
- def GetProgressBar(self):
- if self.show_progress:
- return progressbar.ProgressBar()
- else:
- return mute_progressbar.MuteProgressBar()
-
- def GetSqlobjectTriad(self):
- return super(CheckpkgManagerBase, self).GetSqlobjectTriad(
- self.osrel, self.arch, self.catrel)
-
- def GetPackageStatsList(self):
- raise RuntimeError("Please don't use this function as it violates "
- "the Law of Demeter.")
-
- def FormatReports(self, errors, messages, gar_lines):
- namespace = {
- "name": self.name,
- "errors": errors,
- "debug": self.debug,
- "textwrap": textwrap,
- "messages": messages,
- "gar_lines": gar_lines,
- }
- screen_t = Template.Template(SCREEN_ERROR_REPORT_TMPL, searchList=[namespace])
- tags_report_t = Template.Template(TAG_REPORT_TMPL, searchList=[namespace])
- return screen_t, tags_report_t
-
- def SetErrorsToDict(self, set_errors, a_dict):
- # These were generated by a set, but are likely to be bound to specific
- # packages. We'll try to preserve the package assignments.
- errors = copy.copy(a_dict)
- for tag in set_errors:
- if tag.pkgname:
- if not tag.pkgname in errors:
- errors[tag.pkgname] = []
- errors[tag.pkgname].append(tag)
- else:
- if "package-set" not in errors:
- errors["package-set"] = []
- errors["package-set"].append(tag)
- return errors
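[Editor's note: the effect is that set-level tags land in the same
pkgname-keyed dictionary that the individual checks fill, with pkgname-less
tags grouped under the artificial "package-set" key. A sketch of the
resulting shape, with invented tag names and "manager" standing for a
CheckpkgManagerBase instance:

    merged = manager.SetErrorsToDict(set_errors, errors)
    # e.g. {"CSWfoo":      [<CheckpkgTag CSWfoo: some-tag>],
    #       "package-set": [<CheckpkgTag with no pkgname>]}
]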
-
- def GetOptimizedAllStats(self, stats_obj_list):
- logging.info("Unwrapping candies...")
- pkgs_data = []
- counter = itertools.count()
- length = len(stats_obj_list)
- pbar = self.GetProgressBar()
- pbar.maxval = length
- pbar.start()
- for stats_obj in stats_obj_list:
- # This bit is tightly tied to the data structures returned by
- # PackageStats.
- #
- # Python strings are already implementing the flyweight pattern. What's
- # left is lists and dictionaries.
- i = counter.next()
- raw_pkg_data = cPickle.loads(stats_obj.data_obj.pickle)
- pkg_data = raw_pkg_data
- pkgs_data.append(pkg_data)
- pbar.update(i)
- pbar.finish()
- return pkgs_data
-
- def Run(self):
- """Runs all the checks
-
- Returns a tuple of an exit code and a report.
- """
- # packages_data = self.GetPackageStatsList()
- assert self.sqo_pkgs_list, "The list of packages must not be empty."
- db_stat_objs_by_pkgname = {}
- for pkg in self.sqo_pkgs_list:
- db_stat_objs_by_pkgname[pkg.pkginst.pkgname] = pkg
- logging.debug("Deleting old errors from the database.")
- for pkgname, db_obj in db_stat_objs_by_pkgname.iteritems():
- sqo_os_rel, sqo_arch, sqo_catrel = self.GetSqlobjectTriad()
- db_obj.RemoveCheckpkgResults(
- sqo_os_rel, sqo_arch, sqo_catrel)
- errors, messages, gar_lines = self.GetAllTags(self.sqo_pkgs_list)
- no_errors = len(errors) + 1
- pbar = self.GetProgressBar()
- pbar.maxval = no_errors
- count = itertools.count(1)
- logging.info("Stuffing the candies under the pillow...")
- pbar.start()
- for pkgname, es in errors.iteritems():
- logging.debug("Saving errors of %s to the database.", pkgname)
- for e in es:
- if e.pkgname not in db_stat_objs_by_pkgname:
- logging.warning("Not saving an error for %s.", e.pkgname)
- continue
- db_error = m.CheckpkgErrorTag(srv4_file=db_stat_objs_by_pkgname[e.pkgname],
- pkgname=e.pkgname,
- tag_name=e.tag_name,
- tag_info=e.tag_info,
- msg=e.msg,
- os_rel=sqo_os_rel,
- catrel=sqo_catrel,
- arch=sqo_arch)
- pbar.update(count.next())
- pbar.finish()
- flat_error_list = reduce(operator.add, errors.values(), [])
- screen_report, tags_report = self.FormatReports(errors, messages, gar_lines)
- exit_code = 0
- return (exit_code, screen_report, tags_report)
-
-
-class CheckInterfaceBase(object):
- """Provides an interface for checking functions.
-
- It wraps access to the catalog database.
- """
-
- def __init__(self, osrel, arch, catrel, catalog=None, lines_dict=None):
- self.osrel = osrel
- self.arch = arch
- self.catrel = catrel
- self.catalog = catalog
- if not self.catalog:
- self.catalog = Catalog()
- self.common_paths = {}
- if lines_dict:
- self.lines_dict = lines_dict
- else:
- self.lines_dict = {}
-
- def GetPathsAndPkgnamesByBasename(self, basename):
- """Proxies calls to class member."""
- return self.catalog.GetPathsAndPkgnamesByBasename(
- basename, self.osrel, self.arch, self.catrel)
-
- def GetPkgByPath(self, file_path):
- """Proxies calls to self.system_pkgmap."""
- return self.catalog.GetPkgByPath(
- file_path, self.osrel, self.arch, self.catrel)
-
- def GetInstalledPackages(self):
- return self.catalog.GetInstalledPackages(
- self.osrel, self.arch, self.catrel)
-
- def _GetPathsForArch(self, arch):
- if not arch in self.lines_dict:
- file_name = os.path.join(
- os.path.dirname(__file__), "..", "..", "etc", "commondirs-%s" % arch)
- logging.debug("opening %s", file_name)
- f = open(file_name, "r")
- self.lines_dict[arch] = f.read().splitlines()
- f.close()
- return self.lines_dict[arch]
-
- def GetCommonPaths(self, arch):
- """Returns a list of paths for architecture, from gar/etc/commondirs*."""
- # TODO: If this was cached, it could save a significant amount of time.
- if arch not in ('i386', 'sparc', 'all'):
- logging.warn("Wrong arch: %s", repr(arch))
- return []
- if arch == 'all':
- archs = ('i386', 'sparc')
- else:
- archs = [arch]
- lines = []
- for arch in archs:
- lines.extend(self._GetPathsForArch(arch))
- return lines
-
-
-class IndividualCheckInterface(CheckInterfaceBase):
- """To be passed to the checking functions.
-
- Wraps the creation of tag.CheckpkgTag objects.
- """
-
- def __init__(self, pkgname, osrel, arch, catrel, catalog=None):
- super(IndividualCheckInterface, self).__init__(osrel, arch, catrel, catalog)
- self.pkgname = pkgname
- self.errors = []
-
- def ReportError(self, tag_name, tag_info=None, msg=None):
- logging.debug("self.error_mgr_mock.ReportError(%s, %s, %s)",
- repr(tag_name), repr(tag_info), repr(msg))
- checkpkg_tag = tag.CheckpkgTag(self.pkgname, tag_name, tag_info, msg=msg)
- self.errors.append(checkpkg_tag)
-
-
-class SetCheckInterface(CheckInterfaceBase):
- """To be passed to set checking functions."""
-
- def __init__(self, osrel, arch, catrel, catalog=None):
- super(SetCheckInterface, self).__init__(osrel, arch, catrel, catalog)
- self.errors = []
-
- def ReportError(self, pkgname, tag_name, tag_info=None, msg=None):
- logging.debug("self.error_mgr_mock.ReportError(%s, %s, %s, %s)",
- repr(pkgname),
- repr(tag_name), repr(tag_info), repr(msg))
- checkpkg_tag = tag.CheckpkgTag(pkgname, tag_name, tag_info, msg=msg)
- self.errors.append(checkpkg_tag)
-
-
-class CheckpkgMessenger(object):
- """Class responsible for passing messages from checks to the user."""
- def __init__(self):
- self.messages = []
- self.one_time_messages = {}
- self.gar_lines = []
-
- def Message(self, m):
- logging.debug("self.messenger.Message(%s)", repr(m))
- self.messages.append(m)
-
- def OneTimeMessage(self, key, m):
- logging.debug("self.messenger.OneTimeMessage(%s, %s)", repr(key), repr(m))
- if key not in self.one_time_messages:
- self.one_time_messages[key] = m
-
- def SuggestGarLine(self, m):
- logging.debug("self.messenger.SuggestGarLine(%s)", repr(m))
- self.gar_lines.append(m)
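[Editor's note: repeated OneTimeMessage() calls with the same key keep only
the first message, which is how checks avoid repeating a remark for every
package. A short usage sketch; the key and message strings are invented:

    messenger = CheckpkgMessenger()
    messenger.Message("libfoo.so.1 is provided by two packages")
    messenger.OneTimeMessage("64-bit-note", "Shown at most once per run.")
    messenger.OneTimeMessage("64-bit-note", "Silently dropped.")
    # GetAllTags() returns messenger.messages plus
    # messenger.one_time_messages.values() as the final message list.
]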
-
-
-class CheckpkgManager2(CheckpkgManagerBase):
- """The second incarnation of the checkpkg manager.
-
- Implements the API to be used by checking functions.
-
- Its purpose is to reduce the amount of boilerplate code and allow for easier
- unit test writing.
- """
- def _RegisterIndividualCheck(self, function):
- self.individual_checks.append(function)
-
- def _RegisterSetCheck(self, function):
- self.set_checks.append(function)
-
- def _AutoregisterChecks(self):
- """Autodetects all defined checks."""
- logging.debug("CheckpkgManager2._AutoregisterChecks()")
- checkpkg_module = package_checks
- members = dir(checkpkg_module)
- for member_name in members:
- logging.debug("Examining module member: %s", repr(member_name))
- member = getattr(checkpkg_module, member_name)
- if callable(member):
- if member_name.startswith("Check"):
- logging.debug("Registering individual check %s", repr(member_name))
- self._RegisterIndividualCheck(member)
- elif member_name.startswith("SetCheck"):
- logging.debug("Registering set check %s", repr(member_name))
- self._RegisterSetCheck(member)
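[Editor's note: registration is purely name-based: any callable in
package_checks whose name starts with "Check" runs once per package, and any
whose name starts with "SetCheck" runs once over the whole package set. A
hypothetical individual check, matching the call signature used in
GetAllTags() below:

    def CheckPkgnamePrefix(pkg_data, error_mgr, logger=None, messenger=None):
      # Picked up automatically because the name starts with "Check".
      if not pkg_data["basic_stats"]["pkgname"].startswith("CSW"):
        error_mgr.ReportError("pkgname-does-not-start-with-CSW")
]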
-
- def GetAllTags(self, stats_obj_list):
- errors = {}
- catalog = Catalog()
- logging.debug("Loading all package statistics.")
- pkgs_data = self.GetOptimizedAllStats(stats_obj_list)
- logging.debug("All package statistics loaded.")
- messenger = CheckpkgMessenger()
- # Individual checks
- count = itertools.count()
- pbar = self.GetProgressBar()
- pbar.maxval = len(pkgs_data) * len(self.individual_checks)
- logging.info("Tasting candies one by one...")
- pbar.start()
- for pkg_data in pkgs_data:
- pkgname = pkg_data["basic_stats"]["pkgname"]
- check_interface = IndividualCheckInterface(
- pkgname, self.osrel, self.arch, self.catrel, catalog)
- for function in self.individual_checks:
- logger = logging.getLogger("%s-%s" % (pkgname, function.__name__))
- logger.debug("Calling %s", function.__name__)
- function(pkg_data, check_interface, logger=logger, messenger=messenger)
- if check_interface.errors:
- errors[pkgname] = check_interface.errors
- pbar.update(count.next())
- pbar.finish()
- # Set checks
- logging.info("Tasting them all at once...")
- for function in self.set_checks:
- logger = logging.getLogger(function.__name__)
- check_interface = SetCheckInterface(
- self.osrel, self.arch, self.catrel, catalog)
- logger.debug("Calling %s", function.__name__)
- function(pkgs_data, check_interface, logger=logger, messenger=messenger)
- if check_interface.errors:
- errors = self.SetErrorsToDict(check_interface.errors, errors)
- messages = messenger.messages + messenger.one_time_messages.values()
- return errors, messages, messenger.gar_lines
-
- def Run(self):
- self._AutoregisterChecks()
- return super(CheckpkgManager2, self).Run()
-
-
-def SliceList(l, size):
- """Trasforms a list into a list of lists."""
- idxes = xrange(0, len(l), size)
- sliced = [l[i:i+size] for i in idxes]
- return sliced
-
-
-class CatalogMixin(SqlobjectHelperMixin):
- """Responsible for functionality related to catalog operations.
-
- These include:
- - getting a list of all packages
- - getting a list of packages that contain certain files
- - getting a list of packages that contain files of certain names
- """
-
- def __init__(self):
- super(CatalogMixin, self).__init__()
- self.pkgs_by_path_cache = {}
-
- def GetInstalledPackages(self, osrel, arch, catrel):
- sqo_osrel, sqo_arch, sqo_catrel = self.GetSqlobjectTriad(
- osrel, arch, catrel)
- res = m.Srv4FileInCatalog.select(
- sqlobject.AND(
- m.Srv4FileInCatalog.q.osrel==sqo_osrel,
- m.Srv4FileInCatalog.q.arch==sqo_arch,
- m.Srv4FileInCatalog.q.catrel==sqo_catrel))
- pkgs = []
- for srv4_in_cat in res:
- pkgs.append(srv4_in_cat.srv4file.pkginst.pkgname)
- return pkgs
-
- def GetPathsAndPkgnamesByBasename(self, basename, osrel, arch, catrel):
- """Retrieves pkginst names of packages that have certain files.
-
- Since it needs to match a specific catalog, a table join is required:
- - CswFile (basename)
- - related Srv4FileStats
- - related Srv4FileInCatalog
-
- Args:
- basename: u'libfoo.so.1'
- osrel: u'5.9'
- arch: 'sparc', 'x86'
- catrel: 'stable'
-
- Returns:
- {"/opt/csw/lib": ["CSWfoo", "CSWbar"],
- "/opt/csw/1/lib": ["CSWfoomore"]}
- """
- pkgs = {}
- sqo_osrel, sqo_arch, sqo_catrel = self.GetSqlobjectTriad(
- osrel, arch, catrel)
-
- # Looks like this join is hard to do that way.
- # res = m.Srv4FileInCatalog.select(
- # sqlobject.AND(
- # m.Srv4FileInCatalog.q.osrel==sqo_osrel,
- # m.Srv4FileInCatalog.q.arch==sqo_arch,
- # m.Srv4FileInCatalog.q.catrel==sqo_catrel)).
- # throughTo.srv4file.throughTo.files
-
- # We'll implement it on the application level. First, we'll get all
- # the files that match the basename, and then filter them based on
- # catalog properties.
- res = m.CswFile.select(m.CswFile.q.basename==basename)
- file_list = []
- for f in res:
- # Check whether osrel, arch and catrel are matching.
- for cat in f.srv4_file.in_catalogs:
- if (f.srv4_file.registered
- and cat.osrel == sqo_osrel
- and cat.arch == sqo_arch
- and cat.catrel == sqo_catrel):
- file_list.append(f)
- for obj in file_list:
- pkgs.setdefault(obj.path, [])
- pkgs[obj.path].append(obj.pkginst.pkgname)
- logging.debug("self.error_mgr_mock.GetPathsAndPkgnamesByBasename(%s)"
- ".AndReturn(%s)", repr(basename), pprint.pformat(pkgs))
- return pkgs
-
- def GetPkgByPath(self, full_file_path, osrel, arch, catrel):
- """Returns a list of packages."""
- # Memoization for performance
- key = (full_file_path, osrel, arch, catrel)
- if key not in self.pkgs_by_path_cache:
- pkgs = []
- file_path, basename = os.path.split(full_file_path)
- sqo_osrel, sqo_arch, sqo_catrel = self.GetSqlobjectTriad(
- osrel, arch, catrel)
- res = m.CswFile.select(
- sqlobject.AND(
- m.CswFile.q.path==file_path,
- m.CswFile.q.basename==basename))
- for sqo_file in res:
- # Making sure that we're taking packages only from the right catalog.
- for cat in sqo_file.srv4_file.in_catalogs:
- if (sqo_file.srv4_file.registered
- and cat.osrel == sqo_osrel
- and cat.arch == sqo_arch
- and cat.catrel == sqo_catrel):
- pkgs.append(sqo_file.srv4_file.pkginst.pkgname)
- self.pkgs_by_path_cache[key] = frozenset(pkgs)
- return self.pkgs_by_path_cache[key]
-
- def CommonArchByString(self, s):
- return sharedlib_utils.ArchByString(s)
-
- def Srv4MatchesCatalog(self, sqo_srv4, sqo_arch):
- cat_arch = self.CommonArchByString(sqo_arch.name)
- pkg_arch = self.CommonArchByString(sqo_srv4.arch.name)
- ans = (cat_arch == pkg_arch) or (pkg_arch == common_constants.ARCH_ALL)
- if not ans:
- logging.debug("Srv4MatchesCatalog(): mismatch: %s / %s and %s / %s",
- cat_arch, pkg_arch, pkg_arch, common_constants.ARCH_ALL)
- # Some packages have 'all' as the architecture in the filename, while
- # pkginfo says 'i386'.
- filename_arch = self.CommonArchByString(sqo_srv4.filename_arch.name)
- if filename_arch == common_constants.ARCH_ALL:
- ans = True
- if filename_arch != pkg_arch:
- logging.warning(
- "Package %s declares %s in pkginfo and %s in the filename.",
- sqo_srv4, repr(pkg_arch), repr(filename_arch))
- return ans
-
- def AddSrv4ToCatalog(self, sqo_srv4, osrel, arch, catrel):
- """Registers a srv4 file in a catalog."""
- logging.debug("AddSrv4ToCatalog(%s, %s, %s, %s)",
- sqo_srv4, osrel, arch, catrel)
- sqo_osrel, sqo_arch, sqo_catrel = self.GetSqlobjectTriad(
- osrel, arch, catrel)
- if not self.Srv4MatchesCatalog(sqo_srv4, sqo_arch):
- raise CatalogDatabaseError(
- "Specified package does not match the catalog. "
- "Package: %s, catalog: %s %s %s"
- % (sqo_srv4, osrel, arch, catrel))
- if not sqo_srv4.registered:
- raise CatalogDatabaseError(
- "Package %s (%s) is not registered for releases."
- % (sqo_srv4.basename, sqo_srv4.md5_sum))
- # Checking for presence of a different srv4 with the same pkginst in the
- # same catalog
- pkginst = sqo_srv4.pkginst
- res = m.Srv4FileStats.select(
- m.Srv4FileStats.q.pkginst==pkginst).throughTo.in_catalogs.filter(
- sqlobject.AND(
- m.Srv4FileInCatalog.q.osrel==sqo_osrel,
- m.Srv4FileInCatalog.q.arch==sqo_arch,
- m.Srv4FileInCatalog.q.catrel==sqo_catrel,
- m.Srv4FileInCatalog.q.srv4file!=sqo_srv4))
- if len(list(res)):
- raise CatalogDatabaseError(
- "There already is a package with that pkgname: %s" % pkginst)
- # Checking for presence of the same srv4 already in the catalog.
- res = m.Srv4FileInCatalog.select(
- sqlobject.AND(
- m.Srv4FileInCatalog.q.osrel==sqo_osrel,
- m.Srv4FileInCatalog.q.arch==sqo_arch,
- m.Srv4FileInCatalog.q.catrel==sqo_catrel,
- m.Srv4FileInCatalog.q.srv4file==sqo_srv4))
- if len(list(res)):
- logging.debug("%s is already part of %s %s %s",
- sqo_srv4, osrel, arch, catrel)
- # Our srv4 is already part of that catalog.
- return
- obj = m.Srv4FileInCatalog(
- arch=sqo_arch,
- osrel=sqo_osrel,
- catrel=sqo_catrel,
- srv4file=sqo_srv4)
-
- def RemoveSrv4(self, sqo_srv4, osrel, arch, catrel):
- sqo_osrel, sqo_arch, sqo_catrel = self.GetSqlobjectTriad(
- osrel, arch, catrel)
- sqo_srv4_in_cat = m.Srv4FileInCatalog.select(
- sqlobject.AND(
- m.Srv4FileInCatalog.q.arch==sqo_arch,
- m.Srv4FileInCatalog.q.osrel==sqo_osrel,
- m.Srv4FileInCatalog.q.catrel==sqo_catrel,
- m.Srv4FileInCatalog.q.srv4file==sqo_srv4)).getOne()
- sqo_srv4_in_cat.registered = False
- # TODO(maciej): Remove all files belonging to that one
- for cat_file in sqo_srv4_in_cat.srv4file.files:
- cat_file.destroySelf()
-
-
-class Catalog(CatalogMixin):
- pass
Deleted: csw/mgar/gar/v2/lib/python/checkpkg_lib_test.py
===================================================================
--- csw/mgar/gar/v2/lib/python/checkpkg_lib_test.py 2010-12-10 16:48:58 UTC (rev 11879)
+++ csw/mgar/gar/v2/lib/python/checkpkg_lib_test.py 2010-12-10 20:55:58 UTC (rev 11880)
@@ -1,74 +0,0 @@
-#!/usr/bin/env python2.6
-
-import copy
-import unittest
-import checkpkg_lib
-import tag
-import package_stats
-import database
-import sqlobject
-import models
-import package_stats
-import inspective_package
-import mox
-import test_base
-
-from testdata.neon_stats import pkgstats as neon_stats
-
-
-class CheckpkgManager2UnitTest(unittest.TestCase):
-
- def setUp(self):
- super(CheckpkgManager2UnitTest, self).setUp()
- self.mox = mox.Mox()
-
- def testSingleTag(self):
- m = checkpkg_lib.CheckpkgManager2(
- "testname", [], "5.9", "sparc", "unstable")
- tags = {
- "CSWfoo": [
- tag.CheckpkgTag("CSWfoo", "foo-tag", "foo-info"),
- ],
- }
- screen_report, tags_report = m.FormatReports(tags, [], [])
- expected = u'# Tags reported by testname module\nCSWfoo: foo-tag foo-info\n'
- self.assertEqual(expected, unicode(tags_report))
-
- def testThreeTags(self):
- m = checkpkg_lib.CheckpkgManager2(
- "testname", [], "5.9", "sparc", "unstable")
- tags = {
- "CSWfoo": [
- tag.CheckpkgTag("CSWfoo", "foo-tag", "foo-info"),
- tag.CheckpkgTag("CSWfoo", "bar-tag", "bar-info"),
- tag.CheckpkgTag("CSWfoo", "baz-tag"),
- ],
- }
- screen_report, tags_report = m.FormatReports(tags, [], [])
- expected = (u'# Tags reported by testname module\n'
- u'CSWfoo: foo-tag foo-info\n'
- u'CSWfoo: bar-tag bar-info\n'
- u'CSWfoo: baz-tag\n')
- self.assertEqual(expected, unicode(tags_report))
-
-
-class CheckpkgManager2DatabaseIntegrationTest(
- test_base.SqlObjectTestMixin, unittest.TestCase):
-
- def setUp(self):
- super(CheckpkgManager2DatabaseIntegrationTest, self).setUp()
- self.mox = mox.Mox()
-
- def testInsertNeon(self):
- self.dbc.InitialDataImport()
- sqo_pkg = package_stats.PackageStats.SaveStats(neon_stats[0], True)
- cm = checkpkg_lib.CheckpkgManager2(
- "testname", [sqo_pkg], "SunOS5.9", "sparc", "unstable",
- show_progress=False)
- cm.Run()
- # Verifying that there are some reported error tags.
- self.assertTrue(list(models.CheckpkgErrorTag.select()))
-
-
-if __name__ == '__main__':
- unittest.main()
Modified: csw/mgar/gar/v2/lib/python/checkpkg_test.py
===================================================================
--- csw/mgar/gar/v2/lib/python/checkpkg_test.py 2010-12-10 16:48:58 UTC (rev 11879)
+++ csw/mgar/gar/v2/lib/python/checkpkg_test.py 2010-12-10 20:55:58 UTC (rev 11880)
@@ -1,24 +1,16 @@
-#!/usr/bin/env python2.6
+#!/opt/csw/bin/python2.6
# $Id$
-import copy
import re
import unittest
import mox
import difflib
import checkpkg
-import checkpkg_lib
-import database
-import models as m
import tag
-import package_stats
-import sqlite3
-import sqlobject
-import test_base
+import testdata.dump_output_1 as dump_1
+import testdata.dump_output_2 as dump_2
+import testdata.dump_output_3 as dump_3
-from testdata.tree_stats import pkgstats as tree_stats
-from testdata.neon_stats import pkgstats as neon_stats
-
"""A set of unit tests for the library checking code.
A bunch of lines to test in the interactive Python shell.
@@ -31,6 +23,250 @@
SELECT * FROM systempkgmap WHERE basename = 'libncursesw.so.5';
"""
+class GetLinesBySonameUnitTest(unittest.TestCase):
+
+ def setUp(self):
+ self.pkgmap_mocker = mox.Mox()
+ self.e = checkpkg.LddEmulator()
+
+ def testExpandRunpath_1(self):
+ isalist = ["foo", "bar"]
+ runpath = "/opt/csw/lib/$ISALIST"
+ expected = ["/opt/csw/lib", "/opt/csw/lib/foo", "/opt/csw/lib/bar"]
+ bin_path = "opt/csw/lib"
+ self.assertEquals(expected, self.e.ExpandRunpath(runpath, isalist, bin_path))
+
+ def testExpandRunpath_2(self):
+ isalist = ["foo", "bar"]
+ runpath = "/opt/csw/mysql5/lib/$ISALIST/mysql"
+ expected = ["/opt/csw/mysql5/lib/mysql",
+ "/opt/csw/mysql5/lib/foo/mysql",
+ "/opt/csw/mysql5/lib/bar/mysql"]
+ bin_path = "opt/csw/lib"
+ self.assertEquals(expected, self.e.ExpandRunpath(runpath, isalist, bin_path))
+
+ def testExpandRunpath_OriginSimple(self):
+ isalist = ()
+ runpath = "$ORIGIN"
+ expected = ["/opt/csw/lib"]
+ bin_path = "opt/csw/lib"
+ self.assertEquals(expected, self.e.ExpandRunpath(runpath, isalist, bin_path))
+
+ def testExpandRunpath_OriginDots(self):
+ isalist = ()
+ runpath = "$ORIGIN/.."
+ expected = ["/opt/csw/lib"]
+ bin_path = "opt/csw/lib/subdir"
+ self.assertEquals(expected, self.e.ExpandRunpath(runpath, isalist, bin_path))
+
+ def testExpandRunpath_Caching(self):
+ """Make sure that the cache doesn't mess it up.
+
+ Two invocations, where the only difference is the binary path.
+ """
+ isalist = ()
+ runpath = "/opt/csw/lib/foo"
+ expected = ["/opt/csw/lib/foo"]
+ bin_path = "opt/csw/lib"
+ self.assertEquals(expected, self.e.ExpandRunpath(runpath, isalist, bin_path))
+ expected = ["/opt/csw/lib/foo"]
+ bin_path = "/opt/csw/lib/foo"
+ self.assertEquals(expected, self.e.ExpandRunpath(runpath, isalist, bin_path))
+
+ def testExpandRunpath_OriginCaching(self):
+ """Make sure that the cache doesn't mess it up.
+
+ Two invocations, where the only difference is the binary path.
+ """
+ isalist = ()
+ runpath = "$ORIGIN"
+ expected = ["/opt/csw/lib"]
+ bin_path = "opt/csw/lib"
+ self.assertEquals(expected,
+ self.e.ExpandRunpath(runpath, isalist, bin_path))
+ expected = ["/opt/csw/foo/lib"]
+ bin_path = "/opt/csw/foo/lib"
+ self.assertEquals(expected,
+ self.e.ExpandRunpath(runpath, isalist, bin_path))
+
+ def testExpandRunpath_OnlyIsalist(self):
+ """Make sure that the cache doesn't mess it up.
+
+ Two invocations, where the only difference is the binary path.
+ """
+ isalist = ("bar",)
+ runpath = "/opt/csw/lib/$ISALIST"
+ expected = ["/opt/csw/lib", "/opt/csw/lib/bar"]
+ bin_path = "opt/csw/lib"
+ self.assertEquals(expected, self.e.ExpandRunpath(runpath, isalist, bin_path))
+
+ def testEmulate64BitSymlinks_1(self):
+ runpath_list = ["/opt/csw/mysql5/lib/foo/mysql/64"]
+ expected = "/opt/csw/mysql5/lib/foo/mysql/amd64"
+ self.assertTrue(expected in self.e.Emulate64BitSymlinks(runpath_list))
+
+ def testEmulate64BitSymlinks_2(self):
+ runpath_list = ["/opt/csw/mysql5/lib/64/mysql/foo"]
+ expected = "/opt/csw/mysql5/lib/amd64/mysql/foo"
+ result = self.e.Emulate64BitSymlinks(runpath_list)
+ self.assertTrue(expected in result, "%s not in %s" % (expected, result))
+
+ def testEmulate64BitSymlinks_3(self):
+ runpath_list = ["/opt/csw/mysql5/lib/64/mysql/foo"]
+ expected = "/opt/csw/mysql5/lib/sparcv9/mysql/foo"
+ result = self.e.Emulate64BitSymlinks(runpath_list)
+ self.assertTrue(expected in result, "%s not in %s" % (expected, result))
+
+ def testEmulate64BitSymlinks_4(self):
+ """No repeated paths because of symlink expansion"""
+ runpath_list = ["/opt/csw/lib"]
+ expected = "/opt/csw/lib"
+ result = self.e.Emulate64BitSymlinks(runpath_list)
+ self.assertEquals(1, len(result), "len(%s) != %s" % (result, 1))
+
+ def testEmulateSymlinks_3(self):
+ runpath_list = ["/opt/csw/bdb4"]
+ expected = "/opt/csw/bdb42"
+ result = self.e.Emulate64BitSymlinks(runpath_list)
+ self.assertTrue(expected in result, "%s not in %s" % (expected, result))
+
+ def testEmulateSymlinks_4(self):
+ runpath_list = ["/opt/csw/bdb42"]
+ expected = "/opt/csw/bdb42"
+ not_expected = "/opt/csw/bdb422"
+ result = self.e.Emulate64BitSymlinks(runpath_list)
+ self.assertTrue(expected in result,
+ "%s not in %s" % (expected, result))
+ self.assertFalse(not_expected in result,
+ "%s is in %s" % (not_expected, result))
+
+ def testEmulateSymlinks_5(self):
+ """Install time symlink expansion."""
+ runpath_list = ["/opt/csw/lib/i386"]
+ expected = "/opt/csw/lib"
+ result = self.e.Emulate64BitSymlinks(runpath_list)
+ self.assertTrue(expected in result, "%s not in %s" % (expected, result))
+
+ def testEmulateSymlinks_6(self):
+ """ExpandSymlink for /opt/csw/lib/i386."""
+ runpath_list = ["/opt/csw/lib/i386"]
+ expected = "/opt/csw/lib"
+ not_expected = "/opt/csw/lib/i386"
+ result = self.e.ExpandSymlink("/opt/csw/lib/i386",
+ "/opt/csw/lib",
+ "/opt/csw/lib/i386")
+ self.assertTrue(expected in result, "%s not in %s" % (expected, result))
+ self.assertFalse(not_expected in result,
+ "%s is in %s" % (not_expected, result))
+
+ def testSanitizeRunpath_1(self):
+ self.assertEqual("/opt/csw/lib",
+ self.e.SanitizeRunpath("/opt/csw/lib/"))
+
+ def testSanitizeRunpath_2(self):
+ self.assertEqual("/opt/csw/lib",
+ self.e.SanitizeRunpath("/opt//csw////lib/"))
+
+
+
+class ParseDumpOutputUnitTest(unittest.TestCase):
+
+ def test_1(self):
+ expected = {
+ 'RPATH set': True,
+ 'RUNPATH RPATH the same': True,
+ 'RUNPATH set': True,
+ 'needed sonames': ('librt.so.1',
+ 'libresolv.so.2',
+ 'libc.so.1',
+ 'libgen.so.1',
+ 'libsocket.so.1',
+ 'libnsl.so.1',
+ 'libm.so.1',
+ 'libz.so.1'),
+ 'runpath': ('/opt/csw/lib/$ISALIST',
+ '/opt/csw/lib',
+ '/opt/csw/mysql5/lib/$ISALIST',
+ '/opt/csw/mysql5/lib',
+ '/opt/csw/mysql5/lib/$ISALIST/mysql'),
+ 'soname': 'libmysqlclient.so.15',
+ }
+ self.assertEqual(expected,
+ checkpkg.ParseDumpOutput(dump_1.DATA_DUMP_OUTPUT))
+
+ def testEmpty(self):
+ expected_runpath = ()
+ self.assertEqual(
+ expected_runpath,
+ checkpkg.ParseDumpOutput(dump_2.DATA_DUMP_OUTPUT)["runpath"])
+
+ def testRpathOnly(self):
+ expected = {
+ 'RPATH set': True,
+ 'RUNPATH RPATH the same': False,
+ 'RUNPATH set': False,
+ 'needed sonames': ('librt.so.1',
+ 'libresolv.so.2',
+ 'libc.so.1',
+ 'libgen.so.1',
+ 'libsocket.so.1',
+ 'libnsl.so.1',
+ 'libm.so.1',
+ 'libz.so.1'),
+ 'runpath': ('/opt/csw/lib/$ISALIST',
+ '/opt/csw/lib',
+ '/opt/csw/mysql5/lib/$ISALIST',
+ '/opt/csw/mysql5/lib',
+ '/opt/csw/mysql5/lib/$ISALIST/mysql'),
+ 'soname': 'libmysqlclient.so.15',
+ }
+ self.assertEqual(
+ expected,
+ checkpkg.ParseDumpOutput(dump_3.DATA_DUMP_OUTPUT))
+
+
+class SystemPkgmapUnitTest(unittest.TestCase):
+
+ def testParsePkginfoLine(self):
+ line = ('application CSWcswclassutils '
+ 'cswclassutils - CSW class action utilities')
+ expected = ('CSWcswclassutils',
+ 'cswclassutils - CSW class action utilities')
+ spkgmap = checkpkg.SystemPkgmap()
+ self.assertEqual(expected, spkgmap._ParsePkginfoLine(line))
+
+ def test_InferPackagesFromPkgmapLine(self):
+ line = ("/opt/csw/sbin d none 0755 root bin CSWfping CSWbonobo2 "
+ "CSWkrb5libdev CSWsasl CSWschilybase CSWschilyutils CSWstar "
+ "CSWcommon CSWcacertificates CSWfacter")
+ expected = ["CSWfping", "CSWbonobo2", "CSWkrb5libdev", "CSWsasl",
+ "CSWschilybase", "CSWschilyutils", "CSWstar", "CSWcommon",
+ "CSWcacertificates", "CSWfacter"]
+ spkgmap = checkpkg.SystemPkgmap()
+ self.assertEqual(expected, spkgmap._InferPackagesFromPkgmapLine(line))
+
+ def test_InferPackagesFromPkgmapLine_2(self):
+ line = ("/usr/lib/sparcv9/libpthread.so.1 f none 0755 root bin 41296 28258 "
+ "1018129099 SUNWcslx")
+ expected = ["SUNWcslx"]
+ spkgmap = checkpkg.SystemPkgmap()
+ self.assertEqual(expected, spkgmap._InferPackagesFromPkgmapLine(line))
+
+ def test_InferPackagesFromPkgmapLine_3(self):
+ line = ("/usr/lib/libCrun.so.1 f none 0755 root bin 63588 "
+ "6287 1256043984 SUNWlibC")
+ expected = ["SUNWlibC"]
+ spkgmap = checkpkg.SystemPkgmap()
+ self.assertEqual(expected, spkgmap._InferPackagesFromPkgmapLine(line))
+
+ def test_InferPackagesFromPkgmapLine_4(self):
+ line = ("/opt/csw/apache2/lib/libapr-1.so.0=libapr-1.so.0.3.8 s none "
+ "CSWapache2rt")
+ expected = ["CSWapache2rt"]
+ spkgmap = checkpkg.SystemPkgmap()
+ self.assertEqual(expected, spkgmap._InferPackagesFromPkgmapLine(line))
+
+
class ExtractorsUnitTest(unittest.TestCase):
def testExtractDescriptionFromGoodData(self):
@@ -60,67 +296,68 @@
self.assertEqual(expected, re.match(checkpkg.PSTAMP_RE, pstamp).groupdict())
+class CheckpkgManager2UnitTest(unittest.TestCase):
+
+ def test_1(self):
+ m = checkpkg.CheckpkgManager2("testname", "/tmp", ["CSWfoo"])
+ tags = {
+ "CSWfoo": [
+ tag.CheckpkgTag("CSWfoo", "foo-tag", "foo-info"),
+ ],
+ }
+ screen_report, tags_report = m.FormatReports(tags, [], [])
+ expected = u'# Tags reported by testname module\nCSWfoo: foo-tag foo-info\n'
+ self.assertEqual(expected, unicode(tags_report))
+
+ def test_2(self):
+ m = checkpkg.CheckpkgManager2("testname", "/tmp", ["CSWfoo"])
+ tags = {
+ "CSWfoo": [
+ tag.CheckpkgTag("CSWfoo", "foo-tag", "foo-info"),
+ tag.CheckpkgTag("CSWfoo", "bar-tag", "bar-info"),
+ tag.CheckpkgTag("CSWfoo", "baz-tag"),
+ ],
+ }
+ screen_report, tags_report = m.FormatReports(tags, [], [])
+ expected = (u'# Tags reported by testname module\n'
+ u'CSWfoo: foo-tag foo-info\n'
+ u'CSWfoo: bar-tag bar-info\n'
+ u'CSWfoo: baz-tag\n')
+ self.assertEqual(expected, unicode(tags_report))
+
+
class SliceListUnitTest(unittest.TestCase):
def testOne(self):
l = [1, 2, 3, 4, 5]
s = 1
expected = [[1], [2], [3], [4], [5]]
- self.assertTrue(expected, checkpkg_lib.SliceList(l, s))
+ self.assertEqual(expected, checkpkg.SliceList(l, s))
def testTwo(self):
l = [1, 2, 3, 4, 5]
s = 2
expected = [[1, 2], [3, 4], [5]]
- self.assertTrue(expected, checkpkg_lib.SliceList(l, s))
+ self.assertEqual(expected, checkpkg.SliceList(l, s))
-class SqliteUnitTest(unittest.TestCase):
+class LddEmulatorUnitTest(unittest.TestCase):
- "Makes sure that we can lose state between tests."
-
def setUp(self):
- self.conn = sqlite3.connect(":memory:")
- self.c = self.conn.cursor()
+ self.pkgmap_mocker = mox.Mox()
+ self.e = checkpkg.LddEmulator()
- def tearDown(self):
- self.conn = None
+ def testResolveSoname_1(self):
+ # runpath_list, soname, isalist, path_list, binary_path
+ runpath_list = ["/opt/csw/bdb47/lib", "/opt/csw/lib"]
+ soname = "foo.so.1"
+ path_list = ["/opt/csw/lib", "/opt/csw/bdb47/lib", "/usr/lib"]
+ binary_path = "unused"
+ isalist = ["amd64"]
+ result = self.e.ResolveSoname(runpath_list, soname, isalist,
+ path_list, binary_path)
+ self.assertEqual("/opt/csw/bdb47/lib", result)
- def testCannotCreateTwoTables(self):
- self.c.execute("CREATE TABLE foo (INT bar);")
- self.assertRaises(
- sqlite3.OperationalError,
- self.c.execute, "CREATE TABLE foo (INT bar);")
- def testOne(self):
- self.c.execute("CREATE TABLE foo (INT bar);")
-
- def testTwo(self):
- self.c.execute("CREATE TABLE foo (INT bar);")
-
-
-class SqlobjectUnitTest(test_base.SqlObjectTestMixin, unittest.TestCase):
-
- "Makes sure that we can lose state between methods."
-
- class TestModel(sqlobject.SQLObject):
- name = sqlobject.UnicodeCol(length=255, unique=True, notNone=True)
-
- # This does not work. Why?
- # def testCannotCreateTwoTables(self):
- # self.TestModel.createTable()
- # self.assertRaises(
- # sqlite3.OperationalError,
- # self.TestModel.createTable)
-
- def testOne(self):
- self.TestModel.createTable()
-
- def testTwo(self):
@@ Diff output truncated at 100000 characters. @@
This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site.