[csw-devel] SF.net SVN: gar:[10541] csw/mgar/gar/v2

wahwah at users.sourceforge.net wahwah at users.sourceforge.net
Mon Jul 19 10:39:22 CEST 2010


Revision: 10541
          http://gar.svn.sourceforge.net/gar/?rev=10541&view=rev
Author:   wahwah
Date:     2010-07-19 08:39:22 +0000 (Mon, 19 Jul 2010)

Log Message:
-----------
A major update to checkpkg.

* uses sqlite backend for stats storage; you may purge your ~/.checkpkg/stats
  directories

* performance tweaks allow it to process the whole catalog in 70 minutes
  including extracting all packages, or in 30 minutes for incremental updates

* understands alternative dependencies; if two packages provide foo.so.1,
  dependency on any of them is sufficient

* supports $ORIGIN in RPATH, including relative paths such as $ORIGIN/..

* offers better on-screen messages, e.g. explains the details of every
  soname-not-found error

Modified Paths:
--------------
    csw/mgar/gar/v2/bin/analyze_module_results.py
    csw/mgar/gar/v2/bin/checkpkg
    csw/mgar/gar/v2/bin/checkpkg_collect_stats.py
    csw/mgar/gar/v2/bin/checkpkg_collect_stats_test.py
    csw/mgar/gar/v2/bin/checkpkg_inspect_stats.py
    csw/mgar/gar/v2/bin/checkpkg_run_modules.py
    csw/mgar/gar/v2/bin/update_contents_cache.py
    csw/mgar/gar/v2/lib/python/checkpkg.py
    csw/mgar/gar/v2/lib/python/checkpkg_test.py
    csw/mgar/gar/v2/lib/python/dependency_checks.py
    csw/mgar/gar/v2/lib/python/models.py
    csw/mgar/gar/v2/lib/python/opencsw.py
    csw/mgar/gar/v2/lib/python/package_checks.py
    csw/mgar/gar/v2/lib/python/package_checks_test.py
    csw/mgar/gar/v2/tests/run_tests.py

Added Paths:
-----------
    csw/mgar/gar/v2/bin/pkgdb
    csw/mgar/gar/v2/lib/__init__.py
    csw/mgar/gar/v2/lib/python/__init__.py
    csw/mgar/gar/v2/lib/python/dependency_checks_test.py
    csw/mgar/gar/v2/lib/python/pkgdb.py
    csw/mgar/gar/v2/lib/python/testdata/javasvn_stats.py
    csw/mgar/gar/v2/lib/python/testdata/stubs.py
    csw/mgar/gar/v2/lib/python/testdata/sudo_stats.py
    csw/mgar/gar/v2/lib/python/testdata/tree_stats.py

Modified: csw/mgar/gar/v2/bin/analyze_module_results.py
===================================================================
--- csw/mgar/gar/v2/bin/analyze_module_results.py	2010-07-19 00:49:07 UTC (rev 10540)
+++ csw/mgar/gar/v2/bin/analyze_module_results.py	2010-07-19 08:39:22 UTC (rev 10541)
@@ -5,8 +5,10 @@
 import operator
 import optparse
 import os
+import pprint
 import progressbar
 import sys
+import textwrap
 
 # The following bit of code sets the correct path to Python libraries
 # distributed with GAR.
@@ -14,52 +16,55 @@
              "..", "lib", "python"]
 sys.path.append(os.path.join(*path_list))
 import checkpkg
-import opencsw
 import overrides
 
+BEFORE_OVERRIDES = """If any of the reported errors were false positives, you
+can override them pasting the lines below to the GAR recipe."""
+
+AFTER_OVERRIDES = """Please note that checkpkg isn't suggesting you should
+simply add these overrides do the Makefile.  It only informs what the overrides
+could look like.  You need to understand what are the reported issues about and
+use your best judgement to decide whether to fix the underlying problems or
+override them. For more information, scroll up and read the detailed
+messages."""
+
+UNAPPLIED_OVERRIDES = """WARNING: Some overrides did not match any errors.
+They can be removed, as they don't take any effect anyway.  If you're getting
+errors at the same time, maybe you didn't specify the overrides correctly."""
+
 def main():
   parser = optparse.OptionParser()
-  parser.add_option("-e", "--extract-dir", dest="extractdir",
-                    help="Directory with extracted packages "
-                         "(with error tag files)")
+  parser.add_option("-c", "--catalog_file", dest="catalog",
+                    help="Optional catalog file")
+  parser.add_option("-q", "--quiet", dest="quiet",
+                    default=False, action="store_true",
+                    help=("Display less messages"))
   options, args = parser.parse_args()
   filenames = args
-  srv4_pkgs = [opencsw.CswSrv4File(x) for x in filenames]
-  pkgstats = [checkpkg.PackageStats(x) for x in srv4_pkgs]
+
+  # This might be bottleneck.  Perhaps a list of md5 sums can be given to this
+  # script instead.
+
+  # It might be a good idea to store the error tags in the database and
+  # eliminate the need to access the directory with the error tag files.
+
+  pkgstats = checkpkg.StatsListFromCatalog(filenames, options.catalog)
   overrides_list = [pkg.GetSavedOverrides() for pkg in pkgstats]
-  files = os.listdir(options.extractdir)
-  error_tags = []
-  no_files = len(files)
-  bar = progressbar.ProgressBar()
-  bar.maxval = no_files
-  bar.start()
-  counter = itertools.count()
-  for file_name in files:
-    full_path = os.path.join(options.extractdir, file_name)
-    error_tags.extend(checkpkg.ErrorTagsFromFile(full_path))
-    bar.update(counter.next())
-  bar.finish()
   override_list = reduce(operator.add, overrides_list)
+  error_tags = reduce(operator.add, [stat.GetSavedErrorTags() for stat in pkgstats])
   (tags_after_overrides,
    unapplied_overrides) = overrides.ApplyOverrides(error_tags, override_list)
+  if not options.quiet:
+    if tags_after_overrides:
+      print textwrap.fill(BEFORE_OVERRIDES, 80)
+      for checkpkg_tag in tags_after_overrides:
+        print checkpkg_tag.ToGarSyntax()
+      print textwrap.fill(AFTER_OVERRIDES, 80)
+    if unapplied_overrides:
+      print textwrap.fill(UNAPPLIED_OVERRIDES, 80)
+      for override in unapplied_overrides:
+        print "* Unused %s" % override
   exit_code = bool(tags_after_overrides)
-  if tags_after_overrides:
-    print "If any of the reported errors were false positives, you can"
-    print "override them pasting the lines below to the GAR recipe."
-    for checkpkg_tag in tags_after_overrides:
-      print checkpkg_tag.ToGarSyntax()
-    print "Please note that checkpkg isn't suggesting you should "
-    print "use these overrides.  It only informs what the overrides could "
-    print "look like.  You need to understand what are the reported issues about"
-    print "and use your best judgement to decide whether to fix the underlying"
-    print "problems or override them."
-  if unapplied_overrides:
-    print "WARNING: Some overrides did not match any errors."
-    print "         They can be removed, as they don't take any effect anyway."
-    print "         If you're getting errors at the same time, maybe you didn't"
-    print "         specify the overrides correctly."
-    for override in unapplied_overrides:
-      print "* Unused %s" % override
   sys.exit(exit_code)
 
 

Modified: csw/mgar/gar/v2/bin/checkpkg
===================================================================
--- csw/mgar/gar/v2/bin/checkpkg	2010-07-19 00:49:07 UTC (rev 10540)
+++ csw/mgar/gar/v2/bin/checkpkg	2010-07-19 08:39:22 UTC (rev 10541)
@@ -108,8 +108,10 @@
 MD5_SUMS_CATALOG_FILE=""
 INSTALL_CONTENTS_FILES="/var/sadm/install/contents"
 ANALYZE=1
+PROFILE=0
+QUIET=0
 
-while getopts hsdNM:o:c:A opt; do
+while getopts hsdNM:o:c:Apq opt; do
 	case "${opt}" in
 	  c)
 	    INSTALL_CONTENTS_FILES="${INSTALL_CONTENTS_FILES} ${OPTARG}"
@@ -135,6 +137,11 @@
     A)
       ANALYZE=0
       ;;
+    p)
+      PROFILE=1
+      ;;
+    q) QUIET=1
+      ;;
     *)
       echo "Unknown option '${opt}'"
       ;;
@@ -148,6 +155,8 @@
 readonly SAVE_TAGS
 readonly SKIP_STATS_COLLECTION
 readonly ANALYZE
+readonly PROFILE
+readonly QUIET
 
 if [[ "${display_help}" -eq 1 ]] ; then
   print 'Usage: checkpkg [options] pkg1 [pkg2 ....]'
@@ -159,6 +168,8 @@
   print '   -N         skip statistics collection'
   print '   -M <file>  use package md5sums from a catalog file'
   print '   -A         Do not analyze the results.'
+  print '   -p         Enable profiling'
+  print '   -q         Display less messages'
   exit 0
 fi
 
@@ -242,6 +253,14 @@
 if [[ "${DEBUG}" != "" ]]; then
 	extra_options="--debug"
 fi
+if [[ "${PROFILE}" -eq 1 ]]; then
+	extra_options="${extra_options} --profile"
+fi
+if [[ "${QUIET}" -eq 1 ]]; then
+	quiet_options="--quiet"
+else
+	quiet_options=""
+fi
 
 if [[ -n "${MD5_SUMS_CATALOG_FILE}" ]]; then
 	catalog_options="--catalog=${MD5_SUMS_CATALOG_FILE}"
@@ -250,6 +269,8 @@
 fi
 
 # /var/sadm/install/contents cache update
+# TODO: Either remove this section or stop the stats collection phase from
+# updating the cache.
 ${command_basedir}/update_contents_cache.py ${extra_options}
 if [[ $? -ne 0 ]]; then
 	errmsg "Updating the contents cache has failed."
@@ -265,9 +286,6 @@
   fi
 fi
 
-debugmsg "checkpkg_module_dir: '$checkpkg_module_dir'"
-log_files=""
-module_name_format="%-40s"
 # TODO: A performance problem. The following line means that the md5sums are
 # calculated once more.
 if [ "${MD5_SUMS_CATALOG_FILE}" ]; then
@@ -280,55 +298,12 @@
   md5sums=`gmd5sum "$@" | awk '{print $1}'`
 fi
 debugmsg "All md5 sums: ${md5sums}"
-if [[ -d "${checkpkg_module_dir}" ]]; then
-	for plugin in "${checkpkg_module_dir}/${checkpkg_module_tag}"*; do
-		if [[ -x "${plugin}" ]]; then
-			plugin_base_name=`basename ${plugin}`
-			plugin_log="${EXTRACTDIR}/${plugin_base_name}.log"
-			log_files="${log_files} ${plugin_log}"
-			plugin_name="`echo ${plugin} | sed -e 's+.*/checkpkg-++' | sed -e 's+\.py$++'`"
-			error_tag_file="tags.${plugin_name}"
-			debugmsg "Executing: ${plugin} $extra_options -b \"${checkpkg_stats_basedir}\"" \
-		           "-o \"${EXTRACTDIR}/${error_tag_file}\" `echo ${md5sums}`"
-			printf "${BOLD}${module_name_format}${COLOR_RESET} running..." "${plugin_name}"
-			${plugin} \
-				$extra_options \
-				-b "${checkpkg_stats_basedir}" \
-				-o "${EXTRACTDIR}/${error_tag_file}" \
-				${md5sums} \
-				> "${plugin_log}" 2>&1
-			if [[ "$?" -ne 0 ]]; then
-				printf "\r${module_name_format} ${RED}[ERROR]${COLOR_RESET}        \\n" \
-				       "${plugin_name}"
-				test_suite_ok=0
-			else
-				printf "\r${module_name_format} [Done]        \\n" "${plugin_name}"
-			fi
-		else
-			debugmsg "'${plugin}' is not executable"
-		fi
-	done
-else
-	debugmsg "module dir ${checkpkg_module_dir} does not exist"
-fi
 
-for log_file in ${log_files}; do
-	log_len=`gsed -e 's/\s\+//g' "${log_file}" | gtr -s '\n' | gwc -c | awk '{print $1}'`
-	if [[ "${log_len}" -gt 1 ]]; then
-		print "printing ${log_file}..."
-		debugmsg ">> LOG START: ${log_file}"
-		cat "${log_file}"
-		debugmsg "<< LOG END: ${log_file}"
-	else
-		debugmsg "-- LOG ${log_file} is empty"
-	fi
-done
-
-# This check is special; it uses one executable for all the checks written in Python.
+# Running the checks.
 ${command_basedir}/checkpkg_run_modules.py \
     ${extra_options} \
     -b "${checkpkg_stats_basedir}" \
-    -o "${EXTRACTDIR}/tags.python-checks" \
+    ${quiet_options} \
     ${md5sums}
 if [[ "$?" -ne 0 ]]; then
   print "There was a problem analyzing package stats."
@@ -339,38 +314,13 @@
 	errmsg "One or more tests have finished with an error."
 fi
 
-if [[ "${DEBUG}" != "" ]]; then
-	override_info_printed=0
-	for tagfile in ${EXTRACTDIR}/tags.*; do
-		if [[ -s "${tagfile}" ]]; then
-			if [[ "${override_info_printed}" -ne 1 ]]; then
-				echo "# You can use the following lines to create overrides"
-				echo "# See http://wiki.opencsw.org/checkpkg"
-				override_info_printed=1
-			fi
-			echo "# ${tagfile}:"
-			cat "${tagfile}"
-		fi
-    if [[ "${SAVE_TAGS}" -eq 1 ]]; then
-    	echo "Saving ${tagfile} to `pwd`/${SAVED_TAGS_FILE}"
-      cat "${tagfile}" >> "${SAVED_TAGS_FILE}"
-    fi
-	done
-fi
-
-for tagfile in ${EXTRACTDIR}/tags.*; do
-  if [[ "${SAVE_TAGS}" -eq 1 ]]; then
-    echo "Saving ${tagfile} to ${SAVED_TAGS_FILE} (pwd = `pwd`)"
-    cat "${tagfile}" >> "${SAVED_TAGS_FILE}"
-  fi
-done
-
 if [[ "${ANALYZE}" -eq 1 ]]; then
 # Collecting errors and applying the overrides.
 # This has to use the original files.
   echo "Applying the overrides and analyzing the results."
   ${command_basedir}/analyze_module_results.py \
-      -e "${EXTRACTDIR}" \
+      ${catalog_options} \
+      ${quiet_options} \
       "$@"
   if [[ "$?" -ne 0 ]]; then
     errmsg "${RED}Modular checks are reporting errors.${COLOR_RESET}"

Modified: csw/mgar/gar/v2/bin/checkpkg_collect_stats.py
===================================================================
--- csw/mgar/gar/v2/bin/checkpkg_collect_stats.py	2010-07-19 00:49:07 UTC (rev 10540)
+++ csw/mgar/gar/v2/bin/checkpkg_collect_stats.py	2010-07-19 08:39:22 UTC (rev 10541)
@@ -22,14 +22,16 @@
 import checkpkg
 import opencsw
 
-
 def main():
   parser = optparse.OptionParser()
   parser.add_option("-d", "--debug", dest="debug",
                     default=False, action="store_true",
                     help="Turn on debugging messages")
-  parser.add_option("-c", "--catalog", dest="catalog_file",
+  parser.add_option("-c", "--catalog", dest="catalog",
                     help="Catalog file")
+  parser.add_option("-p", "--profile", dest="profile",
+                    default=False, action="store_true",
+                    help="A disabled option")
   options, args = parser.parse_args()
   if options.debug:
     logging.basicConfig(level=logging.DEBUG)
@@ -39,30 +41,20 @@
   args_display = args
   if len(args_display) > 5:
     args_display = args_display[:5] + ["...more..."]
+  file_list = args
   logging.debug("Processing: %s, please be patient", args_display)
-  packages = [opencsw.CswSrv4File(x, options.debug) for x in args]
-  if options.catalog_file:
-    # Using cached md5sums to save time: injecting md5sums
-    # from the catalog.
-    catalog = opencsw.OpencswCatalog(options.catalog_file)
-    md5s_by_basename = catalog.GetDataByBasename()
-    for pkg in packages:
-      basename = os.path.basename(pkg.pkg_path)
-      # It might be the case that a file is present on disk, but missing from
-      # the catalog file.
-      if basename in md5s_by_basename:
-        pkg.md5sum = md5s_by_basename[basename]["md5sum"]
-  stats_list = [checkpkg.PackageStats(pkg) for pkg in packages]
-  md5s_by_basename = None # To free memory
-  catalog = None          # To free memory
-  del(packages)
+  stats_list = checkpkg.StatsListFromCatalog(
+      file_list, options.catalog, options.debug)
+  # Reversing the item order in the list, so that the pop() method can be used
+  # to get packages, and the order of processing still matches the one in the
+  # catalog file.
   stats_list.reverse()
   total_packages = len(stats_list)
   counter = itertools.count(1)
+  logging.info("Juicing the srv4 package stream files...")
   bar = progressbar.ProgressBar()
   bar.maxval = total_packages
   bar.start()
-  logging.info("Unpacking and examining the srv4 files needed.")
   while stats_list:
     # This way objects will get garbage collected as soon as they are removed
     # from the list by pop().  The destructor (__del__()) of the srv4 class

Modified: csw/mgar/gar/v2/bin/checkpkg_collect_stats_test.py
===================================================================
--- csw/mgar/gar/v2/bin/checkpkg_collect_stats_test.py	2010-07-19 00:49:07 UTC (rev 10540)
+++ csw/mgar/gar/v2/bin/checkpkg_collect_stats_test.py	2010-07-19 08:39:22 UTC (rev 10541)
@@ -20,15 +20,6 @@
   def setUp(self):
     self.mocker = mox.Mox()
 
-  def testGetStatsPath(self):
-    mock_pkg = self.mocker.CreateMock(opencsw.CswSrv4File)
-    mock_pkg.GetMd5sum().AndReturn("abcdef")
-    self.mocker.ReplayAll()
-    sc = ccs.PackageStats(mock_pkg)
-    expected = "/home/joe/.checkpkg/stats/ab/abcdef"
-    self.assertEqual(expected, sc.GetStatsPath("/home/joe"))
-    self.mocker.VerifyAll()
 
-
 if __name__ == '__main__':
 	unittest.main()

Modified: csw/mgar/gar/v2/bin/checkpkg_inspect_stats.py
===================================================================
--- csw/mgar/gar/v2/bin/checkpkg_inspect_stats.py	2010-07-19 00:49:07 UTC (rev 10540)
+++ csw/mgar/gar/v2/bin/checkpkg_inspect_stats.py	2010-07-19 08:39:22 UTC (rev 10541)
@@ -34,6 +34,8 @@
   pkgstats = [checkpkg.PackageStats(x) for x in srv4_pkgs]
   pkgstats = [x.GetAllStats() for x in pkgstats]
   if options.print_stats:
+    print "import datetime"
+    print "pkgstats = ",
     pprint.pprint(pkgstats)
   else:
     code.interact(local=locals())

Modified: csw/mgar/gar/v2/bin/checkpkg_run_modules.py
===================================================================
--- csw/mgar/gar/v2/bin/checkpkg_run_modules.py	2010-07-19 00:49:07 UTC (rev 10540)
+++ csw/mgar/gar/v2/bin/checkpkg_run_modules.py	2010-07-19 08:39:22 UTC (rev 10541)
@@ -3,10 +3,13 @@
 
 """This script runs all the checks written in Python."""
 
+import datetime
 import logging
+import os
 import os.path
 import sys
 import re
+import cProfile
 
 CHECKPKG_MODULE_NAME = "Second checkpkg API version"
 
@@ -34,10 +37,8 @@
                                             options.debug)
   # Running the checks, reporting and exiting.
   exit_code, screen_report, tags_report = check_manager.Run()
-  f = open(options.output, "w")
-  f.write(tags_report)
-  f.close()
-  if screen_report:
+  screen_report = unicode(screen_report)
+  if not options.quiet and screen_report:
     sys.stdout.write(screen_report)
   else:
     logging.debug("No screen report.")
@@ -45,4 +46,11 @@
 
 
 if __name__ == '__main__':
-  main()
+  if "--profile" in sys.argv:
+    t_str = datetime.datetime.now().strftime("%Y-%m-%d-%H-%M")
+    home = os.environ["HOME"]
+    cprof_file_name = os.path.join(
+        home, ".checkpkg", "run-modules-%s.cprof" % t_str)
+    cProfile.run("main()", sort=1, filename=cprof_file_name)
+  else:
+    main()

Added: csw/mgar/gar/v2/bin/pkgdb
===================================================================
--- csw/mgar/gar/v2/bin/pkgdb	                        (rev 0)
+++ csw/mgar/gar/v2/bin/pkgdb	2010-07-19 08:39:22 UTC (rev 10541)
@@ -0,0 +1 @@
+link ../lib/python/pkgdb.py
\ No newline at end of file


Property changes on: csw/mgar/gar/v2/bin/pkgdb
___________________________________________________________________
Added: svn:special
   + *

Modified: csw/mgar/gar/v2/bin/update_contents_cache.py
===================================================================
--- csw/mgar/gar/v2/bin/update_contents_cache.py	2010-07-19 00:49:07 UTC (rev 10540)
+++ csw/mgar/gar/v2/bin/update_contents_cache.py	2010-07-19 08:39:22 UTC (rev 10541)
@@ -24,6 +24,11 @@
       dest="debug",
       default=False,
       action="store_true")
+  parser.add_option("-p", "--profile",
+      dest="profile",
+      default=False,
+      action="store_true",
+      help="A disabled option")
   (options, args) = parser.parse_args()
   if options.debug:
     logging.basicConfig(level=logging.DEBUG)

Added: csw/mgar/gar/v2/lib/__init__.py
===================================================================
Added: csw/mgar/gar/v2/lib/python/__init__.py
===================================================================
Modified: csw/mgar/gar/v2/lib/python/checkpkg.py
===================================================================
--- csw/mgar/gar/v2/lib/python/checkpkg.py	2010-07-19 00:49:07 UTC (rev 10540)
+++ csw/mgar/gar/v2/lib/python/checkpkg.py	2010-07-19 08:39:22 UTC (rev 10541)
@@ -32,8 +32,8 @@
 import tag
 
 DEBUG_BREAK_PKGMAP_AFTER = False
-DB_SCHEMA_VERSION = 4L
-PACKAGE_STATS_VERSION = 8L
+DB_SCHEMA_VERSION = 5L
+PACKAGE_STATS_VERSION = 9L
 SYSTEM_PKGMAP = "/var/sadm/install/contents"
 NEEDED_SONAMES = "needed sonames"
 RUNPATH = "runpath"
@@ -42,7 +42,7 @@
 CONFIG_DB_SCHEMA = "db_schema_version"
 DO_NOT_REPORT_SURPLUS = set([u"CSWcommon", u"CSWcswclassutils", u"CSWisaexec"])
 DO_NOT_REPORT_MISSING = set([])
-DO_NOT_REPORT_MISSING_RE = [r"SUNW.*", r"\*SUNW.*"]
+DO_NOT_REPORT_MISSING_RE = [r"\*?SUNW.*"]
 DUMP_BIN = "/usr/ccs/bin/dump"
 PSTAMP_RE = r"(?P<username>\w+)@(?P<hostname>[\w\.-]+)-(?P<timestamp>\d+)"
 DESCRIPTION_RE = r"^([\S]+) - (.*)$"
@@ -66,20 +66,11 @@
     "/lib",
 ]
 
+CONTENT_PKG_RE = r"^\*?(CSW|SUNW)[0-9a-zA-Z\-]?[0-9a-z\-]+$"
 
 # This shared library is present on Solaris 10 on amd64, but it's missing on
 # Solaris 8 on i386.  It's okay if it's missing.
 ALLOWED_ORPHAN_SONAMES = set([u"libm.so.2"])
-DEPENDENCY_FILENAME_REGEXES = (
-    (r".*\.pl$", u"CSWperl"),
-    (r".*\.pm$", u"CSWperl"),
-    (r".*\.py$", u"CSWpython"),
-    (r".*\.rb$", u"CSWruby"),
-    (r".*\.el$", u"CSWemacscommon"),
-    (r".*\.elc$", u"CSWemacscommon"),
-)
-# Compiling the regexes ahead of time.
-DEPENDENCY_FILENAME_REGEXES = tuple([(re.compile(x), y) for x, y in DEPENDENCY_FILENAME_REGEXES])
 
 REPORT_TMPL = u"""#if $missing_deps or $surplus_deps or $orphan_sonames
 Dependency issues of $pkgname:
@@ -170,19 +161,21 @@
 
 def GetOptions():
   parser = optparse.OptionParser()
-  parser.add_option("-b", dest="stats_basedir",
+  parser.add_option("-b", "--stats-basedir", dest="stats_basedir",
                     help=("The base directory with package statistics "
                           "in yaml format, e.g. ~/.checkpkg/stats"))
   parser.add_option("-d", "--debug", dest="debug",
                     default=False, action="store_true",
                     help="Turn on debugging messages")
-  parser.add_option("-o", "--output", dest="output",
-                    help="Output error tag file")
+  parser.add_option("-p", "--profile", dest="profile",
+                    default=False, action="store_true",
+                    help=("Turn on profiling"))
+  parser.add_option("-q", "--quiet", dest="quiet",
+                    default=False, action="store_true",
+                    help=("Print less messages"))
   (options, args) = parser.parse_args()
   if not options.stats_basedir:
     raise ConfigurationError("ERROR: the -b option is missing.")
-  if not options.output:
-    raise ConfigurationError("ERROR: the -o option is missing.")
   # Using set() to make the arguments unique.
   return options, set(args)
 
@@ -208,11 +201,16 @@
 
   CHECKPKG_DIR = ".checkpkg"
   SQLITE3_DBNAME_TMPL = "checkpkg-db-%(fqdn)s"
-  TABLES = (m.CswConfig,
-            m.CswFile,
-            m.CswPackage,
+  TABLES_THAT_NEED_UPDATES = (m.CswFile,)
+  TABLES = TABLES_THAT_NEED_UPDATES + (
+            m.Pkginst,
+            m.CswConfig,
             m.Srv4FileStats,
-            m.CheckpkgOverride)
+            m.CheckpkgOverride,
+            m.CheckpkgErrorTag,
+            m.Architecture,
+            m.OsRelease,
+            m.Maintainer)
   sqo_conn = None
   db_path = None
 
@@ -261,6 +259,7 @@
     Read it slowly the first time and cache it for later."""
     super(SystemPkgmap, self).__init__(debug=debug)
     self.cache = {}
+    self.pkgs_by_path_cache = {}
     self.file_mtime = None
     self.cache_mtime = None
     self.initialized = False
@@ -268,6 +267,8 @@
       self.system_pkgmap_files = [SYSTEM_PKGMAP]
     else:
       self.system_pkgmap_files = system_pkgmap_files
+    self.csw_pkg_re = re.compile(CONTENT_PKG_RE)
+    self.digits_re = re.compile(r"^[0-9]+$")
 
   def _LazyInitializeDatabase(self):
     if not self.initialized:
@@ -303,13 +304,20 @@
       need_to_create_tables = True
     if need_to_create_tables:
       self.CreateTables()
+      self.PerformInitialDataImport()
     if not self.IsDatabaseUpToDate():
-      logging.info("Rebuilding the package cache, can take a few minutes.")
-      self.PurgeDatabase()
-      self.PopulateDatabase()
+      logging.debug("Rebuilding the package cache, can take a few minutes.")
+      self.ClearTablesForUpdates()
+      self.RefreshDatabase()
     self.initialized = True
 
-  def PopulateDatabase(self):
+  def RefreshDatabase(self):
+    for pkgmap_path in self.system_pkgmap_files:
+      self._ProcessSystemPkgmap(pkgmap_path)
+    self.PopulatePackagesTable()
+    self.SetDatabaseMtime()
+
+  def PerformInitialDataImport(self):
     """Imports data into the database.
 
     Original bit of code from checkpkg:
@@ -343,8 +351,8 @@
       estimated_lines = break_after
     else:
       estimated_lines = contents_length / INSTALL_CONTENTS_AVG_LINE_LENGTH
-    # The progressbar library doesn't like to handle large numbers, and it
-    # displays up to 99% if we feed it a maxval in the range of hundreds of
+    # The progressbar library doesn't like handling larger numbers
+    # It displays up to 99% if we feed it a maxval in the range of hundreds of
     # thousands.
     progressbar_divisor = int(estimated_lines / 1000)
     if progressbar_divisor < 1:
@@ -357,7 +365,7 @@
     # soname - {<path1>: <line1>, <path2>: <line2>, ...}
     logging.debug("Building database cache db of the %s file",
                   pkgmap_path)
-    print "Processing %s" % pkgmap_path
+    logging.info("Processing %s, it can take a few minutes", pkgmap_path)
     count = itertools.count()
     bar = progressbar.ProgressBar()
     bar.maxval = estimated_lines / progressbar_divisor
@@ -380,7 +388,8 @@
       pkgmap_entry_dir, pkgmap_entry_base_name = os.path.split(pkgmap_entry_path)
       # The following SQLObject-driven inserts are 60 times slower than the raw
       # sqlite API.
-      # pkgmap_entry = m.CswFile(basename=pkgmap_entry_base_name, path=pkgmap_entry_dir, line=line.strip())
+      # pkgmap_entry = m.CswFile(basename=pkgmap_entry_base_name,
+      #                          path=pkgmap_entry_dir, line=line.strip())
       # This page has some hints:
       # http://www.mail-archive.com/sqlobject-discuss@lists.sourceforge.net/msg04641.html
       # "These are simple straightforward INSERTs without any additional
@@ -398,7 +407,7 @@
         break
     bar.finish()
     self.sqlite_conn.commit()
-    logging.info("All lines of %s were processed.", pkgmap_path)
+    logging.debug("All lines of %s were processed.", pkgmap_path)
 
   def _ParsePkginfoLine(self, line):
     fields = re.split(c.WS_RE, line)
@@ -418,15 +427,19 @@
     bar.start()
     count = itertools.count()
     INSERT_SQL = """
-    INSERT INTO csw_package (pkgname, pkg_desc)
+    INSERT INTO pkginst (pkgname, pkg_desc)
     VALUES (?, ?);
     """
+    # If self.GetInstalledPackages calls out to the initialization,
+    # the result is an infinite recursion.
+    installed_pkgs = self.GetInstalledPackages(initialize=False)
     for line in stdout.splitlines():
       pkgname, pkg_desc = self._ParsePkginfoLine(line)
-      # This is slow:
-      # pkg = m.CswPackage(pkgname=pkgname, pkg_desc=pkg_desc)
-      # This is much faster:
-      self.sqlite_conn.execute(INSERT_SQL, [pkgname, pkg_desc])
+      if pkgname not in installed_pkgs:
+        # This is slow:
+        # pkg = m.Pkginst(pkgname=pkgname, pkg_desc=pkg_desc)
+        # This is much faster:
+        self.sqlite_conn.execute(INSERT_SQL, [pkgname, pkg_desc])
       i = count.next()
       bar.update(i)
     # Need to commit, otherwise subsequent SQLObject calls will fail.
@@ -444,13 +457,13 @@
       res.getOne().float_value = mtime
 
   def SetDatabaseSchemaVersion(self):
-    res = m.CswConfig.select(m.CswConfig.q.option_key==CONFIG_DB_SCHEMA)
-    if res.count() == 0:
+    try:
+      config_option = m.CswConfig.select(
+          m.CswConfig.q.option_key==CONFIG_DB_SCHEMA).getOne()
+      config_option.int_value = DB_SCHEMA_VERSION
+    except sqlobject.main.SQLObjectNotFound, e:
       version = m.CswConfig(option_key=CONFIG_DB_SCHEMA,
                             int_value=DB_SCHEMA_VERSION)
-    else:
-      config_option = res.getOne()
-      config_option.int_value = DB_SCHEMA_VERSION
 
   def GetPkgmapLineByBasename(self, filename):
     """Returns pkgmap lines by basename:
@@ -459,12 +472,9 @@
         path2: line2,
       }
     """
-    self._LazyInitializeDatabase()
     if filename in self.cache:
       return self.cache[filename]
-    # sql = "SELECT path, line FROM systempkgmap WHERE basename = ?;"
-    # c = self.conn.cursor()
-    # c.execute(sql, [filename])
+    self._LazyInitializeDatabase()
     res = m.CswFile.select(m.CswFile.q.basename==filename)
     lines = {}
     for obj in res:
@@ -472,15 +482,26 @@
     if len(lines) == 0:
       logging.debug("Cache doesn't contain filename %s", filename)
     self.cache[filename] = lines
-    logging.debug("GetPkgmapLineByBasename(%s) --> %s",
-                  filename, lines)
     return lines
 
   def _InferPackagesFromPkgmapLine(self, line):
-    """A stub of a function, to be enhanced."""
+    """Given a pkgmap line, return all packages it contains."""
     line = line.strip()
     parts = re.split(c.WS_RE, line)
-    return [parts[-1]]
+    pkgs = []
+    if parts[1] == 'd':
+      parts = parts[6:]
+    while parts:
+      part = parts.pop()
+      if self.digits_re.match(part):
+        break
+      elif "none" == part:
+        break
+      pkgs.append(part)
+    # Make the packages appear in the same order as in the install/contents
+    # file.
+    pkgs.reverse()
+    return pkgs
 
   def GetPathsAndPkgnamesByBasename(self, filename):
     """Returns paths and packages by basename.
@@ -490,11 +511,35 @@
      "/opt/csw/1/lib": ["CSWfoomore"]}
     """
     lines = self.GetPkgmapLineByBasename(filename)
+    pkgs = {}
     # Infer packages
     for file_path in lines:
-      lines[file_path] = self._InferPackagesFromPkgmapLine(lines[file_path])
-    return lines
+      pkgs[file_path] = self._InferPackagesFromPkgmapLine(lines[file_path])
+    # self.error_mgr_mock.GetPathsAndPkgnamesByBasename('libc.so.1').AndReturn({
+    #       "/usr/lib": (u"SUNWcsl",)})
+    logging.debug("self.error_mgr_mock.GetPathsAndPkgnamesByBasename(%s).AndReturn(%s)",
+                  repr(filename), pprint.pformat(pkgs))
+    return pkgs
 
+  def GetPkgByPath(self, full_path):
+    if full_path not in self.pkgs_by_path_cache:
+      self._LazyInitializeDatabase()
+      path, basename = os.path.split(full_path)
+      try:
+        obj = m.CswFile.select(
+            sqlobject.AND(
+              m.CswFile.q.path==path,
+              m.CswFile.q.basename==basename)).getOne()
+        self.pkgs_by_path_cache[full_path] = self._InferPackagesFromPkgmapLine(
+            obj.line)
+      except sqlobject.main.SQLObjectNotFound, e:
+        logging.debug("Couldn't find in the db: %s/%s", path, basename)
+        logging.debug(e)
+        self.pkgs_by_path_cache[full_path] = []
+    logging.debug("self.error_mgr_mock.GetPkgByPath(%s).AndReturn(%s)",
+                  repr(full_path), pprint.pformat(self.pkgs_by_path_cache[full_path]))
+    return self.pkgs_by_path_cache[full_path]
+
   def GetDatabaseMtime(self):
     if not self.cache_mtime:
       res = m.CswConfig.select(m.CswConfig.q.option_key==CONFIG_MTIME)
@@ -541,20 +586,25 @@
                   repr(good_version), repr(fresh))
     return fresh and good_version
 
+  def ClearTablesForUpdates(self):
+    for table in self.TABLES_THAT_NEED_UPDATES:
+      table.clearTable()
+
   def PurgeDatabase(self, drop_tables=False):
     if drop_tables:
       for table in self.TABLES:
         if table.tableExists():
           table.dropTable()
     else:
-      logging.info("Truncating all tables")
+      logging.debug("Truncating all tables")
       for table in self.TABLES:
         table.clearTable()
 
-  def GetInstalledPackages(self):
+  def GetInstalledPackages(self, initialize=True):
     """Returns a dictionary of all installed packages."""
-    self._LazyInitializeDatabase()
-    res = m.CswPackage.select()
+    if initialize:
+      self._LazyInitializeDatabase()
+    res = m.Pkginst.select()
     return dict([[str(x.pkgname), str(x.pkg_desc)] for x in res])
 
 
@@ -578,8 +628,6 @@
       isalist: isalist elements
       binary_path: Necessary to expand $ORIGIN
     """
-    # TODO: Implement $ORIGIN support
-    # Probably not here as it would make caching unusable.
     key = (runpath, tuple(isalist))
     if key not in self.runpath_expand_cache:
       origin_present = False
@@ -595,7 +643,8 @@
             binary_path = "/" + binary_path
           runpath = runpath.replace('$ORIGIN', binary_path)
       if '$ISALIST' in runpath:
-        expanded_list = [runpath.replace('$ISALIST', isa) for isa in isalist]
+        expanded_list  = [runpath.replace('/$ISALIST', '')]
+        expanded_list += [runpath.replace('$ISALIST', isa) for isa in isalist]
       else:
         expanded_list = [runpath]
       expanded_list = [os.path.abspath(p) for p in expanded_list]
@@ -609,8 +658,6 @@
   def ExpandSymlink(self, symlink, target, input_path):
     key = (symlink, target, input_path)
     if key not in self.symlink_expand_cache:
-      # A lot of time is spent here, e.g. 13841985 calls, 206s.
-      # TODO: Optimize this.  Make it a class and add a cache?
       symlink_re = re.compile(r"%s(/|$)" % symlink)
       if re.search(symlink_re, input_path):
         result = input_path.replace(symlink, target)
@@ -673,12 +720,12 @@
       # in the path_list.
       for expanded_p in expanded_p_list:
         original_paths_by_expanded_paths[expanded_p] = p
-    logging.debug("%s: looking for %s in %s",
-        soname, runpath_list, original_paths_by_expanded_paths.keys())
+    # logging.debug("%s: looking for %s in %s",
+    #     soname, runpath_list, original_paths_by_expanded_paths.keys())
     for runpath_expanded in runpath_list:
       if runpath_expanded in original_paths_by_expanded_paths:
-        logging.debug("Found %s",
-                      original_paths_by_expanded_paths[runpath_expanded])
+        # logging.debug("Found %s",
+        #               original_paths_by_expanded_paths[runpath_expanded])
         return original_paths_by_expanded_paths[runpath_expanded]
 
 
@@ -729,10 +776,7 @@
     self.packages = []
 
   def GetPackageStatsList(self):
-    stats_list = []
-    for md5sum in self.md5sum_list:
-      stats_list.append(PackageStats(None, self.stats_basedir, md5sum))
-    return stats_list
+    return [PackageStats(None, self.stats_basedir, x) for x in self.md5sum_list]
 
   def FormatReports(self, errors, messages, gar_lines):
     namespace = {
@@ -745,9 +789,7 @@
     }
     screen_t = Template.Template(SCREEN_ERROR_REPORT_TMPL, searchList=[namespace])
     tags_report_t = Template.Template(TAG_REPORT_TMPL, searchList=[namespace])
-    screen_report = unicode(screen_t)
-    tags_report = unicode(tags_report_t)
-    return screen_report, tags_report
+    return screen_t, tags_report_t
 
   def SetErrorsToDict(self, set_errors, a_dict):
     # These were generated by a set, but are likely to be bound to specific
@@ -765,7 +807,7 @@
     return errors
 
   def GetOptimizedAllStats(self, stats_obj_list):
-    logging.info("Loading package statistics.")
+    logging.info("Unwrapping candies...")
     pkgs_data = []
     counter = itertools.count()
     length = len(stats_obj_list)
@@ -795,7 +837,59 @@
     Returns a tuple of an exit code and a report.
     """
     packages_data = self.GetPackageStatsList()
+    db_stat_objs_by_pkgname = {}
+    obj_id_list = []
+    for pkg in packages_data:
+      db_obj = pkg.GetDbObject()
+      db_stat_objs_by_pkgname[db_obj.pkginst.pkgname] = db_obj
+      obj_id_list.append(db_obj.id)
+    logging.debug("Deleting old %s errors from the database.",
+                  db_obj.pkginst.pkgname)
+    conn = sqlobject.sqlhub.processConnection
+    # It's the maximum number of ORs in a SQL statement.
+    # Slicing the long list up into s-sized segments.  1000 is too much.
+    obj_id_lists = SliceList(obj_id_list, 900)
+    for obj_id_list in obj_id_lists:
+      # WARNING: This is raw SQL, potentially breaking during a transition to
+      # another db.  It's here for efficiency.
+      sql = ("DELETE FROM checkpkg_error_tag WHERE %s;"
+             % " OR ".join("srv4_file_id = %s" % x for x in obj_id_list))
+      conn.query(sql)
+    # Need to construct the predicate by hand.  Otherwise:
+    # File "/opt/csw/lib/python/site-packages/sqlobject/sqlbuilder.py",
+    # line 829, in OR
+    # return SQLOp("OR", op1, OR(*ops))
+    # RuntimeError: maximum recursion depth exceeded while calling a Python object
+    #
+    # The following also tries to use recursion and fails.
+    # delete_predicate = sqlobject.OR(False)
+    # for pred in delete_predicate_list:
+    #   delete_predicate = sqlobject.OR(delete_predicate, pred)
+    # conn.query(
+    #     conn.sqlrepr(sqlbuilder.Delete(m.CheckpkgErrorTag.sqlmeta.table,
+    #       delete_predicate
+    #     )))
+      # res = m.CheckpkgErrorTag.select(m.CheckpkgErrorTag.q.srv4_file==db_obj)
+      # for obj in res:
+      #   obj.destroySelf()
     errors, messages, gar_lines = self.GetAllTags(packages_data)
+    no_errors = len(errors) + 1
+    bar = progressbar.ProgressBar()
+    bar.maxval = no_errors
+    count = itertools.count(1)
+    logging.info("Stuffing the candies under the pillow...")
+    bar.start()
+    for pkgname, es in errors.iteritems():
+      logging.debug("Saving %s errors to the database.", pkgname)
+      for e in es:
+        db_error = m.CheckpkgErrorTag(srv4_file=db_stat_objs_by_pkgname[e.pkgname],
+                                      pkgname=e.pkgname,
+                                      tag_name=e.tag_name,
+                                      tag_info=e.tag_info,
+                                      msg=e.msg)
+      bar.update(count.next())
+    bar.finish()
+    flat_error_list = reduce(operator.add, errors.values(), [])
     screen_report, tags_report = self.FormatReports(errors, messages, gar_lines)
     exit_code = 0
     return (exit_code, screen_report, tags_report)
@@ -817,19 +911,17 @@
     else:
       self.lines_dict = {}
 
-  def GetPkgmapLineByBasename(self, basename):
-    """Proxies calls to self.system_pkgmap."""
-    logging.warning("GetPkgmapLineByBasename(%s): deprecated function",
-                    basename)
-    return self.system_pkgmap.GetPkgmapLineByBasename(basename)
-
   def GetPathsAndPkgnamesByBasename(self, basename):
     """Proxies calls to self.system_pkgmap."""
     return self.system_pkgmap.GetPathsAndPkgnamesByBasename(basename)
 
-  def GetInstalledPackages(self):
-    return self.system_pkgmap.GetInstalledPackages()
+  def GetPkgByPath(self, path):
+    """Proxies calls to self.system_pkgmap."""
+    return self.system_pkgmap.GetPkgByPath(path)
 
+  def GetInstalledPackages(self, initialize=True):
+    return self.system_pkgmap.GetInstalledPackages(initialize)
+
   def _GetPathsForArch(self, arch):
     if not arch in self.lines_dict:
       file_name = os.path.join(
@@ -866,6 +958,8 @@
     self.errors = []
 
   def ReportError(self, tag_name, tag_info=None, msg=None):
+    logging.debug("self.error_mgr_mock.ReportError(%s, %s, %s)",
+                  repr(tag_name), repr(tag_info), repr(msg))
     checkpkg_tag = tag.CheckpkgTag(self.pkgname, tag_name, tag_info, msg=msg)
     self.errors.append(checkpkg_tag)
 
@@ -878,6 +972,9 @@
     self.errors = []
 
   def ReportError(self, pkgname, tag_name, tag_info=None, msg=None):
+    logging.debug("self.error_mgr_mock.ReportError(%s, %s, %s, %s)",
+                  repr(pkgname),
+                  repr(tag_name), repr(tag_info), repr(msg))
     checkpkg_tag = tag.CheckpkgTag(pkgname, tag_name, tag_info, msg=msg)
     self.errors.append(checkpkg_tag)
 
@@ -890,13 +987,16 @@
     self.gar_lines = []
 
   def Message(self, m):
+    logging.debug("self.messenger.Message(%s)", repr(m))
     self.messages.append(m)
 
   def OneTimeMessage(self, key, m):
+    logging.debug("self.messenger.OneTimeMessage(%s, %s)", repr(key), repr(m))
     if key not in self.one_time_messages:
       self.one_time_messages[key] = m
 
   def SuggestGarLine(self, m):
+    logging.debug("self.messenger.SuggestGarLine(%s)", repr(m))
     self.gar_lines.append(m)
 
 
@@ -941,7 +1041,7 @@
     count = itertools.count()
     bar = progressbar.ProgressBar()
     bar.maxval = len(pkgs_data) * len(self.individual_checks)
-    logging.info("Running individual checks.")
+    logging.info("Tasting candies one by one...")
     bar.start()
     for pkg_data in pkgs_data:
       pkgname = pkg_data["basic_stats"]["pkgname"]
@@ -955,7 +1055,7 @@
         bar.update(count.next())
     bar.finish()
     # Set checks
-    logging.info("Running set checks.")
+    logging.info("Tasting them all at once...")
     for function in self.set_checks:
       logger = logging.getLogger(function.__name__)
       check_interface = SetCheckInterface(pkgmap)
@@ -989,31 +1089,12 @@
   instantiated object.
   TODO: Store overrides in a separate table for performance.
   """
-  # This list needs to be synchronized with the CollectStats() method.
-  STAT_FILES = [
-      "bad_paths",
-      "binaries",
-      "binaries_dump_info",
-      # "defined_symbols",
-      "depends",
-      "isalist",
-      # "ldd_dash_r",
-      "overrides",
-      "pkgchk",
-      "pkginfo",
-      "pkgmap",
-      # This entry needs to be last because of the assumption in the
-      # CollectStats() function.
-      "basic_stats",
-      "files_metadata",
-  ]
 
   def __init__(self, srv4_pkg, stats_basedir=None, md5sum=None, debug=False):
     super(PackageStats, self).__init__(debug=debug)
     self.srv4_pkg = srv4_pkg
     self.md5sum = md5sum
     self.dir_format_pkg = None
-    self.stats_path = None
     self.all_stats = {}
     self.stats_basedir = stats_basedir
     self.db_pkg_stats = None
@@ -1037,14 +1118,6 @@
       self.md5sum = self.srv4_pkg.GetMd5sum()
     return self.md5sum
 
-  def GetStatsPath(self):
-    if not self.stats_path:
-      md5sum = self.GetMd5sum()
-      two_chars = md5sum[0:2]
-      parts = [self.stats_basedir, two_chars, md5sum]
-      self.stats_path = os.path.join(*parts)
-    return self.stats_path
-
   def GetDbObject(self):
     if not self.db_pkg_stats:
       md5_sum = self.GetMd5sum()
@@ -1078,9 +1151,8 @@
       self.dir_format_pkg = self.srv4_pkg.GetDirFormatPkg()
     return self.dir_format_pkg
 
-  def MakeStatsDir(self):
-    stats_path = self.GetStatsPath()
-    self._MakeDirP(stats_path)
+  def GetMtime(self):
+    return self.srv4_pkg.GetMtime()
 
   def _MakeDirP(self, dir_path):
     """mkdir -p equivalent.
@@ -1125,6 +1197,7 @@
         basic_stats["pkg_basename"])
     basic_stats["pkgname"] = dir_pkg.pkgname
     basic_stats["catalogname"] = dir_pkg.GetCatalogname()
+    basic_stats["md5_sum"] = self.GetMd5sum()
     return basic_stats
 
   def GetOverrides(self):
@@ -1216,18 +1289,14 @@
 
   def CollectStats(self, force=False):
     """Lazy stats collection."""
-    if force:
+    if force or not self.StatsExist():
       return self._CollectStats()
-    if not self.StatsExist():
-      return self._CollectStats()
     return self.ReadSavedStats()
 
   def _CollectStats(self):
     """The list of variables needs to be synchronized with the one
     at the top of this class.
     """
-    stats_path = self.GetStatsPath()
-    self.MakeStatsDir()
     dir_pkg = self.GetDirFormatPkg()
     logging.debug("Collecting %s package statistics.", repr(dir_pkg.pkgname))
     override_dicts = self.GetOverrides()
@@ -1243,11 +1312,67 @@
         "bad_paths": dir_pkg.GetFilesContaining(BAD_CONTENT_REGEXES),
         "basic_stats": self.GetBasicStats(),
         "files_metadata": dir_pkg.GetFilesMetadata(),
+        "mtime": self.GetMtime(),
     }
-    db_pkg_stats = m.Srv4FileStats(md5_sum=self.GetMd5sum(),
-                                   pkgname=pkg_stats["basic_stats"]["pkgname"],
-                                   stats_version=PACKAGE_STATS_VERSION,
-                                   data=cPickle.dumps(pkg_stats))
+    pkgname = pkg_stats["basic_stats"]["pkgname"]
+    # Getting sqlobject representations.
+    try:
+      pkginst = m.Pkginst.select(m.Pkginst.q.pkgname==pkgname).getOne()
+    except sqlobject.main.SQLObjectNotFound, e:
+      logging.debug(e)
+      pkginst = m.Pkginst(pkgname=pkgname)
+    try:
+      res = m.Architecture.select(
+          m.Architecture.q.name==pkg_stats["pkginfo"]["ARCH"])
+      arch = res.getOne()
+    except sqlobject.main.SQLObjectNotFound, e:
+      logging.debug(e)
+      arch = m.Architecture(name=pkg_stats["pkginfo"]["ARCH"])
+    parsed_basename = pkg_stats["basic_stats"]["parsed_basename"]
+    os_rel_name = parsed_basename["osrel"]
+    try:
+      os_rel = m.OsRelease.select(
+          m.OsRelease.q.short_name==os_rel_name).getOne()
+    except sqlobject.main.SQLObjectNotFound, e:
+      logging.debug(e)
+      os_rel = m.OsRelease(short_name=os_rel_name, full_name=os_rel_name)
+    try:
+      maint_email = pkg_stats["pkginfo"]["EMAIL"]
+      maintainer = m.Maintainer.select(
+          m.Maintainer.q.email==maint_email).getOne()
+    except sqlobject.main.SQLObjectNotFound, e:
+      logging.debug(e)
+      maintainer = m.Maintainer(email=maint_email)
+
+    # If there are any previous records of the same pkginst, arch and os_rel,
+    # we're marking them as not-latest.
+    # This assumes that the packages are examined in a chronological order.
+    res = m.Srv4FileStats.select(sqlobject.AND(
+        m.Srv4FileStats.q.pkginst==pkginst,
+        m.Srv4FileStats.q.arch==arch,
+        m.Srv4FileStats.q.os_rel==os_rel))
+    for obj in res:
+      obj.latest = False
+
+    rev=None
+    if "revision_info" in parsed_basename:
+      if "REV" in parsed_basename["revision_info"]:
+        rev = parsed_basename["revision_info"]["REV"]
+    # Creating the object in the database.
+    db_pkg_stats = m.Srv4FileStats(
+        md5_sum=self.GetMd5sum(),
+        pkginst=pkginst,
+        catalogname=pkg_stats["basic_stats"]["catalogname"],
+        stats_version=PACKAGE_STATS_VERSION,
+        os_rel=os_rel,
+        arch=arch,
+        basename=pkg_stats["basic_stats"]["pkg_basename"],
+        maintainer=maintainer,
+        latest=True,
+        version_string=parsed_basename["full_version_string"],
+        rev=rev,
+        mtime=self.GetMtime(),
+        data=cPickle.dumps(pkg_stats))
     # Inserting overrides as rows into the database
     for override_dict in override_dicts:
       o = m.CheckpkgOverride(srv4_file=db_pkg_stats,
@@ -1288,6 +1413,13 @@
       override_list.append(overrides.Override(**d))
     return override_list
 
+  def GetSavedErrorTags(self):
+    pkg_stats = self.GetDbObject()
+    res = m.CheckpkgErrorTag.select(m.CheckpkgErrorTag.q.srv4_file==pkg_stats)
+    tag_list = [tag.CheckpkgTag(x.pkgname, x.tag_name, x.tag_info, x.msg)
+                for x in res]
+    return tag_list
+
   def ReadSavedStats(self):
     if not self.all_stats:
       md5_sum = self.GetMd5sum()
@@ -1378,3 +1510,24 @@
     pkgname, tag_name, tag_info = tag.ParseTagLine(line)
     error_tags.append(tag.CheckpkgTag(pkgname, tag_name, tag_info))
   return error_tags
+
+
+def StatsListFromCatalog(file_name_list, catalog_file_name=None, debug=False):
+  packages = [opencsw.CswSrv4File(x, debug) for x in file_name_list]
+  if catalog_file_name:
+    catalog = opencsw.OpencswCatalog(catalog_file_name)
+    md5s_by_basename = catalog.GetDataByBasename()
+    for pkg in packages:
+      basename = os.path.basename(pkg.pkg_path)
+      # It might be the case that a file is present on disk, but missing from
+      # the catalog file.
+      if basename in md5s_by_basename:
+        pkg.md5sum = md5s_by_basename[basename]["md5sum"]
+  stats_list = [PackageStats(pkg) for pkg in packages]
+  return stats_list
+
+def SliceList(l, size):
+  """Transforms a list into a list of lists."""
+  idxes = xrange(0, len(l), size)
+  sliced = [l[i:i+size] for i in idxes]
+  return sliced

Modified: csw/mgar/gar/v2/lib/python/checkpkg_test.py
===================================================================
--- csw/mgar/gar/v2/lib/python/checkpkg_test.py	2010-07-19 00:49:07 UTC (rev 10540)
+++ csw/mgar/gar/v2/lib/python/checkpkg_test.py	2010-07-19 08:39:22 UTC (rev 10541)
@@ -44,14 +44,15 @@
   def testExpandRunpath_1(self):
     isalist = ["foo", "bar"]
     runpath = "/opt/csw/lib/$ISALIST"
-    expected = ["/opt/csw/lib/foo", "/opt/csw/lib/bar"]
+    expected = ["/opt/csw/lib", "/opt/csw/lib/foo", "/opt/csw/lib/bar"]
     bin_path = "opt/csw/lib"
     self.assertEquals(expected, self.e.ExpandRunpath(runpath, isalist, bin_path))
 
   def testExpandRunpath_2(self):
     isalist = ["foo", "bar"]
     runpath = "/opt/csw/mysql5/lib/$ISALIST/mysql"
-    expected = ["/opt/csw/mysql5/lib/foo/mysql",
+    expected = ["/opt/csw/mysql5/lib/mysql",
+                "/opt/csw/mysql5/lib/foo/mysql",
                 "/opt/csw/mysql5/lib/bar/mysql"]
     bin_path = "opt/csw/lib"
     self.assertEquals(expected, self.e.ExpandRunpath(runpath, isalist, bin_path))
@@ -93,9 +94,22 @@
     runpath = "$ORIGIN"
     expected = ["/opt/csw/lib"]
     bin_path = "opt/csw/lib"
-    self.assertEquals(expected, self.e.ExpandRunpath(runpath, isalist, bin_path))
+    self.assertEquals(expected,
+                      self.e.ExpandRunpath(runpath, isalist, bin_path))
     expected = ["/opt/csw/foo/lib"]
     bin_path = "/opt/csw/foo/lib"
+    self.assertEquals(expected,
+                      self.e.ExpandRunpath(runpath, isalist, bin_path))
+
+  def testExpandRunpath_OnlyIsalist(self):
+    """Make sure that the cache doesn't mess it up.
+
+    Two invocations, where the only difference is the binary path.
+    """
+    isalist = ("bar",)
+    runpath = "/opt/csw/lib/$ISALIST"
+    expected = ["/opt/csw/lib", "/opt/csw/lib/bar"]
+    bin_path = "opt/csw/lib"
     self.assertEquals(expected, self.e.ExpandRunpath(runpath, isalist, bin_path))
 
   def testEmulate64BitSymlinks_1(self):
@@ -233,7 +247,38 @@
     spkgmap = checkpkg.SystemPkgmap()
     self.assertEqual(expected, spkgmap._ParsePkginfoLine(line))
 
+  def test_InferPackagesFromPkgmapLine(self):
+    line = ("/opt/csw/sbin d none 0755 root bin CSWfping CSWbonobo2 "
+            "CSWkrb5libdev CSWsasl CSWschilybase CSWschilyutils CSWstar "
+            "CSWcommon CSWcacertificates CSWfacter")
+    expected = ["CSWfping", "CSWbonobo2", "CSWkrb5libdev", "CSWsasl",
+                "CSWschilybase", "CSWschilyutils", "CSWstar", "CSWcommon",
+                "CSWcacertificates", "CSWfacter"]
+    spkgmap = checkpkg.SystemPkgmap()
+    self.assertEqual(expected, spkgmap._InferPackagesFromPkgmapLine(line))
 
+  def test_InferPackagesFromPkgmapLine_2(self):
+    line = ("/usr/lib/sparcv9/libpthread.so.1 f none 0755 root bin 41296 28258 "
+            "1018129099 SUNWcslx")
+    expected = ["SUNWcslx"]
+    spkgmap = checkpkg.SystemPkgmap()
+    self.assertEqual(expected, spkgmap._InferPackagesFromPkgmapLine(line))
+
+  def test_InferPackagesFromPkgmapLine_3(self):
+    line = ("/usr/lib/libCrun.so.1 f none 0755 root bin 63588 "
+            "6287 1256043984 SUNWlibC")
+    expected = ["SUNWlibC"]
+    spkgmap = checkpkg.SystemPkgmap()
+    self.assertEqual(expected, spkgmap._InferPackagesFromPkgmapLine(line))
+
+  def test_InferPackagesFromPkgmapLine_4(self):
+    line = ("/opt/csw/apache2/lib/libapr-1.so.0=libapr-1.so.0.3.8 s none "
+            "CSWapache2rt")
+    expected = ["CSWapache2rt"]
+    spkgmap = checkpkg.SystemPkgmap()
+    self.assertEqual(expected, spkgmap._InferPackagesFromPkgmapLine(line))
+
+
 class PackageStatsUnitTest(unittest.TestCase):
 
   def setUp(self):
@@ -384,7 +429,7 @@
     }
     screen_report, tags_report = m.FormatReports(tags, [], [])
     expected = u'# Tags reported by testname module\nCSWfoo: foo-tag foo-info\n'
-    self.assertEqual(expected, tags_report)
+    self.assertEqual(expected, unicode(tags_report))
 
   def test_2(self):
     m = checkpkg.CheckpkgManager2("testname", "/tmp", ["CSWfoo"])
@@ -400,9 +445,23 @@
                 u'CSWfoo: foo-tag foo-info\n'
                 u'CSWfoo: bar-tag bar-info\n'
                 u'CSWfoo: baz-tag\n')
-    self.assertEqual(expected, tags_report)
+    self.assertEqual(expected, unicode(tags_report))
 
 
+class SliceListUnitTest(unittest.TestCase):
 
+  def testOne(self):
+    l = [1, 2, 3, 4, 5]
+    s = 1
+    expected = [[1], [2], [3], [4], [5]]
+    self.assertTrue(expected, checkpkg.SliceList(l, s))
+
+  def testTwo(self):
+    l = [1, 2, 3, 4, 5]
+    s = 2
+    expected = [[1, 2], [3, 4], [5]]
+    self.assertTrue(expected, checkpkg.SliceList(l, s))
+
+
 if __name__ == '__main__':
   unittest.main()

Modified: csw/mgar/gar/v2/lib/python/dependency_checks.py
===================================================================
--- csw/mgar/gar/v2/lib/python/dependency_checks.py	2010-07-19 00:49:07 UTC (rev 10540)
+++ csw/mgar/gar/v2/lib/python/dependency_checks.py	2010-07-19 08:39:22 UTC (rev 10541)
@@ -8,8 +8,6 @@
     ("/opt/csw/lib", "libdb-4.7.so", "Deprecated Berkeley DB location"),
     ("/opt/csw/lib/mysql", "libmysqlclient_r.so.15",
      "Please use /opt/csw/mysql5/..."),
-    ("/opt/csw/lib/sparcv9/mysql", "libmysqlclient_r.so.15",
-     "Please use /opt/csw/mysql5/..."),
     ("/opt/csw/lib/mysql", "libmysqlclient.so.15",
      "Please use /opt/csw/mysql5/..."),
 )
@@ -18,7 +16,33 @@
     r'^opt/csw/lib/python/site-packages.*',
 )
 
-def Libraries(pkg_data, error_mgr, logger, messenger, path_and_pkg_by_basename):
+DEPENDENCY_FILENAME_REGEXES = (
+    (r".*\.pl$",   (u"CSWperl",)),
+    (r".*\.pm$",   (u"CSWperl",)),
+    (r".*\.py$",   (u"CSWpython",)),
+    (r".*\.rb$",   (u"CSWruby",)),
+    (r".*\.elc?$", (u"CSWemacscommon",)),
+)
+
+PREFERRED_DIRECTORY_PROVIDERS = set([u"CSWcommon"])
+
+def Libraries(pkg_data, error_mgr, logger, messenger, path_and_pkg_by_basename,
+              pkg_by_path):
+  """Checks shared libraries.
+
+  Returns:
+    [
+      (u"CSWfoo", "why is this needed"),
+    ]
+
+  New idea, a list of reasons:
+    [
+      [(u"CSWfoo", "why is it needed"),
+       (u"CSWfooalt", "there must be a reason"),
+       (u"CSWfooyetanother", "here's another one")],
+      [(u"CSWbar", "this serves another purpose")],
+    ]
+  """
   pkgname = pkg_data["basic_stats"]["pkgname"]
   logger.debug("Libraries(): pkgname = %s", repr(pkgname))
   orphan_sonames = []
@@ -30,14 +54,15 @@
     for soname in binary_info["needed sonames"]:
       resolved = False
       path_list = path_and_pkg_by_basename[soname].keys()
-      logger.debug("%s @ %s: looking for %s in %s",
-                   soname,
-                   binary_info["path"],
-                   binary_info["runpath"],
-                   path_list)
+      # logger.debug("%s @ %s: looking for %s in %s",
+      #              soname,
+      #              binary_info["path"],
+      #              binary_info["runpath"],
+      #              path_list)
       runpath_tuple = (tuple(binary_info["runpath"])
                       + tuple(checkpkg.SYS_DEFAULT_RUNPATH))
       runpath_history = []
+      alternative_deps = set()
       for runpath in runpath_tuple:
         runpath = ldd_emulator.SanitizeRunpath(runpath)
         runpath_list = ldd_emulator.ExpandRunpath(runpath, isalist, binary_path)
@@ -50,14 +75,11 @@
                                                    path_list,
                                                    binary_path)
         if resolved_path:
-          logger.debug("%s needed by %s:",
-                 soname, binary_info["path"])
-          logger.debug("=> %s provided by %s",
-              resolved_path, path_and_pkg_by_basename[soname][resolved_path])
           resolved = True
-          req_pkg = path_and_pkg_by_basename[soname][resolved_path][-1]
+          req_pkgs = path_and_pkg_by_basename[soname][resolved_path]
           reason = ("provides %s/%s needed by %s"
                     % (resolved_path, soname, binary_info["path"]))
+          # Looking for deprecated libraries.
           for bad_path, bad_soname, msg in DEPRECATED_LIBRARY_LOCATIONS:
             if resolved_path == bad_path and soname == bad_soname:
               logger.debug("Bad lib found: %s/%s", bad_path, bad_soname)
@@ -66,8 +88,10 @@
                   "deprecated-library",
                   ("%s %s %s/%s"
                    % (binary_info["path"], msg, resolved_path, soname)))
-          required_deps.append((req_pkg, reason))
-          break
+          for req_pkg in req_pkgs:
+            alternative_deps.add((req_pkg, reason))
+      # print "alternative_deps:", alternative_deps
+      required_deps.append(list(alternative_deps))
       if not resolved:
         orphan_sonames.append((soname, binary_info["path"]))
         if path_list:
@@ -87,16 +111,109 @@
   # TODO: Report orphan sonames here
   return required_deps
 
-def ByFilename(pkg_data, error_mgr, logger, messenger, path_and_pkg_by_basename):
+def ByFilename(pkg_data, error_mgr, logger, messenger,
+               path_and_pkg_by_basename, pkg_by_path):
   pkgname = pkg_data["basic_stats"]["pkgname"]
+  reason_group = []
   req_pkgs_reasons = []
   dep_regexes = [(re.compile(x), x, y)
-                 for x, y in checkpkg.DEPENDENCY_FILENAME_REGEXES]
-  for regex, regex_str, dep_pkgname in dep_regexes:
+                 for x, y in DEPENDENCY_FILENAME_REGEXES]
+  for regex, regex_str, dep_pkgnames in dep_regexes:
     for pkgmap_entry in pkg_data["pkgmap"]:
       if pkgmap_entry["path"] and regex.match(pkgmap_entry["path"]):
         msg = ("found file(s) matching %s, e.g. %s"
                % (regex_str, repr(pkgmap_entry["path"])))
-        req_pkgs_reasons.append((dep_pkgname, msg))
+        for dep_pkgname in dep_pkgnames:
+          reason_group.append((dep_pkgname, msg))
         break
+    if reason_group:
+      req_pkgs_reasons.append(reason_group)
+      reason_group = []
   return req_pkgs_reasons
+
+def ByDirectory(pkg_data, error_mgr, logger, messenger,
+                path_and_pkg_by_basename, pkg_by_path):
+  """Finds packages that provide each directory's parent.
+
+  1. For each directory
+    1.1. Find the parent
+    1.2. Add the parent to the list of packages to depend on.
+  """
+  pkgname = pkg_data["basic_stats"]["pkgname"]
+  req_pkgs_reasons = []
+  needed_dirs = set()
+  # Adding base dirs of all the files to the dirs that need to be checked.
+  for pkgmap_entry in pkg_data["pkgmap"]:
+    if "path" in pkgmap_entry and pkgmap_entry["path"]:
+      base_dir, dirname = os.path.split(pkgmap_entry["path"])
+      needed_dirs.add(base_dir)
+  for needed_dir in needed_dirs:
+    reason_group = []
+    # TODO: The preferred directory providers should not depend on other packages to
+    # provide directories.
+    if pkgname not in PREFERRED_DIRECTORY_PROVIDERS:
+      # If the path is provided by CSWcommon or other preferred package, don't
+      # mention other packages.
+      pkgs_to_mention = []
+      preferred_mentioned = False
+      for preferred_pkg in PREFERRED_DIRECTORY_PROVIDERS:
+        if preferred_pkg in pkg_by_path[needed_dir]:
+          pkgs_to_mention.append(preferred_pkg)
+          preferred_mentioned = True
+      if not preferred_mentioned:
+        if not pkg_by_path[needed_dir]:
+          # There's no sense in reporting '/' and ''.
+          if needed_dir and needed_dir != '/':
+            error_mgr.ReportError(pkgname, "base-dir-not-found", repr(needed_dir))
+        elif len(pkg_by_path[needed_dir]) < 5:
+          pkgs_to_mention = pkg_by_path[needed_dir]
+        else:
+          pkgs_to_mention = pkg_by_path[needed_dir][:5] + ["and/or others"]
+      msg = (u"%s provides directory %s is needed by the package %s"
+             % (pkgs_to_mention, needed_dir, pkgname))
+      for pkg_to_mention in pkgs_to_mention:
+        reason_group.append((pkg_to_mention, msg))
+      if reason_group:
+        req_pkgs_reasons.append(reason_group)
+    else:
+      error_mgr.ReportError(pkgname, "base-dir-not-provided-by-any-package", needed_dir)
+  return req_pkgs_reasons
+
+
+def GetPathAndPkgByBasename(error_mgr, logger, basenames,
+                            path_and_pkg_by_basename=None):
+  """{"<basename>": {"/path/1": ["CSWfoo1"], "/path/2": ["CSWfoo2"]}}"""
+  if not path_and_pkg_by_basename:
+    path_and_pkg_by_basename = {}
+  for basename in basenames:
+    path_and_pkg_by_basename[basename] = (
+        error_mgr.GetPathsAndPkgnamesByBasename(basename))
+  return path_and_pkg_by_basename
+
+def GetPkgByFullPath(error_mgr, logger, paths_to_verify, pkg_by_path):
+  """Resolves a list of paths to a mapping between paths and packages.
+
+  Returns: {"/opt/csw/lib": ["CSWcommon", "CSWfoo"]}
+  """
+  if not pkg_by_path:
+    pkg_by_path = {}
+  for path in paths_to_verify:
+    if path not in pkg_by_path:
+      result = error_mgr.GetPkgByPath(path)
+      # logger.warning("error_mgr.GetPkgByPath(%s) => %s", repr(path), repr(result))
+      pkg_by_path[path] = result
+  # logger.warning("New paths: %s" % pprint.pformat(pkg_by_path))
+  return pkg_by_path
+
+def MissingDepsFromReasonGroups(reason_groups, declared_deps_set):
+  missing_dep_groups = []
+  for reason_group in reason_groups:
+    dependency_fulfilled = False
+    pkgnames = [x for x, y in reason_group]
+    for pkgname in pkgnames:
+      if pkgname in declared_deps_set:
+        dependency_fulfilled = True
+        break
+    if not dependency_fulfilled:
+      missing_dep_groups.append(pkgnames)
+  return missing_dep_groups

Added: csw/mgar/gar/v2/lib/python/dependency_checks_test.py
===================================================================
--- csw/mgar/gar/v2/lib/python/dependency_checks_test.py	                        (rev 0)
+++ csw/mgar/gar/v2/lib/python/dependency_checks_test.py	2010-07-19 08:39:22 UTC (rev 10541)
@@ -0,0 +1,334 @@
+#!/opt/csw/bin/python2.6
+
+import checkpkg
+import copy
+import mox
+import unittest
+import pprint
+import dependency_checks as depchecks
+from testdata import stubs
+from testdata.tree_stats import pkgstats as tree_stats
+from testdata.sudo_stats import pkgstats as sudo_stats
+from testdata.javasvn_stats import pkgstats as javasvn_stats
+
+
class TestGetPkgByFullPath(unittest.TestCase):
  """Tests for depchecks.GetPkgByFullPath."""

  def testOneCall(self):
    # Only /foo/bar is absent from pkg_by_path, so only it gets looked up.
    self.mocker = mox.Mox()
    self.error_mgr_mock = self.mocker.CreateMock(
        checkpkg.SetCheckInterface)
    self.error_mgr_mock.GetPkgByPath('/foo/bar').AndReturn(["CSWbar"])
    self.mocker.ReplayAll()
    stub_logger = stubs.LoggerStub()
    result = depchecks.GetPkgByFullPath(
        self.error_mgr_mock, stub_logger,
        ["/foo", "/foo/bar"], {"/foo": ["CSWfoo"]})
    self.assertEqual({'/foo': ['CSWfoo'], '/foo/bar': ['CSWbar']}, result)
    self.mocker.VerifyAll()

  def testDodgyCall(self):
    # Directories missing from pkg_by_path are resolved via GetPkgByPath.
    self.mocker = mox.Mox()
    self.error_mgr_mock = self.mocker.CreateMock(
        checkpkg.SetCheckInterface)
    self.error_mgr_mock.GetPkgByPath('/opt/csw/lib').AndReturn(["CSWcommon"])
    self.error_mgr_mock.GetPkgByPath('/opt/csw/bin').AndReturn(["CSWcommon"])
    self.mocker.ReplayAll()
    stub_logger = stubs.LoggerStub()
    paths_to_verify = set(
     ['/opt/csw/bin',
      '/opt/csw/bin/bar',
      '/opt/csw/lib',
      '/opt/csw/lib/libfoo.so.1'])
    pkg_by_path = {'/opt/csw/bin/bar': ['CSWbar'],
                   '/opt/csw/lib/libfoo.so.1': ['CSWbar']}
    result = depchecks.GetPkgByFullPath(
        self.error_mgr_mock, stub_logger, paths_to_verify, pkg_by_path)
    expected = {
        '/opt/csw/bin': [u'CSWcommon'],
        '/opt/csw/bin/bar': ['CSWbar'],
        '/opt/csw/lib': [u'CSWcommon'],
        '/opt/csw/lib/libfoo.so.1': ['CSWbar']}
    self.assertEqual(expected, result)
    self.mocker.VerifyAll()
+
+
class TestByDirectory(unittest.TestCase):
  """Tests for depchecks.ByDirectory and depchecks.Libraries.

  Uses the CSWtree package stats (tree_stats) as the default fixture;
  testLibraries_Javasvn swaps in javasvn_stats.
  """

  def setUp(self):
    self.mocker = mox.Mox()
    self.logger_stub = stubs.LoggerStub()
    self.messenger_stub = stubs.MessengerStub()
    self.error_mgr_mock = self.mocker.CreateMock(
        checkpkg.SetCheckInterface)
    self.pkg_data = copy.deepcopy(tree_stats[0])

  def testByDirectory_1(self):
    # NOTE(review): this test only checks that ByDirectory runs without
    # raising; it makes no assertion on the result.
    path_and_pkg_by_basename = {
         'libc.so.1': {u'/usr/lib': [u'SUNWcsl'],
                       u'/usr/lib/libp/sparcv9': [u'SUNWdplx'],
                       u'/usr/lib/sparcv9': [u'SUNWcslx']},
         'license': {'/opt/csw/share/doc/tree': ['CSWtree']},
         'man1': {'/opt/csw/share/man': ['CSWtree']},
         'tree': {'/opt/csw/bin': ['CSWtree'],
                  '/opt/csw/share/doc': ['CSWtree']},
         'tree.1': {'/opt/csw/share/man/man1': ['CSWtree']}}
    # A realistic (large) mapping of paths to the packages providing them.
    pkg_by_path = {
       '/opt/csw/bin': [
           u'CSWautogen', u'CSWbinutils', u'CSWbonobo2', u'CSWcommon',
           u'CSWcryptopp', u'CSWcvs', u'CSWdejagnu', u'CSWemacs',
           u'CSWemacsbincommon', u'CSWemacschooser', u'CSWenscript',
           u'CSWevince', u'CSWexpect', u'CSWfacter', u'CSWfakeroot',
           u'CSWfindutils', u'CSWflex', u'CSWfltk', u'CSWfoomaticfilters',
           u'CSWgawk', u'CSWgdb', u'CSWgedit', u'CSWggv', u'CSWglib',
           u'CSWgmake', u'CSWgnomedesktop', u'CSWgnomedocutils',
           u'CSWgnomemenus', u'CSWgnuplot', u'CSWgperf', u'CSWgstplugins',
           u'CSWgstreamer', u'CSWgtk', u'CSWgtk2', u'CSWgtkmmdevel',
           u'CSWguile', u'CSWgwhois', u'CSWhevea', u'CSWhtmltidy', u'CSWimlib',
           u'CSWisaexec', u'CSWjikes', u'CSWjove', u'CSWkrb5libdev', u'CSWksh',
           u'CSWlatex2html', u'CSWlibbonoboui', u'CSWlibdvdreaddevel',
           u'CSWlibgegl', u'CSWlibgnome', u'CSWlibgphoto2', u'CSWlibm17n',
           u'CSWlibm17ndevel', u'CSWlibnet', u'CSWlibofx', u'CSWlibotf',
           u'CSWlibotfdevel', u'CSWlibxft2', u'CSWlibxine', u'CSWlibxml',
           u'CSWlsof', u'CSWm17ndb', u'CSWmbrowse', u'CSWmikmod', u'CSWmono',
           u'CSWnautilus', u'CSWnetcat', u'CSWnetpbm', u'CSWngrep', u'CSWnmap',
           u'CSWntop', u'CSWocaml', u'CSWopensp', u'CSWpango', u'CSWpkgget',
           u'CSWpkgutil', u'CSWpmlclemktxtsimple', u'CSWpmnetsnmp',
           u'CSWpmsvnmirror', u'CSWpstoedit', u'CSWpstree', u'CSWqt',
           u'CSWrdist', u'CSWsamefile', u'CSWsbcl', u'CSWschilybase',
           u'CSWschilyutils', u'CSWsdlsound', u'CSWsetoolkit', u'CSWstar',
           u'CSWt1lib', u'CSWtaglibgcc', u'CSWtcl', u'CSWtetex', u'CSWtk',
           u'CSWtransfig', u'CSWvte', u'CSWxmms', u'CSWxpm', u'CSWzope'],
       '/opt/csw/bin/tree': ['CSWtree'],
       '/opt/csw/share/doc': [
           u'CSWcairomm', u'CSWtcpwrap', u'CSWfltk', u'CSWgsfonts',
           u'CSWlibsigc++rt', u'CSWglibmmdevel', u'CSWgstreamer', u'CSWgtkmm2',
           u'CSWksh', u'CSWlibgphoto2', u'CSWlibxine', u'CSWmeanwhile',
           u'CSWsasl', u'CSWsbcl', u'CSWsilctoolkit', u'CSWt1lib',
           u'CSWtaglibgcc', u'CSWtetex', u'CSWgperf', u'CSWjikes',
           u'CSWlibgnome', u'CSWdejagnu', u'CSWnetpbm', u'CSWlibgnomeui',
           u'CSWsetoolkit', u'CSWgtksourceview', u'CSWhevea', u'CSWopensprt',
           u'CSWopensp', u'CSWplotutilrt', u'CSWplotutildevel',
           u'CSWpstoeditrt', u'CSWpstoedit', u'CSWpstoeditdevel',
           u'CSWopenspdevel', u'CSWlibdvdread', u'CSWlibdvdreaddevel',
           u'CSWschilyutils', u'CSWstar', u'CSWautogenrt', u'CSWlatex2html',
           u'CSWautogen', u'CSWlibotf', u'CSWlibotfdevel', u'CSWgcc3corert',
           u'CSWgcc3g++rt', u'CSWlibofxrt', u'CSWgcc3adart', u'CSWgcc3rt',
           u'CSWgcc3g++', u'CSWgcc3ada', u'CSWgcc3', u'CSWlibm17n',
           u'CSWm17ndb', u'CSWlibm17ndevel', u'CSWgcc2core', u'CSWgcc2g++',
           u'CSWgcc3g77rt', u'CSWgcc3g77', u'CSWgcc4g95', u'CSWemacscommon',
           u'CSWemacsbincommon', u'CSWemacs', u'CSWcommon', u'CSWbashcmplt',
           u'CSWcacertificates', u'CSWgstplugins', u'CSWgnomemenus',
           u'CSWgnomedesktop', u'CSWnautilus', u'CSWlibofx', u'CSWgamin',
           u'CSWpkgutil', u'CSWgcc3core', u'CSWgnomemime2'],
       '/opt/csw/share/doc/tree': ['CSWtree'],
       '/opt/csw/share/doc/tree/license': ['CSWtree'],
       '/opt/csw/share/man': [
           u'CSWgdbm', u'CSWlibnet', u'CSWbinutils', u'CSWtcpwrap',
           u'CSWenscript', u'CSWffcall', u'CSWflex', u'CSWfltk', u'CSWfping',
           u'CSWglib', u'CSWgmake', u'CSWgstreamer', u'CSWgtk', u'CSWgwhois',
           u'CSWbonobo2', u'CSWkrb5libdev', u'CSWksh', u'CSWlibgphoto2',
           u'CSWmikmod', u'CSWlibxine', u'CSWlsof', u'CSWngrep', u'CSWocaml',
           u'CSWpmmd5', u'CSWpmlclemktxtsimple', u'CSWpmtextdiff', u'CSWsasl',
           u'CSWpmprmsvldt', u'CSWpmmathinterpolate', u'CSWpmprmscheck',
           u'CSWrdist', u'CSWsbcl', u'CSWtetex', u'CSWnetcat', u'CSWjikes',
           u'CSWfoomaticfilters', u'CSWlibgnome', u'CSWexpect', u'CSWdejagnu',
           u'CSWnetpbm', u'CSWpmmailsendmail', u'CSWgnomedocutils', u'CSWnmap',
           u'CSWsetoolkit', u'CSWntop', u'CSWtransfig', u'CSWxmms',
           u'CSWpstoedit', u'CSWgdb', u'CSWschilybase', u'CSWschilyutils',
           u'CSWstar', u'CSWfindutils', u'CSWfakeroot', u'CSWautogen',
           u'CSWpmmimetools', u'CSWpmclsautouse', u'CSWpmlogmessage',
           u'CSWpmlogmsgsimple', u'CSWpmsvnsimple', u'CSWpmlistmoreut',
           u'CSWpmunivrequire', u'CSWpmiodigest', u'CSWpmsvnmirror',
           u'CSWpmhtmltmpl', u'CSWemacscommon', u'CSWcommon', u'CSWgnuplot',
           u'CSWpkgget', u'CSWsamefile', u'CSWpmnetdnsreslvprg',
           u'CSWpmx11protocol', u'CSWmono', u'CSWgstplugins',
           u'CSWgnomedesktop', u'CSWevince', u'CSWgedit', u'CSWfacter',
           u'CSWpmiopager', u'CSWxpm', u'CSWgawk', u'CSWpmcfginifls',
           u'CSWlibxft2', u'CSWpango', u'CSWgtk2', u'CSWpkgutil'],
       '/opt/csw/share/man/man1': ['CSWtree'],
       '/opt/csw/share/man/man1/tree.1': ['CSWtree']}
    result = depchecks.ByDirectory(self.pkg_data,
                          self.error_mgr_mock,
                          self.logger_stub,
                          self.messenger_stub,
                          path_and_pkg_by_basename, pkg_by_path)

  def testByDirectory_2(self):
    # Same as testByDirectory_1, but with a trimmed pkg_by_path and an
    # assertion on the reason groups returned by ByDirectory.
    path_and_pkg_by_basename = {
         'libc.so.1': {u'/usr/lib': [u'SUNWcsl'],
                       u'/usr/lib/libp/sparcv9': [u'SUNWdplx'],
                       u'/usr/lib/sparcv9': [u'SUNWcslx']},
         'license': {'/opt/csw/share/doc/tree': ['CSWtree']},
         'man1': {'/opt/csw/share/man': ['CSWtree']},
         'tree': {'/opt/csw/bin': ['CSWtree'],
                  '/opt/csw/share/doc': ['CSWtree']},
         'tree.1': {'/opt/csw/share/man/man1': ['CSWtree']}}
    pkg_by_path = {
       '/opt/csw/bin': [u'CSWautogen', u'CSWbinutils', u'CSWcommon'],
       '/opt/csw/bin/tree': ['CSWtree'],
       '/opt/csw/share/doc': [
           u'CSWemacsbincommon', u'CSWemacs', u'CSWcommon', u'CSWbashcmplt'],
       '/opt/csw/share/doc/tree': ['CSWtree'],
       '/opt/csw/share/doc/tree/license': ['CSWtree'],
       '/opt/csw/share/man': [u'CSWcommon', u'CSWgnuplot'],
       '/opt/csw/share/man/man1': ['CSWtree'],
       '/opt/csw/share/man/man1/tree.1': ['CSWtree']}
    result = depchecks.ByDirectory(self.pkg_data,
                          self.error_mgr_mock,
                          self.logger_stub,
                          self.messenger_stub,
                          path_and_pkg_by_basename, pkg_by_path)
    expected = [
       [('CSWtree',
         u"['CSWtree'] provides directory /opt/csw/share/man/man1 is needed by the package CSWtree")],
       [('CSWtree',
         u"['CSWtree'] provides directory /opt/csw/share/doc/tree is needed by the package CSWtree")],
       [(u'CSWcommon',
         u"[u'CSWcommon'] provides directory /opt/csw/share/doc is needed by the package CSWtree")],
       [(u'CSWcommon',
         u"[u'CSWcommon'] provides directory /opt/csw/bin is needed by the package CSWtree")],
       [(u'CSWcommon',
         u"[u'CSWcommon'] provides directory /opt/csw/share/man is needed by the package CSWtree")]]
    self.assertEquals(expected, result)

  def testLibraries_1(self):
    # Libraries should resolve libc.so.1 to SUNWcsl for the tree binary.
    path_and_pkg_by_basename = {
         'libc.so.1': {u'/usr/lib': [u'SUNWcsl'],
                       u'/usr/lib/libp/sparcv9': [u'SUNWdplx'],
                       u'/usr/lib/sparcv9': [u'SUNWcslx']},
         'license': {'/opt/csw/share/doc/tree': ['CSWtree']},
         'man1': {'/opt/csw/share/man': ['CSWtree']},
         'tree': {'/opt/csw/bin': ['CSWtree'],
                  '/opt/csw/share/doc': ['CSWtree']},
         'tree.1': {'/opt/csw/share/man/man1': ['CSWtree']}}
    pkg_by_path = {
       '/opt/csw/bin': [u'CSWautogen', u'CSWbinutils', u'CSWcommon'],
       '/opt/csw/bin/tree': ['CSWtree'],
       '/opt/csw/share/doc': [
           u'CSWemacsbincommon', u'CSWemacs', u'CSWcommon', u'CSWbashcmplt'],
       '/opt/csw/share/doc/tree': ['CSWtree'],
       '/opt/csw/share/doc/tree/license': ['CSWtree'],
       '/opt/csw/share/man': [u'CSWcommon', u'CSWgnuplot'],
       '/opt/csw/share/man/man1': ['CSWtree'],
       '/opt/csw/share/man/man1/tree.1': ['CSWtree']}
    result = depchecks.Libraries(self.pkg_data,
                          self.error_mgr_mock,
                          self.logger_stub,
                          self.messenger_stub,
                          path_and_pkg_by_basename, pkg_by_path)
    # It needs to be a list.
    expected = [[
      (u'SUNWcsl', u'provides /usr/lib/libc.so.1 needed by opt/csw/bin/tree')]]
    self.assertEqual(expected, result)

  def testLibraries_Javasvn(self):
    # Exercises alternative dependencies: libapr-1.so.0 is provided by two
    # packages (CSWapr and CSWapache2rt), so one reason group has two entries.
    self.pkg_data = copy.deepcopy(javasvn_stats[0])
    path_and_pkg_by_basename = {
        'libCrun.so.1': {u'/usr/lib': [u'SUNWlibC'], u'/usr/lib/sparcv9': [u'SUNWlibCx']},
        'libCstd.so.1': {u'/usr/lib': [u'SUNWlibC'], u'/usr/lib/sparcv9': [u'SUNWlibCx']},
        'libapr-1.so.0': {u'/opt/csw/apache2/lib': [u'CSWapache2rt'], u'/opt/csw/lib': [u'CSWapr'], u'/opt/csw/lib/sparcv9': [u'CSWapr']},
        'libaprutil-1.so.0': {u'/opt/csw/apache2/lib': [u'CSWapache2rt']},
        'libc.so.1': {u'/usr/lib': [u'SUNWcsl'], u'/usr/lib/libp/sparcv9': [u'SUNWdplx'], u'/usr/lib/sparcv9': [u'SUNWcslx']},
        'libdl.so.1': {u'/etc/lib': [u'SUNWcsr'], u'/usr/lib': [u'SUNWcsl'], u'/usr/lib/sparcv9': [u'SUNWcslx']},
        'libexpat.so.1': {u'/opt/csw/lib': [u'CSWexpat'], u'/opt/csw/lib/sparcv9': [u'CSWexpat']},
        'libiconv.so.2': {u'/opt/csw/lib': [u'CSWiconv'], u'/opt/csw/lib/sparcv9': [u'CSWiconv']},
        'libintl.so.8': {u'/opt/csw/lib': [u'CSWggettextrt'], u'/opt/csw/lib/sparcv9': [u'CSWggettextrt']},
        'liblber-2.4.so.2': {u'/opt/csw/lib': [u'CSWoldaprt'], u'/opt/csw/lib/sparcv9': [u'CSWoldaprt']},
        'libldap-2.4.so.2': {u'/opt/csw/lib': [u'CSWoldaprt'], u'/opt/csw/lib/sparcv9': [u'CSWoldaprt']},
        'libneon.so.27': {u'/opt/csw/lib': [u'CSWneon'], u'/opt/csw/lib/sparcv9': [u'CSWneon']},
        'libnsl.so.1': {u'/usr/lib': [u'SUNWcsl'], u'/usr/lib/sparcv9': [u'SUNWcslx']},
        'libpthread.so.1': {u'/usr/lib': [u'SUNWcsl'], u'/usr/lib/sparcv9': [u'SUNWcslx']},
        'librt.so.1': {u'/usr/lib': [u'SUNWcsl'], u'/usr/lib/sparcv9': [u'SUNWcslx']},
        'libsendfile.so.1': {u'/usr/lib': [u'SUNWcsl'], u'/usr/lib/sparcv9': [u'SUNWcslx']},
        'libsocket.so.1': {u'/usr/lib': [u'SUNWcsl'], u'/usr/lib/sparcv9': [u'SUNWcslx']},
        'libsvn_client-1.so.0': {u'/opt/csw/lib/svn': [u'CSWsvn']},
        'libsvn_delta-1.so.0': {u'/opt/csw/lib/svn': [u'CSWsvn']},
        'libsvn_diff-1.so.0': {u'/opt/csw/lib/svn': [u'CSWsvn']},
        'libsvn_fs-1.so.0': {u'/opt/csw/lib/svn': [u'CSWsvn']},
        'libsvn_ra-1.so.0': {u'/opt/csw/lib/svn': [u'CSWsvn']},
        'libsvn_repos-1.so.0': {u'/opt/csw/lib/svn': [u'CSWsvn']},
        'libsvn_subr-1.so.0': {u'/opt/csw/lib/svn': [u'CSWsvn']},
        'libsvn_wc-1.so.0': {u'/opt/csw/lib/svn': [u'CSWsvn']},
        'libuuid.so.1': {u'/usr/lib': [u'SUNWcsl'], u'/usr/lib/sparcv9': [u'SUNWcslx']},
    }

    expected = [
     [(u'CSWggettextrt', u'provides /opt/csw/lib/libintl.so.8 needed by opt/csw/lib/svn/libsvnjavahl-1.so.0.0.0')],
     [(u'CSWsvn', u'provides /opt/csw/lib/svn/libsvn_repos-1.so.0 needed by opt/csw/lib/svn/libsvnjavahl-1.so.0.0.0')],
     [(u'CSWsvn', u'provides /opt/csw/lib/svn/libsvn_client-1.so.0 needed by opt/csw/lib/svn/libsvnjavahl-1.so.0.0.0')],
     [(u'CSWsvn', u'provides /opt/csw/lib/svn/libsvn_wc-1.so.0 needed by opt/csw/lib/svn/libsvnjavahl-1.so.0.0.0')],
     [(u'CSWsvn', u'provides /opt/csw/lib/svn/libsvn_ra-1.so.0 needed by opt/csw/lib/svn/libsvnjavahl-1.so.0.0.0')],
     [(u'CSWsvn', u'provides /opt/csw/lib/svn/libsvn_delta-1.so.0 needed by opt/csw/lib/svn/libsvnjavahl-1.so.0.0.0')],
     [(u'CSWsvn', u'provides /opt/csw/lib/svn/libsvn_diff-1.so.0 needed by opt/csw/lib/svn/libsvnjavahl-1.so.0.0.0')],
     [(u'CSWsvn', u'provides /opt/csw/lib/svn/libsvn_subr-1.so.0 needed by opt/csw/lib/svn/libsvnjavahl-1.so.0.0.0')],
     [(u'CSWsvn', u'provides /opt/csw/lib/svn/libsvn_fs-1.so.0 needed by opt/csw/lib/svn/libsvnjavahl-1.so.0.0.0')],
     [(u'CSWapache2rt', u'provides /opt/csw/apache2/lib/libaprutil-1.so.0 needed by opt/csw/lib/svn/libsvnjavahl-1.so.0.0.0')],
     [(u'CSWoldaprt', u'provides /opt/csw/lib/libldap-2.4.so.2 needed by opt/csw/lib/svn/libsvnjavahl-1.so.0.0.0')],
     [(u'CSWoldaprt', u'provides /opt/csw/lib/liblber-2.4.so.2 needed by opt/csw/lib/svn/libsvnjavahl-1.so.0.0.0')],
     [(u'CSWexpat', u'provides /opt/csw/lib/libexpat.so.1 needed by opt/csw/lib/svn/libsvnjavahl-1.so.0.0.0')],
     [(u'CSWiconv', u'provides /opt/csw/lib/libiconv.so.2 needed by opt/csw/lib/svn/libsvnjavahl-1.so.0.0.0')],
     [(u'CSWapr', u'provides /opt/csw/lib/libapr-1.so.0 needed by opt/csw/lib/svn/libsvnjavahl-1.so.0.0.0'),
      (u'CSWapache2rt', u'provides /opt/csw/apache2/lib/libapr-1.so.0 needed by opt/csw/lib/svn/libsvnjavahl-1.so.0.0.0')],
     [(u'SUNWcsl', u'provides /usr/lib/libuuid.so.1 needed by opt/csw/lib/svn/libsvnjavahl-1.so.0.0.0')],
     [(u'SUNWcsl', u'provides /usr/lib/libsendfile.so.1 needed by opt/csw/lib/svn/libsvnjavahl-1.so.0.0.0')],
     [(u'SUNWcsl', u'provides /usr/lib/librt.so.1 needed by opt/csw/lib/svn/libsvnjavahl-1.so.0.0.0')],
     [(u'SUNWcsl', u'provides /usr/lib/libnsl.so.1 needed by opt/csw/lib/svn/libsvnjavahl-1.so.0.0.0')],
     [(u'SUNWcsl', u'provides /usr/lib/libpthread.so.1 needed by opt/csw/lib/svn/libsvnjavahl-1.so.0.0.0')],
     [(u'SUNWcsl', u'provides /usr/lib/libdl.so.1 needed by opt/csw/lib/svn/libsvnjavahl-1.so.0.0.0')],
     [(u'CSWneon', u'provides /opt/csw/lib/libneon.so.27 needed by opt/csw/lib/svn/libsvnjavahl-1.so.0.0.0')],
     [(u'SUNWcsl', u'provides /usr/lib/libsocket.so.1 needed by opt/csw/lib/svn/libsvnjavahl-1.so.0.0.0')],
     [(u'SUNWcsl', u'provides /usr/lib/libc.so.1 needed by opt/csw/lib/svn/libsvnjavahl-1.so.0.0.0')],
     [(u'SUNWlibC', u'provides /usr/lib/libCstd.so.1 needed by opt/csw/lib/svn/libsvnjavahl-1.so.0.0.0')],
     [(u'SUNWlibC', u'provides /usr/lib/libCrun.so.1 needed by opt/csw/lib/svn/libsvnjavahl-1.so.0.0.0')]]

    # pkg_by_path is not important for depchecks.Libraries.
    pkg_by_path = {}
    result = depchecks.Libraries(self.pkg_data,
                          self.error_mgr_mock,
                          self.logger_stub,
                          self.messenger_stub,
                          path_and_pkg_by_basename, pkg_by_path)
    self.assertEqual(expected, result)
+
+
class TestMissingDepsFromReasonGroups(unittest.TestCase):
  """Tests for depchecks.MissingDepsFromReasonGroups."""

  def testOne(self):
    # CSWfoo2 is declared, satisfying the first group; CSWbar is missing.
    reason_groups = [
        [(u"CSWfoo1", ""), (u"CSWfoo2", "")],
        [(u"CSWbar", "")],
    ]
    result = depchecks.MissingDepsFromReasonGroups(
        reason_groups, set([u"CSWfoo2"]))
    self.assertEqual(result, [[u"CSWbar"]])
+
+
class TestLibraries(unittest.TestCase):
  """Placeholder test case for depchecks.Libraries with sudo stats."""

  def setUp(self):
    self.mocker = mox.Mox()
    self.logger_stub = stubs.LoggerStub()
    self.messenger_stub = stubs.MessengerStub()
    self.error_mgr_mock = self.mocker.CreateMock(
        checkpkg.SetCheckInterface)
    self.pkg_data = copy.deepcopy(sudo_stats)

  def testOne(self):
    # TODO: exercise depchecks.Libraries against sudo_stats.
    pass
+
+
+
# Run all test cases when this module is executed as a script.
if __name__ == '__main__':
  unittest.main()


Property changes on: csw/mgar/gar/v2/lib/python/dependency_checks_test.py
___________________________________________________________________
Added: svn:executable
   + *

Modified: csw/mgar/gar/v2/lib/python/models.py
===================================================================
--- csw/mgar/gar/v2/lib/python/models.py	2010-07-19 00:49:07 UTC (rev 10540)
+++ csw/mgar/gar/v2/lib/python/models.py	2010-07-19 08:39:22 UTC (rev 10541)
@@ -5,11 +5,15 @@
 import sqlobject
 
 class DataSource(sqlobject.SQLObject):
-  "Represents: a /var/sadm/install/contents file, or CSW catalog."
+  """Represents: a /var/sadm/install/contents file, or CSW catalog.
+
+  - "local"
+  - "catalog"
+  """
   name = sqlobject.UnicodeCol(length=255, unique=True, notNone=True)
 
-class OsVersion(sqlobject.SQLObject):
-  "Short name: 5.9, long name: Solaris 9"
+class OsRelease(sqlobject.SQLObject):
+  "Short name: SunOS5.9, long name: Solaris 9"
   short_name = sqlobject.UnicodeCol(length=40, unique=True, notNone=True)
   full_name = sqlobject.UnicodeCol(length=255, unique=True, notNone=True)
 
@@ -17,6 +21,11 @@
   "One of: 'sparc', 'x86'."
   name = sqlobject.UnicodeCol(length=40, unique=True, notNone=True)
 
class Maintainer(sqlobject.SQLObject):
  """The maintainer of the package, identified by the e-mail address."""
  # The e-mail address is the unique key for a maintainer.
  email = sqlobject.UnicodeCol(length=255, unique=True, notNone=True)
  # Optional; may be filled in when the full name becomes known.
  full_name = sqlobject.UnicodeCol(length=255, default=None)
+
 class Host(sqlobject.SQLObject):
   "Hostname, as returned by socket.getfqdn()"
   fqdn = sqlobject.UnicodeCol(length=255, unique=True, notNone=True)
@@ -29,8 +38,7 @@
   int_value = sqlobject.IntCol(default=None)
   str_value = sqlobject.UnicodeCol(default=None)
 
-
-class CswPackage(sqlobject.SQLObject):
+class Pkginst(sqlobject.SQLObject):
   pkgname = sqlobject.UnicodeCol(length=255, unique=True, notNone=True)
   catalogname = sqlobject.UnicodeCol(default=None)
   pkg_desc = sqlobject.UnicodeCol(default=None)
@@ -42,13 +50,30 @@
   basename_idx = sqlobject.DatabaseIndex('basename')
 
 class Srv4FileStats(sqlobject.SQLObject):
+  """Represents a srv4 file."""
   md5_sum = sqlobject.UnicodeCol(notNone=True, unique=True)
-  pkgname = sqlobject.UnicodeCol(length=255, notNone=True)
+  pkginst = sqlobject.ForeignKey('Pkginst')
   stats_version = sqlobject.IntCol(notNone=True)
+  catalogname = sqlobject.UnicodeCol(notNone=True)
+  basename = sqlobject.UnicodeCol(notNone=True)
+  arch = sqlobject.ForeignKey('Architecture', notNone=True)
+  os_rel = sqlobject.ForeignKey('OsRelease', notNone=True)
+  maintainer = sqlobject.ForeignKey('Maintainer')
   data = sqlobject.UnicodeCol(notNone=True)
+  latest = sqlobject.BoolCol(notNone=True)
+  version_string = sqlobject.UnicodeCol(notNone=True)
+  rev = sqlobject.UnicodeCol(notNone=False)
+  mtime = sqlobject.DateTimeCol(notNone=False)
 
 class CheckpkgOverride(sqlobject.SQLObject):
   srv4_file = sqlobject.ForeignKey('Srv4FileStats')
   pkgname = sqlobject.UnicodeCol(default=None)
   tag_name = sqlobject.UnicodeCol(notNone=True)
   tag_info = sqlobject.UnicodeCol(default=None)
+
class CheckpkgErrorTag(sqlobject.SQLObject):
  """An error tag emitted by checkpkg for a srv4 file.

  NOTE(review): the columns mirror CheckpkgOverride above — presumably
  error tags are matched against overrides by (pkgname, tag_name,
  tag_info); confirm against the checking code.
  """
  srv4_file = sqlobject.ForeignKey('Srv4FileStats')
  pkgname = sqlobject.UnicodeCol(default=None)
  tag_name = sqlobject.UnicodeCol(notNone=True)
  tag_info = sqlobject.UnicodeCol(default=None)
  # Free-form human-readable message accompanying the tag.
  msg = sqlobject.UnicodeCol(default=None)

Modified: csw/mgar/gar/v2/lib/python/opencsw.py
===================================================================
--- csw/mgar/gar/v2/lib/python/opencsw.py	2010-07-19 00:49:07 UTC (rev 10540)
+++ csw/mgar/gar/v2/lib/python/opencsw.py	2010-07-19 08:39:22 UTC (rev 10541)
@@ -25,6 +25,7 @@
 import shutil
 import subprocess
 import tempfile
+import time
 import urllib2
 import overrides
 import configuration as c
@@ -38,7 +39,6 @@
 ARCH_SPARC = "sparc"
 ARCH_i386 = "i386"
 ARCH_ALL = "all"
-
 ARCHITECTURES = [ARCH_SPARC, ARCH_i386, ARCH_ALL]
 OS_RELS = [u"SunOS5.9", u"SunOS5.10"]
 MAJOR_VERSION = "major version"
@@ -436,6 +436,7 @@
     self.debug = debug
     self.pkgname = None
     self.md5sum = None
+    self.mtime = None
 
   def __repr__(self):
     return u"CswSrv4File(%s)" % repr(self.pkg_path)
@@ -456,6 +457,9 @@
       gzip_suffix = ".gz"
       pkg_suffix = ".pkg"
       if self.pkg_path.endswith("%s%s" % (pkg_suffix, gzip_suffix)):
+        # Causing the class to stat the .gz file.  This call throws away the
+        # result, but the result will be cached as a class instance member.
+        self.GetMtime()
         base_name_gz = os.path.split(self.pkg_path)[1]
         shutil.copy(self.pkg_path, self.GetWorkDir())
         self.pkg_path = os.path.join(self.GetWorkDir(), base_name_gz)
@@ -504,6 +508,14 @@
       logging.debug("GetPkgname(): %s", repr(self.pkgname))
     return self.pkgname
 
  def GetMtime(self):
    """Returns the package file's mtime as a UTC datetime, cached.

    Raises:
      OSError: if self.pkg_path does not exist.
    """
    if not self.mtime:
      # This fails if the file is not there.
      s = os.stat(self.pkg_path)
      # Convert epoch seconds to a naive UTC datetime.
      # NOTE(review): assumes `datetime` is imported at module level.
      t = time.gmtime(s.st_mtime)
      self.mtime = datetime.datetime(*t[:6])
    return self.mtime
+
   def TransformToDir(self):
     """Transforms the file to the directory format.
 
@@ -561,6 +573,11 @@
     ret = pkgchk_proc.wait()
     return ret, stdout, stderr
 
  def GetFileMtime(self):
    """Returns the package file's mtime as raw epoch seconds, cached.

    NOTE(review): unlike CswSrv4File.GetMtime, this caches the float
    st_mtime rather than a datetime — confirm callers expect seconds.
    """
    if not self.mtime:
      self.mtime = os.stat(self.pkg_path).st_mtime
    return self.mtime
+
   def __del__(self):
     if self.workdir:
       logging.debug("Removing %s", repr(self.workdir))
@@ -832,15 +849,19 @@
       def StripRe(x, strip_re):
         return re.sub(strip_re, "", x)
       root_re = re.compile(r"^root/")
-      magic_cookie = magic.open(0)
-      magic_cookie.load()
-      magic_cookie.setflags(magic.MAGIC_MIME)
+      file_magic = FileMagic()
       for file_path in all_files:
         full_path = unicode(self.MakeAbsolutePath(file_path))
         file_info = {
             "path": StripRe(file_path, root_re),
-            "mime_type": magic_cookie.file(full_path),
+            "mime_type": file_magic.GetFileMimeType(full_path)
         }
+        if not file_info["mime_type"]:
+          logging.error("Could not establish the mime type of %s",
+                        full_path)
+          # We really don't want that, as it misses binaries.
+          raise PackageError("Could not establish the mime type of %s"
+                             % full_path)
         if IsBinary(file_info):
           parser = hp.createParser(full_path)
           if not parser:
@@ -1200,6 +1221,7 @@
         self.by_basename[d["file_basename"]] = d
     return self.by_basename
 
+
 def IsBinary(file_info):
   """Returns True or False depending on file metadata."""
   is_a_binary = False
@@ -1209,9 +1231,45 @@
   if not file_info["mime_type"]:
     # This should never happen, but it seems to have happened at least once.
     # TODO: Find the affected data and figure out why.
-    return false
+    raise PackageError("file_info is missing mime_type:" % file_info)
   for mimetype in BIN_MIMETYPES:
     if mimetype in file_info["mime_type"]:
       is_a_binary = True
       break
   return is_a_binary
+
+
class FileMagic(object):
  """Works around libmagic sometimes returning None mime types.

  When magic_cookie.file() yields None, a fresh cookie is created and
  the lookup is retried, up to ten attempts.
  """

  def __init__(self):
    self.cookie_count = 0
    self.magic_cookie = None

  def _GetCookie(self):
    """Creates and configures a new libmagic cookie."""
    # NOTE(review): the incrementing counter is passed as the flags
    # argument of magic.open(); setflags(MAGIC_MIME) below overrides
    # it, but confirm the counter-as-flags usage is intentional.
    cookie = magic.open(self.cookie_count)
    self.cookie_count += 1
    cookie.load()
    cookie.setflags(magic.MAGIC_MIME)
    return cookie

  def _LazyInit(self):
    """Creates the first cookie on demand."""
    if not self.magic_cookie:
      self.magic_cookie = self._GetCookie()

  def GetFileMimeType(self, full_path):
    """Returns the mime type for full_path, retrying on None results."""
    self._LazyInit()
    mime = None
    attempts = 0
    while attempts < 10:
      mime = self.magic_cookie.file(full_path)
      if mime:
        break
      # Returned mime is null. Re-initializing the cookie and trying again.
      logging.error("magic_cookie.file(%s) returned None. Retrying.",
                    full_path)
      self.magic_cookie = self._GetCookie()
      attempts += 1
    return mime

Modified: csw/mgar/gar/v2/lib/python/package_checks.py
===================================================================
--- csw/mgar/gar/v2/lib/python/package_checks.py	2010-07-19 00:49:07 UTC (rev 10540)
+++ csw/mgar/gar/v2/lib/python/package_checks.py	2010-07-19 08:39:22 UTC (rev 10541)
@@ -13,6 +13,7 @@
 
 import copy
 import re
+import operator
 import os
 import checkpkg
 import opencsw
@@ -263,7 +264,7 @@
 def SetCheckLibraries(pkgs_data, error_mgr, logger, messenger):
   """Second version of the library checking code.
 
-  1. Collect all the data from the FS:
+  1. Collect all the needed data from the FS:
      {"<basename>": {"/path/1": ["CSWfoo1"], "/path/2": ["CSWfoo2"]}}
      1.1. find all needed sonames
      1.2. get all the data for needed sonames
@@ -273,18 +274,33 @@
   """
   needed_sonames = []
   pkgs_to_be_installed = [x["basic_stats"]["pkgname"] for x in pkgs_data]
+  paths_to_verify = set()
+  pkg_by_path = {}
+  logger.debug("Building needed_sonames, paths_to_verify and pkg_by_path...")
   for pkg_data in pkgs_data:
+    pkgname = pkg_data["basic_stats"]["pkgname"]
     for binary_info in pkg_data["binaries_dump_info"]:
       needed_sonames.extend(binary_info["needed sonames"])
+    # Creating an index of packages by path
+    for pkgmap_entry in pkg_data["pkgmap"]:
+      if "path" in pkgmap_entry and pkgmap_entry["path"]:
+        base_dir, basename = os.path.split(pkgmap_entry["path"])
+        paths_to_verify.add(base_dir)
+        paths_to_verify.add(pkgmap_entry["path"])
+        if pkgmap_entry["path"] not in pkg_by_path:
+          pkg_by_path[pkgmap_entry["path"]] = []
+        pkg_by_path[pkgmap_entry["path"]].append(pkgname)
   needed_sonames = sorted(set(needed_sonames))
   # Finding candidate libraries from the filesystem (/var/sadm/install/contents)
-  path_and_pkg_by_basename = {}
-  for needed_soname in needed_sonames:
-    path_and_pkg_by_basename[needed_soname] = error_mgr.GetPathsAndPkgnamesByBasename(
-        needed_soname)
+  path_and_pkg_by_basename = depchecks.GetPathAndPkgByBasename(
+      error_mgr, logger, needed_sonames)
   # Removing files from packages that are to be installed.
   path_and_pkg_by_basename = RemovePackagesUnderInstallation(
       path_and_pkg_by_basename, pkgs_to_be_installed)
+  # Populating the mapping using data from the local packages.  The call to
+  # GetPkgByFullPath will complete the mapping using data from the filesystem.
+  pkg_by_path = depchecks.GetPkgByFullPath(
+      error_mgr, logger, paths_to_verify, pkg_by_path)
   # Adding overlay based on the given package set
   # Considering files from the set under examination.
   for pkg_data in pkgs_data:
@@ -305,42 +321,64 @@
   for pkg_data in pkgs_data:
     pkgname = pkg_data["basic_stats"]["pkgname"]
     check_args = (pkg_data, error_mgr, logger, messenger,
-                  path_and_pkg_by_basename)
+                  path_and_pkg_by_basename, pkg_by_path)
     req_pkgs_reasons = depchecks.Libraries(*check_args)
     req_pkgs_reasons.extend(depchecks.ByFilename(*check_args))
+    # This test needs more work, or potentially, architectural changes.
+    # by_directory_reasons = depchecks.ByDirectory(*check_args)
+    # req_pkgs_reasons.extend(by_directory_reasons)
     missing_reasons_by_pkg = {}
-    for pkg, reason in req_pkgs_reasons:
-      if pkg not in missing_reasons_by_pkg:
-        missing_reasons_by_pkg[pkg] = list()
-      if len(missing_reasons_by_pkg[pkg]) < 4:
-        missing_reasons_by_pkg[pkg].append(reason)
-      elif len(missing_reasons_by_pkg[pkg]) == 4:
-        missing_reasons_by_pkg[pkg].append("...and more.")
+    for reason_group in req_pkgs_reasons:
+      for pkg, reason in reason_group:
+        if pkg not in missing_reasons_by_pkg:
+          missing_reasons_by_pkg[pkg] = []
+        if len(missing_reasons_by_pkg[pkg]) < 4:
+          missing_reasons_by_pkg[pkg].append(reason)
+        elif len(missing_reasons_by_pkg[pkg]) == 4:
+          missing_reasons_by_pkg[pkg].append("...and more.")
     declared_deps = pkg_data["depends"]
     declared_deps_set = set([x[0] for x in declared_deps])
-    req_pkgs_set = set([x[0] for x in req_pkgs_reasons])
-    missing_deps = req_pkgs_set.difference(declared_deps_set)
+    missing_dep_groups = depchecks.MissingDepsFromReasonGroups(
+        req_pkgs_reasons, declared_deps_set)
     pkgs_to_remove = set()
     for regex_str in checkpkg.DO_NOT_REPORT_MISSING_RE:
       regex = re.compile(regex_str)
-      for dep_pkgname in missing_deps:
+      for dep_pkgname in reduce(operator.add, missing_dep_groups, []):
         if re.match(regex, dep_pkgname):
           pkgs_to_remove.add(dep_pkgname)
-    if pkgname in missing_deps:
+    if pkgname in reduce(operator.add, missing_dep_groups, []):
       pkgs_to_remove.add(pkgname)
     logger.debug("Removing %s from the list of missing pkgs.", pkgs_to_remove)
-    missing_deps = missing_deps.difference(pkgs_to_remove)
-    surplus_deps = declared_deps_set.difference(req_pkgs_set)
+    new_missing_dep_groups = set()
+    for missing_deps in missing_dep_groups:
+      new_missing_deps = set()
+      for dep in missing_deps:
+        if dep not in pkgs_to_remove:
+          new_missing_deps.add(dep)
+      if new_missing_deps:
+        new_missing_dep_groups.add(tuple(new_missing_deps))
+    potential_req_pkgs = set(
+        (x for x, y in reduce(operator.add, req_pkgs_reasons, [])))
+    missing_dep_groups = new_missing_dep_groups
+    surplus_deps = declared_deps_set.difference(potential_req_pkgs)
     surplus_deps = surplus_deps.difference(checkpkg.DO_NOT_REPORT_SURPLUS)
-    missing_deps_reasons = []
-    for missing_dep in missing_deps:
-      error_mgr.ReportError(pkgname, "missing-dependency", "%s" % (missing_dep))
-      missing_deps_reasons.append((missing_dep, missing_reasons_by_pkg[missing_dep]))
+    # Using an index to avoid duplicated reasons.
+    missing_deps_reasons_by_pkg = []
+    missing_deps_idx = set()
+    for missing_deps in missing_dep_groups:
+      error_mgr.ReportError(pkgname,
+                            "missing-dependency",
+                            " or ".join(missing_deps))
+      for missing_dep in missing_deps:
+        item = (missing_dep, tuple(missing_reasons_by_pkg[missing_dep]))
+        if item not in missing_deps_idx:
+          missing_deps_reasons_by_pkg.append(item)
+          missing_deps_idx.add(item)
     for surplus_dep in surplus_deps:
       error_mgr.ReportError(pkgname, "surplus-dependency", surplus_dep)
     namespace = {
         "pkgname": pkgname,
-        "missing_deps": missing_deps_reasons,
+        "missing_deps": missing_deps_reasons_by_pkg,
         "surplus_deps": surplus_deps,
         "orphan_sonames": None,
     }
@@ -349,9 +387,19 @@
     if report.strip():
       for line in report.splitlines():
         messenger.Message(line)
-    for missing_dep in missing_deps:
-      messenger.SuggestGarLine(
-          "RUNTIME_DEP_PKGS_%s += %s" % (pkgname, missing_dep))
+    for missing_deps in missing_dep_groups:
+      alternatives = False
+      prefix = ""
+      if len(missing_deps) > 1:
+        alternatives = True
+        prefix = "  "
+      if alternatives:
+        messenger.SuggestGarLine("# One of the following:")
+      for missing_dep in missing_deps:
+        messenger.SuggestGarLine(
+            "%sRUNTIME_DEP_PKGS_%s += %s" % (prefix, pkgname, missing_dep))
+      if alternatives:
+        messenger.SuggestGarLine("# (end of the list of alternative dependencies)")
 
 
 def SetCheckDependencies(pkgs_data, error_mgr, logger, messenger):
@@ -682,8 +730,10 @@
   # Finding all shared libraries
   shared_libs = []
   for metadata in pkg_data["files_metadata"]:
-    if "sharedlib" in metadata["mime_type"]:
-      shared_libs.append(metadata["path"])
+    if "mime_type" in metadata and metadata["mime_type"]:
+      # TODO: Find out where mime_type is missing and why
+      if "sharedlib" in metadata["mime_type"]:
+        shared_libs.append(metadata["path"])
   shared_libs = set(shared_libs)
   for binary_info in pkg_data["binaries_dump_info"]:
     for soname in binary_info["needed sonames"]:
@@ -695,14 +745,15 @@
 
 
 def CheckDiscouragedFileNamePatterns(pkg_data, error_mgr, logger, messenger):
-  patterns = [(re.compile(x), y) for x, y in DISCOURAGED_FILE_PATTERNS]
+  patterns = [(x, re.compile(x), y) for x, y in DISCOURAGED_FILE_PATTERNS]
   for entry in pkg_data["pkgmap"]:
     if entry["path"]:
-      for pattern, msg in patterns:
-        if pattern.search(entry["path"]):
+      for pattern, pattern_re, msg in patterns:
+        if pattern_re.search(entry["path"]):
           error_mgr.ReportError("discouraged-path-in-pkgmap",
                                 entry["path"])
-          messenger.Message(msg)
+          messenger.OneTimeMessage(
+              "discouraged-path-in-pkgmap-%s" % pattern, msg)
 
 
 def CheckBadContent(pkg_data, error_mgr, logger, messenger):

Modified: csw/mgar/gar/v2/lib/python/package_checks_test.py
===================================================================
--- csw/mgar/gar/v2/lib/python/package_checks_test.py	2010-07-19 00:49:07 UTC (rev 10540)
+++ csw/mgar/gar/v2/lib/python/package_checks_test.py	2010-07-19 08:39:22 UTC (rev 10541)
@@ -3,6 +3,7 @@
 # $Id$
 
 import copy
+import datetime
 import unittest
 import package_checks as pc
 import checkpkg
@@ -16,7 +17,11 @@
 import testdata.checkpkg_pkgs_data_minimal as td_2
 import testdata.rpaths
 from testdata.rsync_pkg_stats import pkgstats as rsync_stats
+from testdata.tree_stats import pkgstats as tree_stats
 from testdata.ivtools_stats import pkgstats as ivtools_stats
+from testdata.sudo_stats import pkgstats as sudo_stats
+from testdata.javasvn_stats import pkgstats as javasvn_stats
+from testdata import stubs
 
 DEFAULT_PKG_STATS = None
 DEFAULT_PKG_DATA = rsync_stats[0]
@@ -27,28 +32,17 @@
 
   def setUp(self):
     self.pkg_stats = DEFAULT_PKG_STATS
-    # self.pkg_data = self.pkg_stats.GetAllStats()
-    # This makes one of the test break. To be investigated.
     self.pkg_data = copy.deepcopy(DEFAULT_PKG_DATA)
     self.mocker = mox.Mox()
 
+  def SetMessenger(self):
+    self.messenger = stubs.MessengerStub()
+
   def testDefault(self):
-
-    class LoggerStub(object):
-      def debug(self, debug_s, *kwords):
-        pass
-      def info(self, debug_s, *kwords):
-        pass
-    class MessengerStub(object):
-      def Message(self, m):
-        pass
-      def SuggestGarLine(self, m):
-        pass
-    # self.logger_mock = self.mocker.CreateMock(logging.Logger)
-    self.logger_mock = LoggerStub()
+    self.logger_mock = stubs.LoggerStub()
     self.error_mgr_mock = self.mocker.CreateMock(
         checkpkg.IndividualCheckInterface)
-    self.messenger = MessengerStub()
+    self.SetMessenger()
     self.CheckpkgTest()
     self.mocker.ReplayAll()
     getattr(pc, self.FUNCTION_NAME)(self.pkg_data,
@@ -320,7 +314,14 @@
        u'/lib/sparcv9': [u'SUNWcslr'],
        u'/usr/lib': [u'SUNWcsl'],
        u'/usr/lib/sparcv9': [u'SUNWcsl']})
-    self.error_mgr_mock.ReportError('CSWdjvulibrert', 'missing-dependency', u'CSWiconv')
+    self.error_mgr_mock.GetPkgByPath(
+        '/opt/csw/lib').AndReturn([u"CSWcommon"])
+    self.error_mgr_mock.GetPkgByPath(
+        '/opt/csw/share/doc').AndReturn([u"CSWcommon"])
+    self.error_mgr_mock.GetPkgByPath(
+        '/opt/csw/lib/sparcv9').AndReturn([u"CSWcommon"])
+    self.error_mgr_mock.ReportError(
+        'CSWdjvulibrert', 'missing-dependency', u'CSWiconv')
 
 
 class TestCheckPstamp(CheckpkgUnitTestHelper, unittest.TestCase):
@@ -329,7 +330,8 @@
     self.pkg_data["pkginfo"]["PSTAMP"] = "build8s20090904191054"
     self.error_mgr_mock.ReportError(
         'pkginfo-pstamp-in-wrong-format', 'build8s20090904191054',
-        "It should be 'username at hostname-timestamp', but it's 'build8s20090904191054'.")
+        "It should be 'username at hostname-timestamp', but it's "
+        "'build8s20090904191054'.")
 
 
 class TestCheckRpath(CheckpkgUnitTestHelper, unittest.TestCase):
@@ -439,16 +441,27 @@
     binaries_dump_info = self.pkg_data["binaries_dump_info"]
     binaries_dump_info[0]["runpath"] = ("/opt/csw/lib",)
     binaries_dump_info[0]["needed sonames"] = ["libdb-4.7.so"]
-    self.pkg_data["depends"] = (("CSWfoo", None),)
+    self.pkg_data["depends"] = (("CSWfoo", None),(u"CSWcommon", ""))
     self.pkg_data["binaries_dump_info"] = binaries_dump_info[0:1]
     self.error_mgr_mock.GetPathsAndPkgnamesByBasename('libdb-4.7.so').AndReturn({
        u'/opt/csw/lib': [u'CSWfoo'],
        u'/opt/csw/lib/sparcv9': [u'CSWfoo'],
     })
+    self.error_mgr_mock.GetPkgByPath(
+        '/opt/csw/share/man').AndReturn(["CSWcommon"])
+    self.error_mgr_mock.GetPkgByPath(
+        '/opt/csw/bin').AndReturn(["CSWcommon"])
+    self.error_mgr_mock.GetPkgByPath(
+        '/opt/csw/bin/sparcv8').AndReturn(["CSWcommon"])
+    self.error_mgr_mock.GetPkgByPath(
+        '/opt/csw/bin/sparcv9').AndReturn(["CSWcommon"])
+    self.error_mgr_mock.GetPkgByPath(
+        '/opt/csw/share/doc').AndReturn(["CSWcommon"])
     self.error_mgr_mock.ReportError(
         'CSWrsync',
         'deprecated-library',
-        u'opt/csw/bin/sparcv8/rsync Deprecated Berkeley DB location /opt/csw/lib/libdb-4.7.so')
+        u'opt/csw/bin/sparcv8/rsync Deprecated Berkeley DB location '
+        u'/opt/csw/lib/libdb-4.7.so')
     self.pkg_data = [self.pkg_data]
 
 
@@ -524,6 +537,42 @@
     self.pkg_data = [self.CSWbar_DATA, self.CSWlibfoo_DATA]
 
 
+class TestSharedLibsOnlyIsalist(CheckpkgUnitTestHelper,
+                                            unittest.TestCase):
+  """/opt/csw/lib/$ISALIST in RPATH without the bare /opt/csw/lib."""
+  FUNCTION_NAME = 'SetCheckLibraries'
+  # Contains only necessary bits.  The data listed in full.
+  CSWbar_DATA = {
+        'basic_stats': {'catalogname': 'bar',
+                        'pkgname': 'CSWbar',
+                        'stats_version': 1},
+        'binaries_dump_info': [
+                               {'base_name': 'bar',
+                                'needed sonames': ['libfoo.so.1'],
+                                'path': 'opt/csw/bin/bar',
+                                'runpath': ('/opt/csw/lib/$ISALIST',),
+                               },
+                               {'base_name': 'libfoo.so.1',
+                                'needed sonames': (),
+                                'path': 'opt/csw/lib/libfoo.so.1',
+                                'runpath': ('/opt/csw/lib/$ISALIST',),
+                               },
+                              ],
+        # 'depends': (),
+        'depends': ((u"CSWcommon", ""),),
+        'isalist': ('foo'),
+        'pkgmap': [
+          { 'path': '/opt/csw/lib/libfoo.so.1', },
+          { 'path': '/opt/csw/bin/bar', },
+                  ],
+        }
+  def CheckpkgTest(self):
+    self.error_mgr_mock.GetPathsAndPkgnamesByBasename('libfoo.so.1').AndReturn({})
+    self.error_mgr_mock.GetPkgByPath('/opt/csw/lib').AndReturn([u"CSWcommon"])
+    self.error_mgr_mock.GetPkgByPath('/opt/csw/bin').AndReturn([u"CSWcommon"])
+    self.pkg_data = [self.CSWbar_DATA]
+
+
 class TestCheckLibrariesDlopenLibs_1(CheckpkgUnitTestHelper, unittest.TestCase):
   """For dlopen-style shared libraries, libraries from /opt/csw/lib should be
   counted as dependencies.  It's only a heuristic though."""
@@ -533,12 +582,22 @@
     binaries_dump_info[0]["runpath"] = ()
     binaries_dump_info[0]["needed sonames"] = ["libbar.so"]
     binaries_dump_info[0]["path"] = 'opt/csw/lib/python/site-packages/foo.so'
-    self.pkg_data["depends"] = tuple()
+    self.pkg_data["depends"] = ((u"CSWcommon", "This one provides directories"),)
     self.pkg_data["binaries_dump_info"] = binaries_dump_info[0:1]
     self.error_mgr_mock.GetPathsAndPkgnamesByBasename('libbar.so').AndReturn({
        u'/opt/csw/lib': [u'CSWlibbar'],
        u'/opt/csw/lib/sparcv9': [u'CSWlibbar'],
     })
+    self.error_mgr_mock.GetPkgByPath(
+        '/opt/csw/share/man').AndReturn(["CSWcommon"])
+    self.error_mgr_mock.GetPkgByPath(
+        '/opt/csw/bin').AndReturn(["CSWcommon"])
+    self.error_mgr_mock.GetPkgByPath(
+        '/opt/csw/bin/sparcv8').AndReturn(["CSWcommon"])

@@ Diff output truncated at 100000 characters. @@

This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site.


More information about the devel mailing list