[csw-devel] SF.net SVN: gar:[13270] csw/mgar/gar/v2-uwatch2
wbonnet at users.sourceforge.net
wbonnet at users.sourceforge.net
Sun Feb 13 13:55:59 CET 2011
Revision: 13270
http://gar.svn.sourceforge.net/gar/?rev=13270&view=rev
Author: wbonnet
Date: 2011-02-13 12:55:58 +0000 (Sun, 13 Feb 2011)
Log Message:
-----------
Merging uwatch2 branch
Modified Paths:
--------------
csw/mgar/gar/v2-uwatch2/bin/ap2mod_build_scripts
csw/mgar/gar/v2-uwatch2/bin/gem2pkg
csw/mgar/gar/v2-uwatch2/categories/rbgems/category.mk
csw/mgar/gar/v2-uwatch2/gar.conf.mk
csw/mgar/gar/v2-uwatch2/gar.lib.mk
csw/mgar/gar/v2-uwatch2/gar.mk
csw/mgar/gar/v2-uwatch2/gar.pkg.mk
csw/mgar/gar/v2-uwatch2/lib/python/README
csw/mgar/gar/v2-uwatch2/lib/python/catalog.py
csw/mgar/gar/v2-uwatch2/lib/python/catalog_test.py
csw/mgar/gar/v2-uwatch2/lib/python/checkpkg.py
csw/mgar/gar/v2-uwatch2/lib/python/checkpkg2.py
csw/mgar/gar/v2-uwatch2/lib/python/checkpkg_lib.py
csw/mgar/gar/v2-uwatch2/lib/python/checkpkg_lib_test.py
csw/mgar/gar/v2-uwatch2/lib/python/common_constants.py
csw/mgar/gar/v2-uwatch2/lib/python/configuration.py
csw/mgar/gar/v2-uwatch2/lib/python/database.py
csw/mgar/gar/v2-uwatch2/lib/python/dependency_checks.py
csw/mgar/gar/v2-uwatch2/lib/python/models.py
csw/mgar/gar/v2-uwatch2/lib/python/models_test.py
csw/mgar/gar/v2-uwatch2/lib/python/opencsw.py
csw/mgar/gar/v2-uwatch2/lib/python/opencsw_test.py
csw/mgar/gar/v2-uwatch2/lib/python/package.py
csw/mgar/gar/v2-uwatch2/lib/python/package_checks.py
csw/mgar/gar/v2-uwatch2/lib/python/package_checks_test.py
csw/mgar/gar/v2-uwatch2/lib/python/package_stats.py
csw/mgar/gar/v2-uwatch2/lib/python/package_stats_test.py
csw/mgar/gar/v2-uwatch2/lib/python/pkgdb.py
csw/mgar/gar/v2-uwatch2/lib/python/sharedlib_utils.py
csw/mgar/gar/v2-uwatch2/lib/python/sharedlib_utils_test.py
csw/mgar/gar/v2-uwatch2/lib/python/shell.py
csw/mgar/gar/v2-uwatch2/lib/python/struct_util.py
csw/mgar/gar/v2-uwatch2/lib/python/testdata/neon_stats.py
csw/mgar/gar/v2-uwatch2/lib/python/testdata/tree_stats.py
csw/mgar/gar/v2-uwatch2/lib/sh/db_privileges.sh
csw/mgar/gar/v2-uwatch2/lib/sh/libcheckpkg.sh
csw/mgar/gar/v2-uwatch2/tests/run_tests.py
Added Paths:
-----------
csw/mgar/gar/v2-uwatch2/bin/csw-upload-pkg
csw/mgar/gar/v2-uwatch2/lib/python/catalog_notifier.py
csw/mgar/gar/v2-uwatch2/lib/python/catalog_notifier_test.py
csw/mgar/gar/v2-uwatch2/lib/python/csw_upload_pkg.py
csw/mgar/gar/v2-uwatch2/lib/python/rest.py
csw/mgar/gar/v2-uwatch2/lib/web/
csw/mgar/gar/v2-uwatch2/lib/web/README
csw/mgar/gar/v2-uwatch2/lib/web/pkgdb_web.py
csw/mgar/gar/v2-uwatch2/lib/web/releases_web.py
csw/mgar/gar/v2-uwatch2/lib/web/templates/
csw/mgar/gar/v2-uwatch2/lib/web/templates/CatalogDetail.html
csw/mgar/gar/v2-uwatch2/lib/web/templates/CatalogList.html
csw/mgar/gar/v2-uwatch2/lib/web/templates/Catalogname.html
csw/mgar/gar/v2-uwatch2/lib/web/templates/CatalognameList.html
csw/mgar/gar/v2-uwatch2/lib/web/templates/ErrorTagDetail.html
csw/mgar/gar/v2-uwatch2/lib/web/templates/ErrorTagList.html
csw/mgar/gar/v2-uwatch2/lib/web/templates/MaintainerCheckpkgReport.html
csw/mgar/gar/v2-uwatch2/lib/web/templates/MaintainerDetail.html
csw/mgar/gar/v2-uwatch2/lib/web/templates/MaintainerList.html
csw/mgar/gar/v2-uwatch2/lib/web/templates/Srv4Detail.html
csw/mgar/gar/v2-uwatch2/lib/web/templates/Srv4DetailFiles.html
csw/mgar/gar/v2-uwatch2/lib/web/templates/Srv4List.html
csw/mgar/gar/v2-uwatch2/lib/web/templates/index.html
csw/mgar/gar/v2-uwatch2/upload-application/
csw/mgar/gar/v2-uwatch2/upload-application/opencsw-upload-process-application.html
csw/mgar/gar/v2-uwatch2/upload-application/style.css
csw/mgar/gar/v2-uwatch2/upload-application/upload-process.svg
Removed Paths:
-------------
csw/mgar/gar/v2-uwatch2/lib/sh/run_full_cat.sh
csw/mgar/gar/v2-uwatch2/lib/web/README
csw/mgar/gar/v2-uwatch2/lib/web/pkgdb_web.py
csw/mgar/gar/v2-uwatch2/lib/web/releases_web.py
csw/mgar/gar/v2-uwatch2/lib/web/templates/
csw/mgar/gar/v2-uwatch2/lib/web/templates/CatalogDetail.html
csw/mgar/gar/v2-uwatch2/lib/web/templates/CatalogList.html
csw/mgar/gar/v2-uwatch2/lib/web/templates/Catalogname.html
csw/mgar/gar/v2-uwatch2/lib/web/templates/CatalognameList.html
csw/mgar/gar/v2-uwatch2/lib/web/templates/ErrorTagDetail.html
csw/mgar/gar/v2-uwatch2/lib/web/templates/ErrorTagList.html
csw/mgar/gar/v2-uwatch2/lib/web/templates/MaintainerCheckpkgReport.html
csw/mgar/gar/v2-uwatch2/lib/web/templates/MaintainerDetail.html
csw/mgar/gar/v2-uwatch2/lib/web/templates/MaintainerList.html
csw/mgar/gar/v2-uwatch2/lib/web/templates/Srv4Detail.html
csw/mgar/gar/v2-uwatch2/lib/web/templates/Srv4DetailFiles.html
csw/mgar/gar/v2-uwatch2/lib/web/templates/Srv4List.html
csw/mgar/gar/v2-uwatch2/lib/web/templates/index.html
csw/mgar/gar/v2-uwatch2/upload-application/opencsw-upload-process-application.html
csw/mgar/gar/v2-uwatch2/upload-application/style.css
csw/mgar/gar/v2-uwatch2/upload-application/upload-process.svg
Property Changed:
----------------
csw/mgar/gar/v2-uwatch2/
csw/mgar/gar/v2-uwatch2/lib/python/catalog_test.py
csw/mgar/gar/v2-uwatch2/lib/python/models_test.py
csw/mgar/gar/v2-uwatch2/pkglib/csw/depend
Property changes on: csw/mgar/gar/v2-uwatch2
___________________________________________________________________
Modified: svn:mergeinfo
- /csw/mgar/gar/v2:4936-6678
/csw/mgar/gar/v2-bwalton:9784-10011
/csw/mgar/gar/v2-checkpkg:7722-7855
/csw/mgar/gar/v2-checkpkg-override-relocation:10585-10737
/csw/mgar/gar/v2-checkpkg-stats:8454-8649
/csw/mgar/gar/v2-collapsed-modulations:6895
/csw/mgar/gar/v2-dirpackage:8125-8180
/csw/mgar/gar/v2-git/v2-relocate:7617
/csw/mgar/gar/v2-migrateconf:7082-7211
/csw/mgar/gar/v2-noexternals:11592-11745
/csw/mgar/gar/v2-relocate:5028-11738
/csw/mgar/gar/v2-skayser:6087-6132
/csw/mgar/gar/v2-sqlite:10434-10449
+ /csw/mgar/gar/v2:4936-6678,12141-13268
/csw/mgar/gar/v2-bwalton:9784-10011
/csw/mgar/gar/v2-checkpkg:7722-7855
/csw/mgar/gar/v2-checkpkg-override-relocation:10585-10737
/csw/mgar/gar/v2-checkpkg-stats:8454-8649
/csw/mgar/gar/v2-collapsed-modulations:6895
/csw/mgar/gar/v2-dirpackage:8125-8180
/csw/mgar/gar/v2-fortran:10883-12516
/csw/mgar/gar/v2-git/v2-relocate:7617
/csw/mgar/gar/v2-migrateconf:7082-7211
/csw/mgar/gar/v2-noexternals:11592-11745
/csw/mgar/gar/v2-relocate:5028-11738
/csw/mgar/gar/v2-skayser:6087-6132
/csw/mgar/gar/v2-sqlite:10434-10449
Modified: csw/mgar/gar/v2-uwatch2/bin/ap2mod_build_scripts
===================================================================
--- csw/mgar/gar/v2-uwatch2/bin/ap2mod_build_scripts 2011-02-13 12:51:06 UTC (rev 13269)
+++ csw/mgar/gar/v2-uwatch2/bin/ap2mod_build_scripts 2011-02-13 12:55:58 UTC (rev 13270)
@@ -18,7 +18,7 @@
AP2_PREFIX=/opt/csw/apache2
AP2_BINDIR=\$AP2_PREFIX/sbin
AP2_LIBEXEC=\$AP2_PREFIX/libexec
-AP2_CONFDIR=/etc/opt/csw/apache2
+AP2_CONFDIR=\$AP2_PREFIX/etc
AP2_CONFIG=\$AP2_CONFDIR/httpd.conf
AP2_APXS=\$AP2_BINDIR/apxs
Copied: csw/mgar/gar/v2-uwatch2/bin/csw-upload-pkg (from rev 13268, csw/mgar/gar/v2/bin/csw-upload-pkg)
===================================================================
--- csw/mgar/gar/v2-uwatch2/bin/csw-upload-pkg (rev 0)
+++ csw/mgar/gar/v2-uwatch2/bin/csw-upload-pkg 2011-02-13 12:55:58 UTC (rev 13270)
@@ -0,0 +1 @@
+link ../lib/python/csw_upload_pkg.py
\ No newline at end of file
Modified: csw/mgar/gar/v2-uwatch2/bin/gem2pkg
===================================================================
--- csw/mgar/gar/v2-uwatch2/bin/gem2pkg 2011-02-13 12:51:06 UTC (rev 13269)
+++ csw/mgar/gar/v2-uwatch2/bin/gem2pkg 2011-02-13 12:55:58 UTC (rev 13270)
@@ -21,16 +21,19 @@
DESCRIPTION = #{spec.summary}
define BLURB
-#{blurb.gsub(/(.{1,70})( +|$\n?)|(.{1,70})/, "\\1\\3\n")}
+ #{blurb.gsub(/(.{1,70})( +|$\n?)|(.{1,70})/, "\\1\\3")}
endef
EOF
- spec.dependencies.each do |d|
- puts "RUNTIME_DEP_PKGS += CSWgem-#{d.name}"
+ unless spec.dependencies.empty?
+ spec.dependencies.each do |d|
+ puts "RUNTIME_DEP_PKGS += CSWgem-#{d.name}"
+ end
+ puts
end
- puts "\nARCHALL = 1" if spec.extensions.size.eql?(0)
+ puts "ARCHALL = 1\n\n" if spec.extensions.size.eql?(0)
puts "include gar/category.mk"
rescue Exception => e
Modified: csw/mgar/gar/v2-uwatch2/categories/rbgems/category.mk
===================================================================
--- csw/mgar/gar/v2-uwatch2/categories/rbgems/category.mk 2011-02-13 12:51:06 UTC (rev 13269)
+++ csw/mgar/gar/v2-uwatch2/categories/rbgems/category.mk 2011-02-13 12:55:58 UTC (rev 13270)
@@ -10,7 +10,7 @@
DISTFILES += $(GEMFILE)
GEMPKGNAME ?= $(GEMNAME)
-GEMCATALOGNAME ?= $(GEMPKGNAME)
+GEMCATALOGNAME ?= $(subst -,_,$(GEMPKGNAME))
# PACKAGES ?= CSWgem-$(GEMPKGNAME) CSWgem-$(GEMPKGNAME)-doc
PACKAGES ?= CSWgem-$(GEMPKGNAME)
Modified: csw/mgar/gar/v2-uwatch2/gar.conf.mk
===================================================================
--- csw/mgar/gar/v2-uwatch2/gar.conf.mk 2011-02-13 12:51:06 UTC (rev 13269)
+++ csw/mgar/gar/v2-uwatch2/gar.conf.mk 2011-02-13 12:55:58 UTC (rev 13270)
@@ -480,6 +480,16 @@
SOS11_CXX ?= $(SOS11_CC_HOME)/bin/CC
SOS12_CXX ?= $(SOS12_CC_HOME)/bin/CC
SOS12U1_CXX ?= $(SOS12U1_CC_HOME)/bin/CC
+ GCC3_F77 ?= $(GCC3_CC_HOME)/bin/g77
+ GCC4_F77 ?= $(GCC4_CC_HOME)/bin/gfortran
+ SOS11_F77 ?= $(SOS11_CC_HOME)/bin/f77
+ SOS12_F77 ?= $(SOS12_CC_HOME)/bin/f77
+ SOS12U1_F77 ?= $(SOS12U1_CC_HOME)/bin/f77
+ GCC3_FC ?= $(GCC3_CC_HOME)/bin/g77
+ GCC4_FC ?= $(GCC4_CC_HOME)/bin/gfortran
+ SOS11_FC ?= $(SOS11_CC_HOME)/bin/f95
+ SOS12_FC ?= $(SOS12_CC_HOME)/bin/f95
+ SOS12U1_FC ?= $(SOS12U1_CC_HOME)/bin/f95
GCC3_CC_FLAGS ?= $(FLAVOR_FLAGS) $(ARCHFLAGS_$(GARCOMPILER)_$(ISA)) $(EXTRA_GCC3_CC_FLAGS) $(EXTRA_GCC_CC_FLAGS) $(EXTRA_CC_FLAGS)
GCC4_CC_FLAGS ?= $(FLAVOR_FLAGS) $(ARCHFLAGS_$(GARCOMPILER)_$(ISA)) $(EXTRA_GCC4_CC_FLAGS) $(EXTRA_GCC_CC_FLAGS) $(EXTRA_CC_FLAGS)
@@ -501,22 +511,43 @@
SOS11_LD_FLAGS ?= $(ARCHFLAGS_$(GARCOMPILER)_$(ISA)) $(EXTRA_SOS11_LD_FLAGS) $(EXTRA_SOS_LD_FLAGS) $(EXTRA_LD_FLAGS) -norunpath -xnorunpath
SOS12_LD_FLAGS ?= $(ARCHFLAGS_$(GARCOMPILER)_$(ISA)) $(EXTRA_SOS12_LD_FLAGS) $(EXTRA_SOS_LD_FLAGS) $(EXTRA_LD_FLAGS) -norunpath
SOS12U1_LD_FLAGS ?= $(ARCHFLAGS_$(GARCOMPILER)_$(ISA)) $(EXTRA_SOS12U1_LD_FLAGS) $(EXTRA_SOS_LD_FLAGS) $(EXTRA_LD_FLAGS) -norunpath
+ GCC3_FFLAGS ?= $(FLAVOR_FLAGS) $(ARCHFLAGS_$(GARCOMPILER)_$(ISA)) $(EXTRA_GCC3_FFLAGS) $(EXTRA_GCC_FFLAGS) $(EXTRA_FFLAGS)
+ GCC4_FFLAGS ?= $(FLAVOR_FLAGS) $(ARCHFLAGS_$(GARCOMPILER)_$(ISA)) $(EXTRA_GCC4_FFLAGS) $(EXTRA_GCC_FFLAGS) $(EXTRA_FFLAGS)
+ SOS11_FFLAGS ?= $(FLAVOR_FLAGS) $(ARCHFLAGS_$(GARCOMPILER)_$(ISA)) $(EXTRA_SOS11_FFLAGS) $(EXTRA_SOS_FFLAGS) $(EXTRA_FFLAGS) -norunpath
+ SOS12_FFLAGS ?= $(FLAVOR_FLAGS) $(ARCHFLAGS_$(GARCOMPILER)_$(ISA)) $(EXTRA_SOS12_FFLAGS) $(EXTRA_SOS_FFLAGS) $(EXTRA_FFLAGS) -norunpath
+ SOS12U1_FFLAGS ?= $(FLAVOR_FLAGS) $(ARCHFLAGS_$(GARCOMPILER)_$(ISA)) $(EXTRA_SOS12U1_FFLAGS) $(EXTRA_SOS_FFLAGS) $(EXTRA_FFLAGS) -norunpath
+ GCC3_FCFLAGS ?= $(FLAVOR_FLAGS) $(ARCHFLAGS_$(GARCOMPILER)_$(ISA)) $(EXTRA_GCC3_FCFLAGS) $(EXTRA_GCC_FCFLAGS) $(EXTRA_FCFLAGS)
+ GCC4_FCFLAGS ?= $(FLAVOR_FLAGS) $(ARCHFLAGS_$(GARCOMPILER)_$(ISA)) $(EXTRA_GCC4_FCFLAGS) $(EXTRA_GCC_FCFLAGS) $(EXTRA_FCFLAGS)
+ SOS11_FCFLAGS ?= $(FLAVOR_FLAGS) $(ARCHFLAGS_$(GARCOMPILER)_$(ISA)) $(EXTRA_SOS11_FCFLAGS) $(EXTRA_SOS_FCFLAGS) $(EXTRA_FCFLAGS) -norunpath
+ SOS12_FCFLAGS ?= $(FLAVOR_FLAGS) $(ARCHFLAGS_$(GARCOMPILER)_$(ISA)) $(EXTRA_SOS12_FCFLAGS) $(EXTRA_SOS_FCFLAGS) $(EXTRA_FCFLAGS) -norunpath
+ SOS12U1_FCFLAGS ?= $(FLAVOR_FLAGS) $(ARCHFLAGS_$(GARCOMPILER)_$(ISA)) $(EXTRA_SOS12U1_FCFLAGS) $(EXTRA_SOS_FCFLAGS) $(EXTRA_FCFLAGS) -norunpath
# Compiler version
GCC3_CC_VERSION = $(shell $(GCC3_CC) -v 2>&1| ggrep version)
GCC3_CXX_VERSION = $(shell $(GCC3_CXX) -v 2>&1| ggrep version)
+ GCC3_F77_VERSION = $(shell $(GCC3_F77) -v 2>&1| ggrep version)
GCC4_CC_VERSION = $(shell $(GCC4_CC) -v 2>&1| ggrep version)
GCC4_CXX_VERSION = $(shell $(GCC4_CXX) -v 2>&1| ggrep version)
+ GCC4_F77_VERSION = $(shell $(GCC4_F77) -v 2>&1| ggrep version)
+ GCC4_FC_VERSION = $(shell $(GCC4_FC) -v 2>&1| ggrep version)
SOS11_CC_VERSION = $(shell $(SOS11_CC) -V 2>&1| ggrep cc: | gsed -e 's/cc: //')
SOS11_CXX_VERSION = $(shell $(SOS11_CXX) -V 2>&1| ggrep CC: | gsed -e 's/CC: //')
+ SOS11_F77_VERSION = $(shell $(SOS11_F77) -V 2>&1| ggrep f90: | gsed -e 's/f90: //')
+ SOS11_FC_VERSION = $(shell $(SOS11_FC) -V 2>&1| ggrep f90: | gsed -e 's/f90: //')
SOS12_CC_VERSION = $(shell $(SOS12_CC) -V 2>&1| ggrep cc: | gsed -e 's/cc: //')
+ SOS12_F77_VERSION = $(shell $(SOS12_F77) -V 2>&1| ggrep -e 'f9[05]': | gsed -e 's/f9[05]: //')
+ SOS12_FC_VERSION = $(shell $(SOS12_FC) -V 2>&1| ggrep -e 'f9[05]:' | gsed -e 's/f9[05]: //')
SOS12_CXX_VERSION = $(shell $(SOS12_CXX) -V 2>&1| ggrep CC: | gsed -e 's/CC: //')
SOS12U1_CC_VERSION = $(shell $(SOS12U1_CC) -V 2>&1| ggrep cc: | gsed -e 's/cc: //')
SOS12U1_CXX_VERSION = $(shell $(SOS12U1_CXX) -V 2>&1| ggrep CC: | gsed -e 's/CC: //')
+SOS12U1_F77_VERSION = $(shell $(SOS12U1_F77) -V 2>&1| ggrep -e 'f9[05]': | gsed -e 's/f9[05]: //')
+ SOS12U1_FC_VERSION = $(shell $(SOS12U1_FC) -V 2>&1| ggrep -e 'f9[05]:' | gsed -e 's/f9[05]: //')
CC_VERSION = $($(GARCOMPILER)_CC_VERSION)
CXX_VERSION = $($(GARCOMPILER)_CXX_VERSION)
+F77_VERSION = $($(GARCOMPILER)_F77_VERSION)
+ FC_VERSION = $($(GARCOMPILER)_FC_VERSION)
#
# Construct compiler options
@@ -563,12 +594,17 @@
CC_HOME = $($(GARCOMPILER)_CC_HOME)
CC = $($(GARCOMPILER)_CC)
CXX = $($(GARCOMPILER)_CXX)
+F77 = $($(GARCOMPILER)_F77)
+FC = $($(GARCOMPILER)_FC)
+
CFLAGS ?= $(strip $($(GARCOMPILER)_CC_FLAGS) $(_CATEGORY_CFLAGS) $(EXTRA_CFLAGS))
CXXFLAGS ?= $(strip $($(GARCOMPILER)_CXX_FLAGS) $(_CATEGORY_CXXFLAGS) $(EXTRA_CXXFLAGS))
CPPFLAGS ?= $(strip $($(GARCOMPILER)_CPP_FLAGS) $(_CATEGORY_CPPFLAGS) $(EXTRA_CPPFLAGS) $(INCLUDE_FLAGS))
LDFLAGS ?= $(strip $($(GARCOMPILER)_LD_FLAGS) $(_CATEGORY_LDFLAGS) $(EXTRA_LDFLAGS) $(LINKER_FLAGS))
ASFLAGS ?= $(strip $($(GARCOMPILER)_AS_FLAGS) $(_CATEGORY_ASFLAGS) $(EXTRA_ASFLAGS))
OPTFLAGS ?= $(strip $($(GARCOMPILER)_CC_FLAGS) $(_CATEGORY_OPTFLAGS) $(EXTRA_OPTFLAGS))
+FFLAGS ?= $(strip $($(GARCOMPILER)_FFLAGS) $(_CATEGORY_FFLAGS) $(EXTRA_FFLAGS))
+FCFLAGS ?= $(strip $($(GARCOMPILER)_FCFLAGS) $(_CATEGORY_FCFLAGS) $(EXTRA_FCFLAGS))
GCC3_LD_OPTIONS = -R$(GCC3_CC_HOME)/lib $(EXTRA_GCC3_LD_OPTIONS) $(EXTRA_GCC_LD_OPTIONS)
GCC4_LD_OPTIONS = -R$(abspath $(GCC4_CC_HOME)/lib/$(MM_LIBDIR)) $(EXTRA_GCC4_LD_OPTIONS) $(EXTRA_GCC_LD_OPTIONS)
@@ -652,6 +688,7 @@
ifeq ($(origin COMPILER_EXPORTS), undefined)
COMPILER_EXPORTS = CPPFLAGS CFLAGS CXXFLAGS LDFLAGS
+COMPILER_EXPORTS += FFLAGS FCFLAGS
COMPILER_EXPORTS += ASFLAGS OPTFLAGS CC CXX
COMPILER_EXPORTS += CC_HOME CC_VERSION CXX_VERSION
endif
@@ -705,6 +742,8 @@
@echo
@echo " C Compiler: $(CC)"
@echo " C++ Compiler: $(CXX)"
+ @echo " F77 Compiler: $(F77)"
+ @echo " FC Compiler: $(FC)"
@echo
@echo "Compiler ISA generation matrix:"
@echo
@@ -753,6 +792,8 @@
echo " CFLAGS = $(CFLAGS)"; \
echo " CXXFLAGS = $(CXXFLAGS)"; \
echo " CPPFLAGS = $(CPPFLAGS)"; \
+ echo " FFLAGS = $(FFLAGS)"; \
+ echo " FCFLAGS = $(FCFLAGS)"; \
echo " LDFLAGS = $(LDFLAGS)"; \
echo " LD_OPTIONS = $(LD_OPTIONS)"; \
echo " ASFLAGS = $(ASFLAGS)"; \
Modified: csw/mgar/gar/v2-uwatch2/gar.lib.mk
===================================================================
--- csw/mgar/gar/v2-uwatch2/gar.lib.mk 2011-02-13 12:51:06 UTC (rev 13269)
+++ csw/mgar/gar/v2-uwatch2/gar.lib.mk 2011-02-13 12:55:58 UTC (rev 13270)
@@ -723,37 +723,51 @@
xz-patch-%:
@echo " ==> Applying patch $(DOWNLOADDIR)/$*"
@xz -dc $(DOWNLOADDIR)/$* | $(GARPATCH)
- @( cd $(WORKSRC); git add -A; \
- git commit -am "old xz-style patch: $*"; )
+ @( if [ -z "$(NOGITPATCH)" ]; then \
+ cd $(WORKSRC); git add -A; \
+ git commit -am "old xz-style patch: $*"; \
+ fi )
@$(MAKECOOKIE)
# apply bzipped patches
bz-patch-%:
@echo " ==> Applying patch $(DOWNLOADDIR)/$*"
@bzip2 -dc $(DOWNLOADDIR)/$* | $(GARPATCH)
- @( cd $(WORKSRC); git add -A; \
- git commit -am "old bz-style patch: $*"; )
+ @( if [ -z "$(NOGITPATCH)" ]; then \
+ cd $(WORKSRC); git add -A; \
+ git commit -am "old bz-style patch: $*"; \
+ fi )
@$(MAKECOOKIE)
# apply gzipped patches
gz-patch-%:
@echo " ==> Applying patch $(DOWNLOADDIR)/$*"
@gzip -dc $(DOWNLOADDIR)/$* | $(GARPATCH)
- @( cd $(WORKSRC); git add -A; \
- git commit -am "old gz-style patch: $*"; )
+ @( if [ -z "$(NOGITPATCH)" ]; then \
+ cd $(WORKSRC); git add -A; \
+ git commit -am "old gz-style patch: $*"; \
+ fi )
@$(MAKECOOKIE)
# apply normal patches (git format-patch output or old-style diff -r)
normal-patch-%:
@echo " ==> Applying patch $(DOWNLOADDIR)/$*"
@( if ggrep -q 'diff --git' $(abspath $(DOWNLOADDIR)/$*); then \
- cd $(WORKSRC); git am --ignore-space-change --ignore-whitespace $(abspath $(DOWNLOADDIR)/$*); \
- else \
+ if [ -z "$(NOGITPATCH)" ]; then \
+ cd $(WORKSRC);\
+ git am --ignore-space-change --ignore-whitespace $(abspath $(DOWNLOADDIR)/$*); \
+ else \
+ $(GARPATCH) < $(DOWNLOADDIR)/$*; \
+ fi; \
+ else \
echo Adding old-style patch...; \
$(GARPATCH) < $(DOWNLOADDIR)/$*; \
- cd $(WORKSRC); git add -A; \
- git commit -am "old style patch: $*"; \
- fi )
+ if [ -z "$(NOGITPATCH)" ]; then \
+ cd $(WORKSRC); \
+ git add -A; \
+ git commit -am "old style patch: $*"; \
+ fi; \
+ fi )
@$(MAKECOOKIE)
### PATCH FILE TYPE MAPPINGS ###
Modified: csw/mgar/gar/v2-uwatch2/gar.mk
===================================================================
--- csw/mgar/gar/v2-uwatch2/gar.mk 2011-02-13 12:51:06 UTC (rev 13269)
+++ csw/mgar/gar/v2-uwatch2/gar.mk 2011-02-13 12:55:58 UTC (rev 13270)
@@ -129,7 +129,7 @@
merge-$(2):
@echo "[===== Building modulation '$(2)' on host '$$(BUILDHOST)' =====]"
$$(if $$(and $$(BUILDHOST),$$(filter-out $$(THISHOST),$$(BUILDHOST))),\
- $(SSH) $$(BUILDHOST) "PATH=$$(PATH) MAKEFLAGS=\"$(MAKEFLAGS)\" $(MAKE) -C $$(CURDIR) $(if $(GAR_PLATFORM),GAR_PLATFORM=$(GAR_PLATFORM)) MODULATION=$(2) $(3) merge-modulated",\
+ $(SSH) $$(BUILDHOST) "PATH=$$(PATH) MAKEFLAGS=\"$$(MAKEFLAGS)\" $(MAKE) -C $$(CURDIR) $(if $(GAR_PLATFORM),GAR_PLATFORM=$(GAR_PLATFORM)) MODULATION=$(2) $(3) merge-modulated",\
$(MAKE) $(if $(GAR_PLATFORM),GAR_PLATFORM=$(GAR_PLATFORM)) MODULATION=$(2) $(3) merge-modulated\
)
@# The next line has intentionally been left blank to explicitly terminate this make rule
@@ -444,7 +444,7 @@
_var_definitions = $(foreach VAR,$(shell perl -ne 'print "$$1 " if( /@([^@]+)@/ )' <$1),$(VAR)=$($(VAR)))
expandvars-%:
- $(call _var_definitions,$(WORKDIR)/$*) perl -i-unexpanded -npe 's/@([^@]+)@/$$ENV{$$1}/e' $(WORKDIR)/$*
+ $(call _var_definitions,$(WORKDIR)/$*) perl -i-unexpanded -npe 's/@([^@]+)@/$$ENV{$$1}/eg' $(WORKDIR)/$*
@$(MAKECOOKIE)
Modified: csw/mgar/gar/v2-uwatch2/gar.pkg.mk
===================================================================
--- csw/mgar/gar/v2-uwatch2/gar.pkg.mk 2011-02-13 12:51:06 UTC (rev 13269)
+++ csw/mgar/gar/v2-uwatch2/gar.pkg.mk 2011-02-13 12:55:58 UTC (rev 13270)
@@ -32,12 +32,12 @@
ifeq ($(origin PACKAGES), undefined)
PACKAGES = $(if $(filter %.gspec,$(DISTFILES)),,CSW$(NAME))
-CATALOGNAME ?= $(if $(filter %.gspec,$(DISTFILES)),,$(NAME))
+CATALOGNAME ?= $(if $(filter %.gspec,$(DISTFILES)),,$(subst -,_,$(NAME)))
SRCPACKAGE_BASE = $(firstword $(basename $(filter %.gspec,$(DISTFILES))) $(PACKAGES))
SRCPACKAGE ?= $(SRCPACKAGE_BASE)-src
SPKG_SPECS ?= $(basename $(filter %.gspec,$(DISTFILES))) $(PACKAGES) $(if $(NOSOURCEPACKAGE),,$(SRCPACKAGE))
else
-CATALOGNAME ?= $(if $(filter-out $(firstword $(PACKAGES)),$(PACKAGES)),,$(patsubst CSW%,%,$(PACKAGES)))
+CATALOGNAME ?= $(if $(filter-out $(firstword $(PACKAGES)),$(PACKAGES)),,$(subst -,_,$(patsubst CSW%,%,$(PACKAGES))))
SRCPACKAGE_BASE = $(firstword $(PACKAGES))
SRCPACKAGE ?= $(SRCPACKAGE_BASE)-src
SPKG_SPECS ?= $(sort $(basename $(filter %.gspec,$(DISTFILES))) $(PACKAGES) $(if $(NOSOURCEPACKAGE),,$(SRCPACKAGE)))
@@ -112,7 +112,7 @@
$(if $(CATALOGNAME),
$(CATALOGNAME),
$(if $(filter $(1),$(PACKAGES)),
- $(patsubst CSW%,%,$(1)),
+ $(subst -,_,$(patsubst CSW%,%,$(1))),
$(if $(realpath files/$(1).gspec),
$(shell perl -F'\s+' -ane 'print "$$F[2]" if( $$F[0] eq "%var" && $$F[1] eq "bitname")' files/$(1).gspec),
$(error The catalog name for the package '$1' could not be determined, because it was neither in PACKAGES nor was there a gspec-file)
@@ -195,6 +195,7 @@
# - set class for all config files
_CSWCLASS_FILTER = | perl -ane '\
+ $(foreach FILE,$(CPTEMPLATES),$$F[1] = "cswcptemplates" if( $$F[2] =~ m(^$(FILE)$$) );)\
$(foreach FILE,$(MIGRATECONF),$$F[1] = "cswmigrateconf" if( $$F[2] =~ m(^$(FILE)$$) );)\
$(foreach FILE,$(SAMPLECONF:%\.CSW=%),$$F[1] = "cswcpsampleconf" if ( $$F[2] =~ m(^$(FILE)\.CSW$$) );)\
$(foreach FILE,$(PRESERVECONF:%\.CSW=%),$$F[1] = "cswpreserveconf" if( $$F[2] =~ m(^$(FILE)\.CSW$$) );)\
@@ -296,7 +297,7 @@
PKGFILES_DEVEL += $(includedir)/.*
PKGFILES_DEVEL += $(sharedstatedir)/aclocal/.*
PKGFILES_DEVEL += $(mandir)/man1/.*-config\.1.*
-PKGFILES_DEVEL += $(mandir)/man3/.*
+PKGFILES_DEVEL += $(mandir)/man3/.*\.3
# PKGFILES_DOC selects files beloging to a documentation package
PKGFILES_DOC = $(docdir)/.*
@@ -718,6 +719,7 @@
reset-merge-checkpkgoverrides:
@rm -f $(COOKIEDIR)/merge-checkpkgoverrides $(foreach SPEC,$(_PKG_SPECS),$(COOKIEDIR)/merge-checkpkgoverrides-$(SPEC))
+ @rm -f $(foreach S,$(SPKG_SPECS),$(WORKDIR_GLOBAL)/checkpkg_override.$S)
merge-alternatives-%:
@echo "[ Generating alternatives for package $* ]"
@@ -769,7 +771,7 @@
$(if $(strip $(foreach P,$(PACKAGING_PLATFORMS),$(PACKAGING_HOST_$P))),\
$(if $(filter $(THISHOST),$(foreach P,$(PACKAGING_PLATFORMS),$(PACKAGING_HOST_$P))),\
@$(MAKECOOKIE),\
- $(warning *** You are building this package on a non-requested platform host '$(THISHOST)'. The follow platforms were requested:)\
+ $(warning *** You are building this package on a non-requested platform host '$(THISHOST)'. The following platforms were requested:)\
$(foreach P,$(PACKAGING_PLATFORMS),\
$(warning *** - $P $(if $(PACKAGING_HOST_$P),to be build on host '$(PACKAGING_HOST_$P)',with no suitable host available))\
)\
Modified: csw/mgar/gar/v2-uwatch2/lib/python/README
===================================================================
--- csw/mgar/gar/v2-uwatch2/lib/python/README 2011-02-13 12:51:06 UTC (rev 13269)
+++ csw/mgar/gar/v2-uwatch2/lib/python/README 2011-02-13 12:55:58 UTC (rev 13270)
@@ -4,20 +4,23 @@
Checks to implement:
- foo_bar != CSWfoo-bar -> error
- - *dev(el)? -> error, suggest *-devel
+ - *dev(el)? -> error, suggest *-dev
- *-?rt -> error, suggest specific library packages
- empty package without 'transitional' in the name --> error, suggest
- 'transitional'
+ 'transitional'
- CSWpmfoo --> error, suggest CSWpm-foo
- Dependency on a transitional package --> error
- ('transitional', 'stub', 'legacy')
+ ('transitional', 'stub', 'legacy')
- Dependency on CSWcas-initsmf + rc* files --> error
+ - A package must not be incompatible with itself
Development plan for checkpkg:
-- Generalize dependency checking by adding NeedFile(file_list, reason) to
- error_mgr. It's possible to need one of the listed files only, so files are
- given as alternatives, but the reason is common.
+- Notify maintainers when their package is available from mirrors
+- Allow maintainers to opt-out from these notifications
+- Add support for the 'overridden' field in the database
+- When adding a package to a catalog, store the time and date of the
+ addition
- Display stats from each run
- Shorten the on-screen output, add commands to display override lines
- Move the set check stats outside of checking functions, remove the special
@@ -37,6 +40,9 @@
- Move the 'data' field of the srv4_file table to a separate table (should
speed up checking if stats are already collected)
- Store run history
+- Generalize dependency checking by adding NeedFile(file_list, reason) to
+ error_mgr. It's possible to need one of the listed files only, so files are
+ given as alternatives, but the reason is common.
Known problems:
- libmagic fails sometimes when processing the whole catalog
@@ -56,3 +62,13 @@
python-progressbar \
python-sqlobject \
python-yaml
+
+===Checkpkg database===
+
+Additional database indexes:
+
+# TODO(maciej): Add them to the models.py
+
+CREATE INDEX srv4_mtime_idx ON srv4_file_stats(mtime);
+CREATE INDEX srv4_md5_idx ON srv4_file_stats(md5_sum);
+CREATE INDEX catalog_idx ON srv4_file_in_catalog (arch_id, osrel_id, catrel_id);
Modified: csw/mgar/gar/v2-uwatch2/lib/python/catalog.py
===================================================================
--- csw/mgar/gar/v2-uwatch2/lib/python/catalog.py 2011-02-13 12:51:06 UTC (rev 13269)
+++ csw/mgar/gar/v2-uwatch2/lib/python/catalog.py 2011-02-13 12:55:58 UTC (rev 13270)
@@ -65,9 +65,10 @@
class OpencswCatalog(object):
"""Represents a catalog file."""
- def __init__(self, file_name):
- self.file_name = file_name
+ def __init__(self, fd):
+ self.fd = fd
self.by_basename = None
+ self.by_catalogname = None
self.catalog_data = None
def _ParseCatalogLine(self, line):
@@ -96,12 +97,12 @@
r"(?P<deps>\S+)"
r"\s+"
# none
- r"(?P<none_thing_1>\S+)"
+ r"(?P<category>\S+)"
# An optional empty field.
r"("
r"\s+"
# none\n'
- r"(?P<none_thing_2>\S+)"
+ r"(?P<i_deps>\S+)"
r")?"
r"$"
),
@@ -109,6 +110,14 @@
cline_re_list = [re.compile(x) for x in cline_re_str_list]
matched = False
d = None
+ def SplitPkgList(pkglist):
+ if not pkglist:
+ pkglist = ()
+ elif pkglist == "none":
+ pkglist = ()
+ else:
+ pkglist = tuple(pkglist.split("|"))
+ return pkglist
for cline_re in cline_re_list:
m = cline_re.match(line)
if m:
@@ -116,6 +125,8 @@
matched = True
if not d:
raise CatalogLineParseError("Parsed %s data is empty" % repr(line))
+ d["deps"] = SplitPkgList(d["deps"])
+ d["i_deps"] = SplitPkgList(d["i_deps"])
if not matched:
raise CatalogLineParseError("No regexes matched %s" % repr(line))
return d
@@ -133,8 +144,7 @@
def GetCatalogData(self):
if not self.catalog_data:
- fd = open(self.file_name, "r")
- self.catalog_data = self._GetCatalogData(fd)
+ self.catalog_data = self._GetCatalogData(self.fd)
return self.catalog_data
def GetDataByBasename(self):
@@ -146,3 +156,43 @@
logging.error("%s is missing the file_basename field", d)
self.by_basename[d["file_basename"]] = d
return self.by_basename
+
+ def GetDataByCatalogname(self):
+ if not self.by_catalogname:
+ self.by_catalogname = {}
+ cd = self.GetCatalogData()
+ for d in cd:
+ if "catalogname" not in d:
+ logging.error("%s is missing the catalogname field", d)
+ if d["catalogname"] in self.by_catalogname:
+ logging.warning("Catalog name %s is duplicated!", d["catalogname"])
+ self.by_catalogname[d["catalogname"]] = d
+ return self.by_catalogname
+
+
+class CatalogComparator(object):
+
+ def GetCatalogDiff(self, cat_a, cat_b):
+ """Returns a difference between two catalogs."""
+ if type(cat_a) == dict:
+ bc_a = cat_a
+ else:
+ bc_a = cat_a.GetDataByCatalogname()
+ if type(cat_b) == dict:
+ bc_b = cat_b
+ else:
+ bc_b = cat_b.GetDataByCatalogname()
+ cn_a = set(bc_a)
+ cn_b = set(bc_b)
+ new_catalognames = cn_b.difference(cn_a)
+ removed_catalognames = cn_a.difference(cn_b)
+ same_catalognames = cn_b.intersection(cn_a)
+ # Looking for updated catalognames
+ updated_catalognames = set()
+ for catalogname in same_catalognames:
+ if bc_a[catalogname]["version"] != bc_b[catalogname]["version"]:
+ updated_catalognames.add(catalogname)
+ new_pkgs = [bc_b[x] for x in new_catalognames]
+ removed_pkgs = [bc_a[x] for x in removed_catalognames]
+ updated_pkgs = [{"from": bc_a[x], "to": bc_b[x]} for x in updated_catalognames]
+ return new_pkgs, removed_pkgs, updated_pkgs
Copied: csw/mgar/gar/v2-uwatch2/lib/python/catalog_notifier.py (from rev 13268, csw/mgar/gar/v2/lib/python/catalog_notifier.py)
===================================================================
--- csw/mgar/gar/v2-uwatch2/lib/python/catalog_notifier.py (rev 0)
+++ csw/mgar/gar/v2-uwatch2/lib/python/catalog_notifier.py 2011-02-13 12:55:58 UTC (rev 13270)
@@ -0,0 +1,282 @@
+#!/usr/bin/env python2.6
+
+"""Polls a designated catalog tree, and sends notifications about
+package updates."""
+
+import optparse
+import catalog
+import common_constants
+from Cheetah import Template
+import urllib2
+import logging
+import configuration
+import pprint
+import cPickle
+import json
+import os.path
+import smtplib
+from email.mime.text import MIMEText
+import rest
+
+REPORT_TMPL = u"""Catalog update report for $email
+Catalog URL: $url
+#import re
+#def CatalogList($catalogs)
+#set $by_catrel = {}
+#set $unused = [by_catrel.setdefault(x[0], []).append(x[1:]) for x in $catalogs]
+#for catrel in $by_catrel:
+ - $catrel: #
+#set $by_arch = {}
+#set $unused = [by_arch.setdefault(x[0], []).append(x[1:]) for x in $by_catrel[$catrel]]
+#set $first = True
+#for arch in $by_arch:
+#if not $first
+, #
+#else
+#set $first = False
+#end if
+$arch (#
+#echo ", ".join([re.sub(r'^.*OS', '', x[0]) for x in $by_arch[$arch]]) + ")"
+#end for
+
+#end for
+#end def
+#if "new_pkgs" in $pkg_data
+
+New packages:
+#for basename in $pkg_data["new_pkgs"]
+* $basename
+ In catalogs:
+#set $catalogs = $sorted($pkg_data["new_pkgs"][basename]["catalogs"])
+$CatalogList($catalogs)
+#end for
+#end if
+#if "removed_pkgs" in $pkg_data
+
+Removed packages:
+#for basename in $pkg_data["removed_pkgs"]
+* $basename
+ From catalogs:
+#set $catalogs = $sorted($pkg_data["removed_pkgs"][basename]["catalogs"])
+$CatalogList($catalogs)
+#end for
+#end if
+#if "upgraded_pkg" in $pkg_data
+
+Version change (probably upgrade):
+#for basename in $pkg_data["upgraded_pkg"]
+#for from_basename in $pkg_data["upgraded_pkg"][basename]["from_pkg"]
+- $pkg_data["upgraded_pkg"][basename]["from_pkg"][from_basename]["file_basename"]
+#end for
++ $pkg_data["upgraded_pkg"][basename]["to_pkg"]["file_basename"]
+ In catalogs:
+#set $catalogs = $sorted($pkg_data["upgraded_pkg"][basename]["catalogs"])
+$CatalogList($catalogs)
+#end for
+#end if
+#if "lost_pkg" in $pkg_data
+
+You no longer maintain packages:
+#for basename in $pkg_data["lost_pkg"]
+#for from_basename in $pkg_data["lost_pkg"][basename]["from_pkg"]
+- $pkg_data["lost_pkg"][basename]["from_pkg"][from_basename]["file_basename"]
+#end for
+ In catalogs:
+#set $catalogs = $sorted($pkg_data["lost_pkg"][basename]["catalogs"])
+$CatalogList($catalogs)
+#end for
+#end if
+#if "got_pkg" in $pkg_data
+
+You took over packages:
+#for basename in $pkg_data["got_pkg"]
+* $basename
+ In catalogs:
+#set $catalogs = $sorted($pkg_data["got_pkg"][basename]["catalogs"])
+$CatalogList($catalogs)
+#end for
+#end if
+"""
+
+class NotificationFormatter(object):
+  """Formats per-maintainer e-mail notifications about catalog changes."""
+
+  def _GetPkgsByMaintainer(self, catalogs, rest_client):
+    """Groups catalog diff results by maintainer e-mail address.
+
+    Args:
+      catalogs: A list of 5-tuples (catrel, arch, osrel, cat_a, cat_b).
+      rest_client: Object exposing GetMaintainerByMd5(md5sum), returning a
+        dict with a "maintainer_email" key.
+
+    Returns:
+      A dict keyed by maintainer e-mail.  Values are dicts keyed by labels
+      ("new_pkgs", "removed_pkgs", "upgraded_pkg", "lost_pkg", "got_pkg"),
+      each holding per-basename package data plus the list of affected
+      (catrel, arch, osrel) catalog keys.
+    """
+    c = catalog.CatalogComparator()
+    pkgs_by_maintainer = {}
+    for catrel, arch, osrel, cat_a, cat_b in catalogs:
+      catalog_key = (catrel, arch, osrel)
+      new_pkgs, removed_pkgs, updated_pkgs = c.GetCatalogDiff(cat_a, cat_b)
+      # Plain additions and removals are attributed to the package's
+      # maintainer, looked up by the package md5 sum.
+      labels_and_lists = (
+          ("new_pkgs", new_pkgs),
+          ("removed_pkgs", removed_pkgs),
+      )
+      for label, pkg_list in labels_and_lists:
+        for pkg in pkg_list:
+          maintainer = rest_client.GetMaintainerByMd5(pkg["md5sum"])
+          maintainer_email = maintainer["maintainer_email"]
+          pkgs_by_maintainer.setdefault(maintainer_email, {})
+          pkgs_by_maintainer[maintainer_email].setdefault(label, {})
+          labeled = pkgs_by_maintainer[maintainer_email][label]
+          basename = pkg["file_basename"]
+          labeled.setdefault(basename, {
+            "pkg": pkg,
+            "catalogs": [],
+          })
+          labeled[basename]["catalogs"].append(catalog_key)
+      # Updates may involve two maintainers: compare the maintainer of the
+      # old package with the maintainer of the new one.
+      for d in updated_pkgs:
+        from_pkg = d["from"]
+        to_pkg = d["to"]
+        maintainer_from = rest_client.GetMaintainerByMd5(from_pkg["md5sum"])
+        maintainer_to = rest_client.GetMaintainerByMd5(to_pkg["md5sum"])
+        from_email = maintainer_from["maintainer_email"]
+        to_email = maintainer_to["maintainer_email"]
+        if from_email == to_email:
+          # A normal upgrade, no takeover
+          label = "upgraded_pkg"
+          self._StorePkgUpdate(catalog_key,
+              label, pkgs_by_maintainer, from_email, from_pkg, to_pkg)
+        else:
+          # Package takeover
+          self._StorePkgUpdate(catalog_key,
+              "lost_pkg", pkgs_by_maintainer, from_email, from_pkg, to_pkg)
+          self._StorePkgUpdate(catalog_key,
+              "got_pkg", pkgs_by_maintainer, to_email, from_pkg, to_pkg)
+    return pkgs_by_maintainer
+
+  def _StorePkgUpdate(self,
+      catalog_key,
+      label, pkgs_by_maintainer, email, from_pkg, to_pkg):
+    """Records one package update under the given label and e-mail.
+
+    Entries are keyed by the *target* package basename; the old package(s)
+    accumulate in the "from_pkg" sub-dict.  Mutates pkgs_by_maintainer.
+    """
+    pkgs_by_maintainer.setdefault(email, {})
+    pkgs_by_maintainer[email].setdefault(label, {})
+    labeled = pkgs_by_maintainer[email][label]
+    basename = to_pkg["file_basename"]
+    labeled.setdefault(basename, {
+      "to_pkg": to_pkg,
+      "from_pkg": {},
+      "catalogs": [],
+    })
+    labeled[basename]["from_pkg"][from_pkg["file_basename"]] = from_pkg
+    labeled[basename]["catalogs"].append(catalog_key)
+
+  def _RenderForMaintainer(self, pkg_data, email, url):
+    """Renders the Cheetah report template for a single maintainer."""
+    namespace = {
+        "pkg_data": pkg_data,
+        "email": email,
+        "url": url}
+    t = Template.Template(REPORT_TMPL, searchList=[namespace])
+    return unicode(t)
+
+  def FormatNotifications(self, url, catalogs, rest_client):
+    """Formats a notification from a series of catalogs.
+
+    Args:
+      url: Base URL for catalogs
+      catalogs: A list of 5-tuples (catrel, arch, osrel, cat_a, cat_b)
+      rest_client: An interface to the outside world
+
+    Returns:
+      A dict mapping maintainer e-mail to rendered notification text.
+    """
+    pkgs_by_maintainer = self._GetPkgsByMaintainer(catalogs, rest_client)
+    rendered_notifications = {}
+    for email in pkgs_by_maintainer:
+      rendered_notifications[email] = self._RenderForMaintainer(
+          pkgs_by_maintainer[email], email, url)
+    return rendered_notifications
+
+
+class CatalogIndexDownloader(object):
+  """Downloads and parses catalog index files from a catalog tree URL."""
+
+  def GetCatalogsByTriad(self, cat_tree_url):
+    """Fetches catalogs for every (catrel, arch, osrel) combination.
+
+    Args:
+      cat_tree_url: Base URL of the catalog tree (expected to end with "/",
+        since the catalog path is appended directly).
+
+    Returns:
+      A dict mapping (catrel, arch, osrel) to the catalog's
+      by-catalogname data structure.  Triads whose catalog file could not
+      be fetched are omitted; the URLError is logged as a warning only.
+    """
+    catalogs_by_triad = {}
+    for catrel in common_constants.DEFAULT_CATALOG_RELEASES:
+      for arch in common_constants.PHYSICAL_ARCHITECTURES:
+        for osrel in common_constants.OS_RELS:
+          # Catalog URLs use the bare release number, e.g. "5.10".
+          short_osrel = osrel.replace("SunOS", "")
+          catalog_file_url = (
+              "%s%s/%s/%s/catalog"
+              % (cat_tree_url, catrel, arch, short_osrel))
+          logging.info("Opening %s", repr(catalog_file_url))
+          try:
+            f = urllib2.urlopen(catalog_file_url)
+            key = (catrel, arch, osrel)
+            catalog_instance = catalog.OpencswCatalog(f)
+            catalogs_by_triad[key] = catalog_instance.GetDataByCatalogname()
+          except urllib2.URLError, e:
+            logging.warning(e)
+    return catalogs_by_triad
+
+
+def main():
+ DEFAULT_URL = "http://mirror.opencsw.org/opencsw/"
+ DEFAULT_URL = "http://ivy.home.blizinski.pl/~maciej/opencsw/"
+ parser = optparse.OptionParser()
+ parser.add_option("-u", "--url",
+ dest="url", help="Base URL of OpenCSW catalog",
+ default=DEFAULT_URL)
+ parser.add_option("-d", "--debug",
+ dest="debug", action="store_true",
+ default=False)
+ parser.add_option("-s", "--send-notifications",
+ dest="send_notifications", action="store_true",
+ default=False)
+ parser.add_option("-p", "--pickle-file",
+ dest="pickle_file", help="Pickle file",
+ default="/tmp/opencsw-notifier-data/example.pickle")
+ parser.add_option("-w", "--whitelist",
+ dest="whitelist",
+ help="E-mail address whitelist, comma separated",
+ default=None)
+ options, args = parser.parse_args()
+ logging.basicConfig(level=logging.DEBUG)
+ # Getting catalogs
+ cat_tree_url = options.url
+ downloader = CatalogIndexDownloader()
+ catalogs_by_triad = downloader.GetCatalogsByTriad(cat_tree_url)
+ pickle_path = options.pickle_file
+ previous_catalogs_by_triad = None
+ try:
+ with open(pickle_path, "rb") as fd:
+ previous_catalogs_by_triad = cPickle.load(fd)
+ except (IOError, EOFError), e:
+ logging.warning(e)
+ previous_catalogs_by_triad = {}
+ # Merge the two data structures here
+ catalogs = []
+ for key in catalogs_by_triad:
+ if key in previous_catalogs_by_triad:
+ catalogs.append(
+ # ("fossil", "amd65", "SolarOS5.12", cat_a, cat_b),
+ key + (previous_catalogs_by_triad[key], catalogs_by_triad[key])
+ )
+ else:
+ logging.debug("%s not found in previous_catalogs_by_triad", key)
+ formatter = NotificationFormatter()
+ rest_client = rest.RestClient()
+ notifications = formatter.FormatNotifications(
+ cat_tree_url, catalogs, rest_client)
+ whitelist = frozenset()
+ if options.whitelist:
+ whitelist = frozenset(options.whitelist.split(","))
+ logging.debug("Email whitelist: %s", whitelist)
+ for email in notifications:
+ if options.send_notifications:
+ logging.debug("email: %s", repr(email))
+ if whitelist and email not in whitelist:
+ continue
+ logging.debug("Sending.")
+ msg = MIMEText(notifications[email])
+ msg["Subject"] = "OpenCSW catalog update report"
+ from_address = "Catalog update notifier <noreply at opencsw.org>"
+ msg['From'] = from_address
+ msg['To'] = email
+ s = smtplib.SMTP('localhost')
+ s.sendmail(from_address, [email], msg.as_string())
+ s.quit()
+ logging.debug("E-mail sent.")
+ else:
+ print notifications[email]
+ print "* * *"
+ with open(pickle_path, "wb") as fd:
+ cPickle.dump(catalogs_by_triad, fd)
+
+
+if __name__ == '__main__':
+ main()
Copied: csw/mgar/gar/v2-uwatch2/lib/python/catalog_notifier_test.py (from rev 13268, csw/mgar/gar/v2/lib/python/catalog_notifier_test.py)
===================================================================
--- csw/mgar/gar/v2-uwatch2/lib/python/catalog_notifier_test.py (rev 0)
+++ csw/mgar/gar/v2-uwatch2/lib/python/catalog_notifier_test.py 2011-02-13 12:55:58 UTC (rev 13270)
@@ -0,0 +1,220 @@
+#!/usr/bin/env python2.6
+
+import unittest
+import mox
+import catalog_notifier
+import catalog
+import catalog_test
+import copy
+import pprint
+import rest
+
+
+class NotificationFormatterTest(mox.MoxTestBase):
+  """Tests for catalog_notifier.NotificationFormatter.
+
+  The rest client and the catalogs are mox mocks; expectations are
+  recorded in the exact order _GetPkgsByMaintainer consumes them
+  (catalog diffs first, then maintainer lookups per package).
+  """
+
+  def disabled_testOne(self):
+    """This tested too much."""
+    f = catalog_notifier.NotificationFormatter()
+    rest_client_mock = self.mox.CreateMock(rest.RestClient)
+    url = "http://www.example.com/opencsw/"
+    cat_a = self.mox.CreateMock(catalog.OpencswCatalog)
+    cat_b = self.mox.CreateMock(catalog.OpencswCatalog)
+    catalogs = [
+        ("fossil", "amd65", "SolarOS5.12", cat_a, cat_b),
+    ]
+    maintainers = {
+        "cfe40c06e994f6e8d3b191396d0365cb": {"maintainer_email": "joe at example.com"},
+    }
+    cat_a.GetDataByCatalogname().AndReturn({})
+    cat_b.GetDataByCatalogname().AndReturn({"syslog_ng": catalog_test.PKG_STRUCT_1})
+    self.mox.ReplayAll()
+    self.assertEqual(
+        "report here",
+        f.FormatNotification(url, catalogs, rest_client_mock))
+
+  def test_GetPkgsByMaintainerNew(self):
+    """A package added in four catalogs yields one 'new_pkgs' entry."""
+    f = catalog_notifier.NotificationFormatter()
+    rest_client_mock = self.mox.CreateMock(rest.RestClient)
+    cat_a = self.mox.CreateMock(catalog.OpencswCatalog)
+    cat_b = self.mox.CreateMock(catalog.OpencswCatalog)
+    catalogs = [
+        ("fossil", "amd65", "SolarOS5.12", cat_a, cat_b),
+        ("fossil", "amd65", "SolarOS5.13", cat_a, cat_b),
+        ("fossil", "amd67", "SolarOS5.12", cat_a, cat_b),
+        ("rock", "amd65", "SolarOS5.12", cat_a, cat_b),
+    ]
+    # One maintainer lookup per catalog in which the package appears.
+    rest_client_mock.GetMaintainerByMd5('cfe40c06e994f6e8d3b191396d0365cb').AndReturn(
+        {"maintainer_email": "joe at example.com"}
+    )
+    rest_client_mock.GetMaintainerByMd5('cfe40c06e994f6e8d3b191396d0365cb').AndReturn(
+        {"maintainer_email": "joe at example.com"}
+    )
+    rest_client_mock.GetMaintainerByMd5('cfe40c06e994f6e8d3b191396d0365cb').AndReturn(
+        {"maintainer_email": "joe at example.com"}
+    )
+    rest_client_mock.GetMaintainerByMd5('cfe40c06e994f6e8d3b191396d0365cb').AndReturn(
+        {"maintainer_email": "joe at example.com"}
+    )
+    cat_a.GetDataByCatalogname().AndReturn({})
+    cat_b.GetDataByCatalogname().AndReturn({
+      "syslog_ng": catalog_test.PKG_STRUCT_1,
+    })
+    cat_a.GetDataByCatalogname().AndReturn({})
+    cat_b.GetDataByCatalogname().AndReturn({
+      "syslog_ng": catalog_test.PKG_STRUCT_1,
+    })
+    cat_a.GetDataByCatalogname().AndReturn({})
+    cat_b.GetDataByCatalogname().AndReturn({
+      "syslog_ng": catalog_test.PKG_STRUCT_1,
+    })
+    cat_a.GetDataByCatalogname().AndReturn({})
+    cat_b.GetDataByCatalogname().AndReturn({
+      "syslog_ng": catalog_test.PKG_STRUCT_1,
+    })
+    self.mox.ReplayAll()
+    expected = {'joe at example.com': {
+      'new_pkgs': {
+        catalog_test.PKG_STRUCT_1["file_basename"]: {
+          "pkg": catalog_test.PKG_STRUCT_1,
+          "catalogs": [
+            ("fossil", "amd65", "SolarOS5.12"),
+            ("fossil", "amd65", "SolarOS5.13"),
+            ("fossil", "amd67", "SolarOS5.12"),
+            ("rock", "amd65", "SolarOS5.12"),
+          ],
+        },
+      }}
+    }
+    result = f._GetPkgsByMaintainer(catalogs, rest_client_mock)
+    self.assertEqual(expected, result)
+    # Uncomment to see rendered template
+    # print f._RenderForMaintainer(
+    #     result["joe at example.com"], "joe at example.com",
+    #     "http://mirror.example.com")
+
+  def test_GetPkgsByMaintainerRemoved(self):
+    """A package dropped from the catalog shows up under 'removed_pkgs'."""
+    f = catalog_notifier.NotificationFormatter()
+    rest_client_mock = self.mox.CreateMock(rest.RestClient)
+    cat_a = self.mox.CreateMock(catalog.OpencswCatalog)
+    cat_b = self.mox.CreateMock(catalog.OpencswCatalog)
+    catalogs = [
+        ("fossil", "amd65", "SolarOS5.12", cat_a, cat_b),
+    ]
+    rest_client_mock.GetMaintainerByMd5('cfe40c06e994f6e8d3b191396d0365cb').AndReturn(
+        {"maintainer_email": "joe at example.com"}
+    )
+    cat_a.GetDataByCatalogname().AndReturn({
+      "syslog_ng": catalog_test.PKG_STRUCT_1,
+    })
+    cat_b.GetDataByCatalogname().AndReturn({})
+    self.mox.ReplayAll()
+    expected = {'joe at example.com': {
+      'removed_pkgs': {
+        catalog_test.PKG_STRUCT_1["file_basename"]: {
+          "pkg": catalog_test.PKG_STRUCT_1,
+          "catalogs": [
+            ("fossil", "amd65", "SolarOS5.12"),
+          ],
+        },
+      }}
+    }
+    self.assertEqual(
+        expected,
+        f._GetPkgsByMaintainer(catalogs, rest_client_mock))
+    expected_text = u"""aa"""
+    # Uncomment to see rendered template
+    # print f._RenderForMaintainer(
+    #     expected["joe at example.com"],
+    #     "joe at example.com",
+    #     "http://mirror.example.com")
+
+  def test_GetPkgsByMaintainerTakeover(self):
+    """An upgrade with a maintainer change is a takeover.
+
+    The previous maintainer gets a 'lost_pkg' entry and the new
+    maintainer a 'got_pkg' entry, both keyed by the new basename.
+    """
+    f = catalog_notifier.NotificationFormatter()
+    rest_client_mock = self.mox.CreateMock(rest.RestClient)
+    cat_a = self.mox.CreateMock(catalog.OpencswCatalog)
+    cat_b = self.mox.CreateMock(catalog.OpencswCatalog)
+    catalogs = [
+        ("fossil", "amd65", "SolarOS5.12", cat_a, cat_b),
+    ]
+    previous_pkg = copy.deepcopy(catalog_test.PKG_STRUCT_1)
+    previous_pkg["version"] = "previous_version"
+    previous_pkg["md5sum"] = "previous_md5"
+    cat_a.GetDataByCatalogname().AndReturn({
+      "syslog_ng": previous_pkg,
+    })
+    cat_b.GetDataByCatalogname().AndReturn({
+      "syslog_ng": catalog_test.PKG_STRUCT_1,
+    })
+    rest_client_mock.GetMaintainerByMd5('previous_md5').AndReturn(
+        {"maintainer_email": "jack at example.com"}
+    )
+    rest_client_mock.GetMaintainerByMd5('cfe40c06e994f6e8d3b191396d0365cb').AndReturn(
+        {"maintainer_email": "joe at example.com"}
+    )
+    self.mox.ReplayAll()
+    result = f._GetPkgsByMaintainer(catalogs, rest_client_mock)
+    self.assertTrue("jack at example.com" in result)
+    self.assertEqual({"lost_pkg": {
+      catalog_test.PKG_STRUCT_1["file_basename"]: {
+        "from_pkg": {previous_pkg["file_basename"]: previous_pkg},
+        "to_pkg": catalog_test.PKG_STRUCT_1,
+        "catalogs": [("fossil", "amd65", "SolarOS5.12")],
+      }}},
+      result["jack at example.com"])
+    self.assertEqual({"got_pkg": {
+      catalog_test.PKG_STRUCT_1["file_basename"]: {
+        "from_pkg": {previous_pkg["file_basename"]: previous_pkg},
+        "to_pkg": catalog_test.PKG_STRUCT_1,
+        "catalogs": [("fossil", "amd65", "SolarOS5.12")],
+      }}},
+      result["joe at example.com"])
+    # Uncomment to see rendered templates
+    # print f._RenderForMaintainer(
+    #     result["jack at example.com"], "jack at example.com",
+    #     "http://mirror.example.com")
+    # print f._RenderForMaintainer(
+    #     result["joe at example.com"], "joe at example.com",
+    #     "http://mirror.example.com")
+
+  def test_GetPkgsByMaintainerUpgrade(self):
+    """An upgrade with the same maintainer goes under 'upgraded_pkg'."""
+    f = catalog_notifier.NotificationFormatter()
+    rest_client_mock = self.mox.CreateMock(rest.RestClient)
+    cat_a = self.mox.CreateMock(catalog.OpencswCatalog)
+    cat_b = self.mox.CreateMock(catalog.OpencswCatalog)
+    catalogs = [
+        ("fossil", "amd65", "SolarOS5.12", cat_a, cat_b),
+    ]
+    previous_pkg = copy.deepcopy(catalog_test.PKG_STRUCT_1)
+    previous_pkg["version"] = "previous_version"
+    previous_pkg["md5sum"] = "previous_md5"
+    cat_a.GetDataByCatalogname().AndReturn({
+      "syslog_ng": previous_pkg,
+    })
+    cat_b.GetDataByCatalogname().AndReturn({
+      "syslog_ng": catalog_test.PKG_STRUCT_1,
+    })
+    rest_client_mock.GetMaintainerByMd5('previous_md5').AndReturn(
+        {"maintainer_email": "jack at example.com"}
+    )
+    rest_client_mock.GetMaintainerByMd5('cfe40c06e994f6e8d3b191396d0365cb').AndReturn(
+        {"maintainer_email": "jack at example.com"}
+    )
+    self.mox.ReplayAll()
+    result = f._GetPkgsByMaintainer(catalogs, rest_client_mock)
+    # pprint.pprint(result)
+    self.assertTrue("jack at example.com" in result)
+    # In this scenario, we group packages by the target package (after upgrade)
+    self.assertEqual({"upgraded_pkg": {
+      catalog_test.PKG_STRUCT_1["file_basename"]: {
+        "from_pkg": {previous_pkg["file_basename"]: previous_pkg},
+        "to_pkg": catalog_test.PKG_STRUCT_1,
+        "catalogs": [("fossil", "amd65", "SolarOS5.12")],
+      }}},
+      result["jack at example.com"])
+    # print f._RenderForMaintainer(
+    #     result["jack at example.com"], "jack at example.com",
+    #     "http://mirror.example.com")
+
+if __name__ == '__main__':
+ unittest.main()
Modified: csw/mgar/gar/v2-uwatch2/lib/python/catalog_test.py
===================================================================
--- csw/mgar/gar/v2-uwatch2/lib/python/catalog_test.py 2011-02-13 12:51:06 UTC (rev 13269)
+++ csw/mgar/gar/v2-uwatch2/lib/python/catalog_test.py 2011-02-13 12:55:58 UTC (rev 13270)
@@ -2,28 +2,98 @@
import unittest
import catalog
+import os.path
+from StringIO import StringIO
-class OpencswCatalogUnitTest(unittest.TestCase):
-
- def test_ParseCatalogLine_1(self):
- line = (
+CATALOG_LINE_1 = (
+ "syslog_ng 3.0.4,REV=2009.08.30 "
+ "CSWsyslogng "
+ "syslog_ng-3.0.4,REV=2009.08.30-SunOS5.8-i386-CSW.pkg.gz "
+ "cfe40c06e994f6e8d3b191396d0365cb 137550 "
+ "CSWgcc4corert|CSWeventlog|CSWosslrt|CSWzlib|CSWpcrert|CSWggettextrt|"
+ "CSWglib2|CSWtcpwrap|CSWcswclassutils|CSWcommon none")
+CATALOG_LINE_2 = (
+ "syslog_ng 3.0.4,REV=2009.10.12 "
+ "CSWsyslogng "
+ "syslog_ng-3.0.4,REV=2009.10.12-SunOS5.8-i386-CSW.pkg.gz "
+ "a1e9747ac3aa04c0497d2a3a23885995 137367 "
+ "CSWcswclassutils|CSWgcc4corert|CSWeventlog|CSWosslrt|CSWzlib|CSWpcrert|"
+ "CSWggettextrt|CSWglib2|CSWtcpwrap|CSWcswclassutils|CSWcommon none")
+CATALOG_LINE_3 = (
'tmux 1.2,REV=2010.05.17 CSWtmux '
'tmux-1.2,REV=2010.05.17-SunOS5.9-sparc-CSW.pkg.gz '
'145351cf6186fdcadcd169b66387f72f 214091 '
'CSWcommon|CSWlibevent none none\n')
+
+PKG_STRUCT_1 = {
+ 'category': 'none',
+ 'i_deps': (),
+ 'pkgname': 'CSWsyslogng',
+ 'md5sum': 'cfe40c06e994f6e8d3b191396d0365cb',
+ 'version': '3.0.4,REV=2009.08.30',
+ 'deps': ('CSWgcc4corert', 'CSWeventlog', 'CSWosslrt', 'CSWzlib',
+ 'CSWpcrert', 'CSWggettextrt', 'CSWglib2', 'CSWtcpwrap',
+ 'CSWcswclassutils', 'CSWcommon'),
+ 'file_basename': 'syslog_ng-3.0.4,REV=2009.08.30-SunOS5.8-i386-CSW.pkg.gz',
+ 'size': '137550',
+ 'catalogname': 'syslog_ng'}
+
+
+
+class OpencswCatalogUnitTest(unittest.TestCase):
+
+ def test_ParseCatalogLine_1(self):
oc = catalog.OpencswCatalog(None)
- parsed = oc._ParseCatalogLine(line)
+ parsed = oc._ParseCatalogLine(CATALOG_LINE_3)
expected = {'catalogname': 'tmux',
- 'deps': 'CSWcommon|CSWlibevent',
+ 'deps': ('CSWcommon', 'CSWlibevent'),
'file_basename': 'tmux-1.2,REV=2010.05.17-SunOS5.9-sparc-CSW.pkg.gz',
'md5sum': '145351cf6186fdcadcd169b66387f72f',
- 'none_thing_1': 'none',
- 'none_thing_2': 'none',
+ 'category': 'none',
+ 'i_deps': (),
'pkgname': 'CSWtmux',
'size': '214091',
'version': '1.2,REV=2010.05.17'}
self.assertEquals(expected, parsed)
+ def testGetDataByCatalogname(self):
+ fd = StringIO(CATALOG_LINE_1)
+ oc = catalog.OpencswCatalog(fd)
+ expected = {"syslog_ng": PKG_STRUCT_1}
+ self.assertEqual(expected, oc.GetDataByCatalogname())
+
+class CatalogComparatorUnitTest(unittest.TestCase):
+
+ def testUpdateOnly(self):
+ oc1 = catalog.OpencswCatalog(StringIO(CATALOG_LINE_1))
+ oc2 = catalog.OpencswCatalog(StringIO(CATALOG_LINE_2))
+ c = catalog.CatalogComparator()
+ new_pkgs, removed_pkgs, updated_pkgs = c.GetCatalogDiff(oc1, oc2)
+ self.assertFalse(new_pkgs)
+ self.assertFalse(removed_pkgs)
+ self.assertTrue("from" in updated_pkgs[0])
+
+ def testAddition(self):
+ oc1 = catalog.OpencswCatalog(StringIO(CATALOG_LINE_1))
+ oc2 = catalog.OpencswCatalog(
+ StringIO(CATALOG_LINE_1 + "\n" + CATALOG_LINE_3))
+ c = catalog.CatalogComparator()
+ new_pkgs, removed_pkgs, updated_pkgs = c.GetCatalogDiff(oc1, oc2)
+ self.assertFalse(removed_pkgs)
+ self.assertFalse(updated_pkgs)
+ self.assertEqual(1, len(new_pkgs))
+
+ def testRemoval(self):
+ oc1 = catalog.OpencswCatalog(
+ StringIO(CATALOG_LINE_1 + "\n" + CATALOG_LINE_3))
+ oc2 = catalog.OpencswCatalog(StringIO(CATALOG_LINE_1))
+ c = catalog.CatalogComparator()
+ new_pkgs, removed_pkgs, updated_pkgs = c.GetCatalogDiff(oc1, oc2)
+ self.assertFalse(new_pkgs)
+ self.assertFalse(updated_pkgs)
+ self.assertEqual(1, len(removed_pkgs))
+
+
if __name__ == '__main__':
unittest.main()
Property changes on: csw/mgar/gar/v2-uwatch2/lib/python/catalog_test.py
___________________________________________________________________
Added: svn:executable
+ *
Modified: csw/mgar/gar/v2-uwatch2/lib/python/checkpkg.py
===================================================================
--- csw/mgar/gar/v2-uwatch2/lib/python/checkpkg.py 2011-02-13 12:51:06 UTC (rev 13269)
+++ csw/mgar/gar/v2-uwatch2/lib/python/checkpkg.py 2011-02-13 12:55:58 UTC (rev 13270)
@@ -18,14 +18,10 @@
import models as m
import common_constants
import package_stats
+import struct_util
DESCRIPTION_RE = r"^([\S]+) - (.*)$"
-BAD_CONTENT_REGEXES = (
- # Slightly obfuscating these by using concatenation of strings.
- r'/export' r'/medusa',
- r'/opt' r'/build',
-)
INSTALL_CONTENTS_AVG_LINE_LENGTH = 102.09710677919261
SYS_DEFAULT_RUNPATH = [
@@ -35,8 +31,6 @@
"/lib",
]
-MD5_RE = re.compile(r"^[0123456789abcdef]{32}$")
-
class Error(Exception):
pass
@@ -57,22 +51,6 @@
pass
-def GetOptions():
- parser = optparse.OptionParser()
- parser.add_option("-d", "--debug", dest="debug",
- default=False, action="store_true",
- help="Turn on debugging messages")
- parser.add_option("-p", "--profile", dest="profile",
- default=False, action="store_true",
- help=("Turn on profiling"))
- parser.add_option("-q", "--quiet", dest="quiet",
- default=False, action="store_true",
- help=("Print less messages"))
- (options, args) = parser.parse_args()
- # Using set() to make the arguments unique.
- return options, set(args)
-
-
def ExtractDescription(pkginfo):
desc_re = re.compile(DESCRIPTION_RE)
m = re.match(desc_re, pkginfo["NAME"])
@@ -90,15 +68,11 @@
return m.group("username") if m else None
-def IsMd5(s):
- # For optimization, move the compilation elsewhere.
- return MD5_RE.match(s)
-
def GetPackageStatsByFilenamesOrMd5s(args, debug=False):
filenames = []
md5s = []
for arg in args:
- if IsMd5(arg):
+ if struct_util.IsMd5(arg):
md5s.append(arg)
else:
filenames.append(arg)
Modified: csw/mgar/gar/v2-uwatch2/lib/python/checkpkg2.py
===================================================================
--- csw/mgar/gar/v2-uwatch2/lib/python/checkpkg2.py 2011-02-13 12:51:06 UTC (rev 13269)
+++ csw/mgar/gar/v2-uwatch2/lib/python/checkpkg2.py 2011-02-13 12:55:58 UTC (rev 13270)
@@ -14,6 +14,7 @@
import database
import package_stats
+import struct_util
import checkpkg
import checkpkg_lib
import overrides
@@ -59,8 +60,8 @@
help="Display less messages")
parser.add_option("--catalog-release",
dest="catrel",
- default="unstable",
- help="A catalog release: experimental, unstable, testing, stable.")
+ default="current",
+ help="A catalog release: current, unstable, testing, stable.")
parser.add_option("-r", "--os-releases",
dest="osrel_commas",
help=("Comma separated list of ['SunOS5.9', 'SunOS5.10'], "
@@ -102,7 +103,7 @@
# We need to separate files and md5 sums.
md5_sums, file_list = [], []
for arg in args:
- if checkpkg.MD5_RE.match(arg):
+ if struct_util.IsMd5(arg):
md5_sums.append(arg)
else:
file_list.append(arg)
@@ -162,7 +163,7 @@
if unapplied_overrides:
print textwrap.fill(UNAPPLIED_OVERRIDES, 80)
for override in unapplied_overrides:
- print "* Unused %s" % override
+ print u"* Unused %s" % override
exit_code = bool(tags_for_all_osrels)
sys.exit(exit_code)
Modified: csw/mgar/gar/v2-uwatch2/lib/python/checkpkg_lib.py
===================================================================
--- csw/mgar/gar/v2-uwatch2/lib/python/checkpkg_lib.py 2011-02-13 12:51:06 UTC (rev 13269)
+++ csw/mgar/gar/v2-uwatch2/lib/python/checkpkg_lib.py 2011-02-13 12:55:58 UTC (rev 13270)
@@ -136,15 +136,18 @@
self.debug = debug
self.name = name
self.sqo_pkgs_list = sqo_pkgs_list
- self.errors = []
- self.individual_checks = []
- self.set_checks = []
- self.packages = []
self.osrel = osrel
self.arch = arch
self.catrel = catrel
self.show_progress = show_progress
+ self._ResetState()
+ self.individual_checks = []
+ self.set_checks = []
+ def _ResetState(self):
+ self.errors = []
+ self.packages = []
+
def GetProgressBar(self):
if self.show_progress and not self.debug:
return progressbar.ProgressBar()
@@ -155,10 +158,6 @@
return super(CheckpkgManagerBase, self).GetSqlobjectTriad(
self.osrel, self.arch, self.catrel)
- def GetPackageStatsList(self):
- raise RuntimeError("Please don't use this function as it violates "
- "the Law of Demeter.")
-
def FormatReports(self, errors, messages, gar_lines):
namespace = {
"name": self.name,
@@ -203,7 +202,7 @@
# left is lists and dictionaries.
i = counter.next()
if stats_obj.data_obj:
- raw_pkg_data = cPickle.loads(stats_obj.data_obj.pickle)
+ raw_pkg_data = stats_obj.GetStatsStruct()
else:
raise CatalogDatabaseError(
"%s (%s) is missing the data object."
@@ -219,20 +218,18 @@
Returns a tuple of an exit code and a report.
"""
- # packages_data = self.GetPackageStatsList()
+ self._ResetState()
assert self.sqo_pkgs_list, "The list of packages must not be empty."
db_stat_objs_by_pkgname = {}
for pkg in self.sqo_pkgs_list:
db_stat_objs_by_pkgname[pkg.pkginst.pkgname] = pkg
logging.debug("Deleting old errors from the database.")
+ sqo_os_rel, sqo_arch, sqo_catrel = self.GetSqlobjectTriad()
for pkgname, db_obj in db_stat_objs_by_pkgname.iteritems():
- sqo_os_rel, sqo_arch, sqo_catrel = self.GetSqlobjectTriad()
- db_obj.RemoveCheckpkgResults(
- sqo_os_rel, sqo_arch, sqo_catrel)
+ db_obj.RemoveCheckpkgResults(sqo_os_rel, sqo_arch, sqo_catrel)
errors, messages, gar_lines = self.GetAllTags(self.sqo_pkgs_list)
- no_errors = len(errors) + 1
pbar = self.GetProgressBar()
- pbar.maxval = no_errors
+ pbar.maxval = len(errors) + 1
count = itertools.count(1)
logging.info("Stuffing the candies under the pillow...")
pbar.start()
@@ -342,7 +339,6 @@
paths.append(pkg)
return paths_and_pkgs
-
def GetPkgByPath(self, file_path):
"""Proxies calls to self.system_pkgmap."""
pkgs_in_catalog = self.catalog.GetPkgByPath(
@@ -939,10 +935,26 @@
sqo_srv4, repr(pkg_arch), repr(filename_arch))
return ans
+ def GetConflictingSrv4ByCatalognameResult(self,
+ sqo_srv4, catalogname,
+ sqo_osrel, sqo_arch, sqo_catrel):
+ res = m.Srv4FileStats.select(
+ m.Srv4FileStats.q.catalogname==catalogname
+ ).throughTo.in_catalogs.filter(
+ sqlobject.AND(
+ m.Srv4FileInCatalog.q.osrel==sqo_osrel,
+ m.Srv4FileInCatalog.q.arch==sqo_arch,
+ m.Srv4FileInCatalog.q.catrel==sqo_catrel,
+ m.Srv4FileInCatalog.q.srv4file!=sqo_srv4))
+ return res
+
def AddSrv4ToCatalog(self, sqo_srv4, osrel, arch, catrel):
"""Registers a srv4 file in a catalog."""
logging.debug("AddSrv4ToCatalog(%s, %s, %s, %s)",
sqo_srv4, osrel, arch, catrel)
+ # There are only i386 and sparc catalogs.
+ if arch != 'i386' and arch != 'sparc':
+ raise CatalogDatabaseError("Wrong architecture: %s" % arch)
sqo_osrel, sqo_arch, sqo_catrel = self.GetSqlobjectTriad(
osrel, arch, catrel)
if not self.Srv4MatchesCatalog(sqo_srv4, sqo_arch):
@@ -965,9 +977,16 @@
m.Srv4FileInCatalog.q.arch==sqo_arch,
m.Srv4FileInCatalog.q.catrel==sqo_catrel,
m.Srv4FileInCatalog.q.srv4file!=sqo_srv4))
- if len(list(res)):
+ if res.count():
raise CatalogDatabaseError(
- "There already is a package with that pkgname: %s" % pkginst)
+ "There already is a package with that pkgname: %s" % pkginst.pkgname)
+ res = self.GetConflictingSrv4ByCatalognameResult(
+ sqo_srv4, sqo_srv4.catalogname,
+ sqo_osrel, sqo_arch, sqo_catrel)
+ if res.count():
+ raise CatalogDatabaseError(
+ "There already is a package with that catalogname: %s"
+ % sqo_srv4.catalogname)
# Checking for presence of the same srv4 already in the catalog.
res = m.Srv4FileInCatalog.select(
sqlobject.AND(
@@ -975,9 +994,9 @@
m.Srv4FileInCatalog.q.arch==sqo_arch,
m.Srv4FileInCatalog.q.catrel==sqo_catrel,
m.Srv4FileInCatalog.q.srv4file==sqo_srv4))
- if len(list(res)):
- logging.debug("%s is already part of %s %s %s",
- sqo_srv4, osrel, arch, catrel)
+ if res.count():
+ logging.warning("%s is already part of %s %s %s",
+ sqo_srv4, osrel, arch, catrel)
# Our srv4 is already part of that catalog.
return
obj = m.Srv4FileInCatalog(
@@ -989,15 +1008,18 @@
def RemoveSrv4(self, sqo_srv4, osrel, arch, catrel):
sqo_osrel, sqo_arch, sqo_catrel = self.GetSqlobjectTriad(
osrel, arch, catrel)
- sqo_srv4_in_cat = m.Srv4FileInCatalog.select(
- sqlobject.AND(
- m.Srv4FileInCatalog.q.arch==sqo_arch,
- m.Srv4FileInCatalog.q.osrel==sqo_osrel,
- m.Srv4FileInCatalog.q.catrel==sqo_catrel,
- m.Srv4FileInCatalog.q.srv4file==sqo_srv4)).getOne()
- # Files belonging to this package should not be removed from the catalog
- # as the package might be still present in another catalog.
- sqo_srv4_in_cat.destroySelf()
+ try:
+ sqo_srv4_in_cat = m.Srv4FileInCatalog.select(
+ sqlobject.AND(
+ m.Srv4FileInCatalog.q.arch==sqo_arch,
+ m.Srv4FileInCatalog.q.osrel==sqo_osrel,
+ m.Srv4FileInCatalog.q.catrel==sqo_catrel,
+ m.Srv4FileInCatalog.q.srv4file==sqo_srv4)).getOne()
+ # Files belonging to this package should not be removed from the catalog
+ # as the package might be still present in another catalog.
+ sqo_srv4_in_cat.destroySelf()
+ except sqlobject.main.SQLObjectNotFound, e:
+ logging.warning(e)
class Catalog(CatalogMixin):
Modified: csw/mgar/gar/v2-uwatch2/lib/python/checkpkg_lib_test.py
===================================================================
--- csw/mgar/gar/v2-uwatch2/lib/python/checkpkg_lib_test.py 2011-02-13 12:51:06 UTC (rev 13269)
+++ csw/mgar/gar/v2-uwatch2/lib/python/checkpkg_lib_test.py 2011-02-13 12:55:58 UTC (rev 13270)
@@ -1,18 +1,19 @@
#!/usr/bin/env python2.6
+import checkpkg_lib
import copy
-import unittest
-import checkpkg_lib
-import tag
-import package_stats
+import cPickle
import database
-import sqlobject
+import inspective_package
import models
+import mox
import package_stats
-import inspective_package
-import mox
+import package_stats
+import pprint
+import sqlobject
+import tag
import test_base
-import cPickle
+import unittest
from testdata import stubs
from testdata.neon_stats import pkgstats as neon_stats
@@ -350,7 +351,22 @@
# Verifying that there are some reported error tags.
self.assertTrue(list(models.CheckpkgErrorTag.select()))
+ def testReRunCheckpkg(self):
+ """Error tags should not accumulate."""
+ self.dbc.InitialDataImport()
+ sqo_pkg = package_stats.PackageStats.SaveStats(neon_stats[0], True)
+ cm = checkpkg_lib.CheckpkgManager2(
+ "testname", [sqo_pkg], "SunOS5.9", "sparc", "unstable",
+ show_progress=False)
+ before_count = models.CheckpkgErrorTag.selectBy(srv4_file=sqo_pkg).count()
+ cm.Run()
+ first_run_count = models.CheckpkgErrorTag.selectBy(srv4_file=sqo_pkg).count()
+ cm.Run()
+ second_run_count = models.CheckpkgErrorTag.selectBy(srv4_file=sqo_pkg).count()
+ self.assertEquals(0, before_count)
+ self.assertEquals(first_run_count, second_run_count)
+
class IndividualCheckInterfaceUnitTest(mox.MoxTestBase):
def testNeededFile(self):
Modified: csw/mgar/gar/v2-uwatch2/lib/python/common_constants.py
===================================================================
--- csw/mgar/gar/v2-uwatch2/lib/python/common_constants.py 2011-02-13 12:51:06 UTC (rev 13269)
+++ csw/mgar/gar/v2-uwatch2/lib/python/common_constants.py 2011-02-13 12:55:58 UTC (rev 13270)
@@ -3,17 +3,17 @@
ARCH_ALL = "all"
PHYSICAL_ARCHITECTURES = [ARCH_SPARC, ARCH_i386]
ARCHITECTURES = PHYSICAL_ARCHITECTURES + [ARCH_ALL]
-OS_RELS = [
+OS_RELS = (
u"SunOS5.8",
u"SunOS5.9",
u"SunOS5.10",
u"SunOS5.11",
-]
+)
SYSTEM_SYMLINKS = (
- ("/opt/csw/bdb4", ["/opt/csw/bdb42"]),
- ("/64", ["/amd64", "/sparcv9"]),
- ("/opt/csw/lib/i386", ["/opt/csw/lib"]),
+ ("/opt/csw/bdb4", ("/opt/csw/bdb42",)),
+ ("/64", ("/amd64", "/sparcv9")),
+ ("/opt/csw/lib/i386", ("/opt/csw/lib",)),
)
DEFAULT_INSTALL_CONTENTS_FILE = "/var/sadm/install/contents"
Modified: csw/mgar/gar/v2-uwatch2/lib/python/configuration.py
===================================================================
--- csw/mgar/gar/v2-uwatch2/lib/python/configuration.py 2011-02-13 12:51:06 UTC (rev 13269)
+++ csw/mgar/gar/v2-uwatch2/lib/python/configuration.py 2011-02-13 12:55:58 UTC (rev 13270)
@@ -11,6 +11,7 @@
WS_RE = re.compile(r"\s+")
CHECKPKG_CONFIG_FILENAME = "checkpkg.ini"
+CHECKPKG_RELMGR_CONFIG_FILENAME = "checkpkg_relmgr.ini"
CHECKPKG_AUTO_CONFIG_FILENAME = "checkpkg_auto.ini"
CHECKPKG_DEFAULTS_FILENAME = "checkpkg_defaults.ini"
CHECKPKG_DIR = "%(HOME)s/.checkpkg"
@@ -21,6 +22,8 @@
CHECKPKG_AUTO_CONFIG_FILENAME)
SITE_CONFIG_FILE = os.path.join(CHECKPKG_SITE_CONFIG_DIR,
CHECKPKG_CONFIG_FILENAME)
+SITE_CONFIG_RELMGR_FILE = os.path.join(CHECKPKG_SITE_CONFIG_DIR,
+ CHECKPKG_RELMGR_CONFIG_FILENAME)
DEFAULTS_FILE = os.path.join(CHECKPKG_SRC_BASEDIR, CHECKPKG_DEFAULTS_FILENAME)
CONFIGURATION_FILE_LOCATIONS = [
@@ -36,10 +39,11 @@
# file, it's expected that checkpkg will connect to the shared database.
# To achieve this, the site-global config has to have priority over the
# autogenerated file.
- (DEFAULTS_FILE, True),
- (AUTO_CONFIG_FILE_TMPL, False),
- (SITE_CONFIG_FILE, False),
- (USER_CONFIG_FILE_TMPL, False)
+ (DEFAULTS_FILE, True),
+ (AUTO_CONFIG_FILE_TMPL, False),
+ (SITE_CONFIG_FILE, False),
+ (SITE_CONFIG_RELMGR_FILE, False),
+ (USER_CONFIG_FILE_TMPL, False)
]
@@ -59,36 +63,47 @@
else: raise
+def HomeExists():
+ if "HOME" not in os.environ:
+ return False
+ return True
+
+
def GetConfig():
config = ConfigParser.SafeConfigParser()
file_was_found = False
for file_name_tmpl, default_file in CONFIGURATION_FILE_LOCATIONS:
- filename = file_name_tmpl % os.environ
- if os.path.exists(filename):
- if not default_file:
- file_was_found = True
- config.read(file_name_tmpl % os.environ)
+ filename = None
+ try:
+ filename = file_name_tmpl % os.environ
+ if os.path.exists(filename):
+ if not default_file:
+ file_was_found = True
+ config.read(file_name_tmpl % os.environ)
+ except KeyError, e:
+ logging.warn(e)
if not file_was_found:
- db_file = "%(HOME)s/.checkpkg/checkpkg.db" % os.environ
- checkpkg_dir = CHECKPKG_DIR % os.environ
- MkdirP(checkpkg_dir)
- config_file = AUTO_CONFIG_FILE_TMPL % os.environ
- logging.warning(
- "No configuration file found. Will attempt to create "
- "an sane default configuration in %s."
- % repr(config_file))
- if not config.has_section("database"):
- config.add_section("database")
- config.set("database", "type", "sqlite")
- config.set("database", "name", db_file)
- config.set("database", "host", "")
- config.set("database", "user", "")
- config.set("database", "password", "")
- config.set("database", "auto_manage", "yes")
- fd = open(config_file, "w")
- config.write(fd)
- fd.close()
- logging.debug("Configuration has been written.")
+ if HomeExists():
+ db_file = os.path.join(CHECKPKG_DIR % os.environ, "checkpkg.db")
+ checkpkg_dir = CHECKPKG_DIR % os.environ
+ MkdirP(checkpkg_dir)
+ config_file = AUTO_CONFIG_FILE_TMPL % os.environ
+ logging.warning(
+ "No configuration file found. Will attempt to create "
+ "an sane default configuration in %s."
+ % repr(config_file))
+ if not config.has_section("database"):
+ config.add_section("database")
+ config.set("database", "type", "sqlite")
+ config.set("database", "name", db_file)
+ config.set("database", "host", "")
+ config.set("database", "user", "")
+ config.set("database", "password", "")
+ config.set("database", "auto_manage", "yes")
+ fd = open(config_file, "w")
+ config.write(fd)
+ fd.close()
+ logging.debug("Configuration has been written.")
if not config.has_section("database"):
logging.fatal("Section 'database' not found in the config file. "
"Please refer to the documentation: "
Copied: csw/mgar/gar/v2-uwatch2/lib/python/csw_upload_pkg.py (from rev 13268, csw/mgar/gar/v2/lib/python/csw_upload_pkg.py)
===================================================================
--- csw/mgar/gar/v2-uwatch2/lib/python/csw_upload_pkg.py (rev 0)
+++ csw/mgar/gar/v2-uwatch2/lib/python/csw_upload_pkg.py 2011-02-13 12:55:58 UTC (rev 13270)
@@ -0,0 +1,288 @@
+#!/usr/bin/env python2.6
+
+"""csw_upload_pkg.py - uploads packages to the database.
+
+POST using pycurl code example taken from:
+http://pycurl.cvs.sourceforge.net/pycurl/pycurl/tests/test_post2.py?view=markup
+"""
+
+from StringIO import StringIO
+import pycurl
+import logging
+import optparse
+import hashlib
+import os.path
+import opencsw
+import json
+import common_constants
+import socket
+
+
+BASE_URL = "http://buildfarm.opencsw.org/releases/"
+USAGE = """%prog [ options ] <pkg1> [ <pkg2> [ ... ] ]
+
+Uploads a set of packages to the unstable catalog in opencsw-future.
+
+- When an ARCH=all package is sent, it's added to both sparc and i386 catalogs
+- When a SunOS5.x package is sent, it's added to catalogs SunOS5.x,
+ SunOS5.(x+1), up to SunOS5.11.
+- If a package update is sent, the tool uses catalogname to identify the
+ package it's supposed to replace
+
+The --remove option affects the same catalogs as the regular use, except that
+it removes assignments of a given package to catalogs, instead of adding them.
+
+For more information, see:
+http://wiki.opencsw.org/automated-release-process#toc0
+"""
+
class Error(Exception):
  """Base class for all errors raised by this module."""
  pass
+
+
class RestCommunicationError(Error):
  """Raised when the REST server returns an error HTTP code."""
  pass
+
+
class PackageCheckError(Error):
  """A problem with the package, e.g. a non-CSW vendor tag."""
+
+
class DataError(Error):
  """Unexpected data found, e.g. an empty server response."""
+
+
class Srv4Uploader(object):
  """Uploads srv4 package files to the release database via its REST API.

  Also supports removing package-to-catalog assignments (--remove).
  """

  def __init__(self, filenames, debug=False):
    """Args:
      filenames: A list of paths to srv4 package files.
      debug: If True, enables pycurl verbose output.
    """
    self.filenames = filenames
    # Cache of md5 sums keyed by file name, so each file is read only once.
    self.md5_by_filename = {}
    self.debug = debug

  def Upload(self):
    """Uploads all files and assigns them to the matching catalogs.

    Raises:
      PackageCheckError: If a package does not carry the CSW vendor tag.
    """
    for filename in self.filenames:
      parsed_basename = opencsw.ParsePackageFileName(
          os.path.basename(filename))
      if parsed_basename["vendortag"] != "CSW":
        raise PackageCheckError(
            "Package vendor tag is %s instead of CSW."
            % parsed_basename["vendortag"])
      self._UploadFile(filename)

  def Remove(self):
    """Removes all files from the catalogs they are assigned to."""
    for filename in self.filenames:
      self._RemoveFile(filename)

  def _RemoveFile(self, filename):
    """Removes a single file from all the matching catalogs.

    Raises:
      DataError: If no metadata is found for the file.
    """
    md5_sum = self._GetFileMd5sum(filename)
    file_in_allpkgs, file_metadata = self._GetSrv4FileMetadata(md5_sum)
    if not file_metadata:
      # Consistent with _UploadFile; without this check, indexing None
      # below would raise a confusing TypeError.
      raise DataError("file_metadata is empty: %s" % repr(file_metadata))
    osrel = file_metadata['osrel']
    arch = file_metadata['arch']
    self._IterateOverCatalogs(
        filename, file_metadata,
        arch, osrel, self._RemoveFromCatalog)

  def _RemoveFromCatalog(self, filename, arch, osrel, file_metadata):
    """Sends a DELETE request removing the package from one catalog.

    Raises:
      RestCommunicationError: On a HTTP 4xx response.
    """
    md5_sum = self._GetFileMd5sum(filename)
    url = (
        "%scatalogs/unstable/%s/%s/%s/"
        % (BASE_URL, arch, osrel, md5_sum))
    logging.debug("DELETE @ URL: %s %s", type(url), url)
    c = pycurl.Curl()
    d = StringIO()
    h = StringIO()
    c.setopt(pycurl.URL, str(url))
    c.setopt(pycurl.CUSTOMREQUEST, "DELETE")
    c.setopt(pycurl.WRITEFUNCTION, d.write)
    c.setopt(pycurl.HEADERFUNCTION, h.write)
    c.setopt(pycurl.HTTPHEADER, ["Expect:"])  # Fixes the HTTP 417 error
    if self.debug:
      c.setopt(c.VERBOSE, 1)
    c.perform()
    http_code = c.getinfo(pycurl.HTTP_CODE)
    logging.debug(
        "DELETE curl getinfo: %s %s %s",
        type(http_code),
        http_code,
        c.getinfo(pycurl.EFFECTIVE_URL))
    c.close()
    if 400 <= http_code <= 499:
      raise RestCommunicationError("%s - HTTP code: %s" % (url, http_code))

  def _GetFileMd5sum(self, filename):
    """Returns the md5 sum of a file, computing and caching it on first use."""
    if filename not in self.md5_by_filename:
      logging.debug("_GetFileMd5sum(%s): Reading the file", filename)
      with open(filename, "rb") as fd:
        # 'md5_hash' rather than 'hash', to avoid shadowing the builtin.
        md5_hash = hashlib.md5()
        md5_hash.update(fd.read())
        self.md5_by_filename[filename] = md5_hash.hexdigest()
    return self.md5_by_filename[filename]

  def _IterateOverCatalogs(self, filename, file_metadata, arch, osrel, callback):
    """Calls callback(filename, arch, osrel, file_metadata) per catalog.

    Implements backward compatibility: a package for SunOS5.x is also
    assigned to SunOS5.(x+n) for n=(0, 1, ...), and an ARCH=all package
    goes to both the sparc and i386 catalogs.

    Raises:
      DataError: If osrel is not one of the known OS releases.
    """
    osrels = None
    for idx, known_osrel in enumerate(common_constants.OS_RELS):
      if osrel == known_osrel:
        osrels = common_constants.OS_RELS[idx:]
        break
    if osrels is None:
      # Without this guard an unknown osrel would surface as a confusing
      # NameError below.
      raise DataError("Unknown OS release: %s" % repr(osrel))
    if arch == 'all':
      archs = ('sparc', 'i386')
    else:
      archs = (arch,)
    for arch in archs:
      for osrel in osrels:
        callback(filename, arch, osrel, file_metadata)

  def _UploadFile(self, filename):
    """Uploads a single file (unless already known) and catalogs it.

    Raises:
      DataError: If no metadata is available after the upload.
    """
    md5_sum = self._GetFileMd5sum(filename)
    file_in_allpkgs, file_metadata = self._GetSrv4FileMetadata(md5_sum)
    if file_in_allpkgs:
      logging.debug("File %s already uploaded.", filename)
    else:
      logging.debug("Uploading %s.", filename)
      self._PostFile(filename)
      file_in_allpkgs, file_metadata = self._GetSrv4FileMetadata(md5_sum)
    logging.debug("file_metadata %s", repr(file_metadata))
    if not file_metadata:
      raise DataError("file_metadata is empty: %s" % repr(file_metadata))
    osrel = file_metadata['osrel']
    arch = file_metadata['arch']
    self._IterateOverCatalogs(
        filename, file_metadata,
        arch, osrel, self._InsertIntoCatalog)

  def _InsertIntoCatalog(self, filename, arch, osrel, file_metadata):
    """PUTs to the catalog URL, assigning the package to one catalog.

    Returns:
      The HTTP code returned by the server.

    Raises:
      RestCommunicationError: On a HTTP 4xx response.
    """
    logging.info(
        "_InsertIntoCatalog(%s, %s, %s)",
        repr(arch), repr(osrel), repr(filename))
    md5_sum = self._GetFileMd5sum(filename)
    basename = os.path.basename(filename)
    parsed_basename = opencsw.ParsePackageFileName(basename)
    logging.debug("parsed_basename: %s", parsed_basename)
    url = (
        "%scatalogs/unstable/%s/%s/%s/"
        % (BASE_URL, arch, osrel, md5_sum))
    logging.debug("URL: %s %s", type(url), url)
    c = pycurl.Curl()
    d = StringIO()
    h = StringIO()
    # An empty request body; the URL alone identifies the package and
    # the catalog.
    s = StringIO()
    c.setopt(pycurl.URL, str(url))
    c.setopt(pycurl.PUT, 1)
    c.setopt(pycurl.UPLOAD, 1)
    c.setopt(pycurl.INFILESIZE_LARGE, s.len)
    c.setopt(pycurl.READFUNCTION, s.read)
    c.setopt(pycurl.WRITEFUNCTION, d.write)
    c.setopt(pycurl.HEADERFUNCTION, h.write)
    c.setopt(pycurl.HTTPHEADER, ["Expect:"])  # Fixes the HTTP 417 error
    if self.debug:
      c.setopt(c.VERBOSE, 1)
    c.perform()
    http_code = c.getinfo(pycurl.HTTP_CODE)
    logging.debug(
        "curl getinfo: %s %s %s",
        type(http_code),
        http_code,
        c.getinfo(pycurl.EFFECTIVE_URL))
    c.close()
    if 400 <= http_code <= 499:
      if not self.debug:
        # In debug mode, all headers are printed to screen, and we aren't
        # interested in the response body.
        logging.fatal("Response: %s %s", http_code, d.getvalue())
      raise RestCommunicationError("%s - HTTP code: %s" % (url, http_code))
    else:
      logging.info("Response: %s %s", http_code, d.getvalue())
    return http_code

  def _GetSrv4FileMetadata(self, md5_sum):
    """Fetches package metadata from the REST interface.

    Returns:
      A (successful, metadata) tuple; metadata is None unless the server
      returned a 2xx response.
    """
    logging.debug("_GetSrv4FileMetadata(%s)", repr(md5_sum))
    url = BASE_URL + "srv4/" + md5_sum + "/"
    c = pycurl.Curl()
    d = StringIO()
    h = StringIO()
    c.setopt(pycurl.URL, url)
    c.setopt(pycurl.WRITEFUNCTION, d.write)
    c.setopt(pycurl.HEADERFUNCTION, h.write)
    if self.debug:
      c.setopt(c.VERBOSE, 1)
    c.perform()
    http_code = c.getinfo(pycurl.HTTP_CODE)
    logging.debug(
        "curl getinfo: %s %s %s",
        type(http_code),
        http_code,
        c.getinfo(pycurl.EFFECTIVE_URL))
    c.close()
    successful = 200 <= http_code <= 299
    metadata = None
    if successful:
      metadata = json.loads(d.getvalue())
    else:
      # Lazy %-args: the message is only built if this level is logged.
      logging.info("Data for %s not found", repr(md5_sum))
    return successful, metadata

  def _PostFile(self, filename):
    """POSTs the package file itself to the srv4 collection.

    Raises:
      RestCommunicationError: On a HTTP 4xx response.
    """
    logging.info("_PostFile(%s)", repr(filename))
    md5_sum = self._GetFileMd5sum(filename)
    c = pycurl.Curl()
    d = StringIO()
    h = StringIO()
    url = BASE_URL + "srv4/"
    c.setopt(pycurl.URL, url)
    c.setopt(pycurl.POST, 1)
    post_data = [
        ('srv4_file', (pycurl.FORM_FILE, filename)),
        ('submit', 'Upload'),
        ('md5_sum', md5_sum),
        ('basename', os.path.basename(filename)),
    ]
    c.setopt(pycurl.HTTPPOST, post_data)
    c.setopt(pycurl.WRITEFUNCTION, d.write)
    c.setopt(pycurl.HEADERFUNCTION, h.write)
    c.setopt(pycurl.HTTPHEADER, ["Expect:"])  # Fixes the HTTP 417 error
    if self.debug:
      c.setopt(c.VERBOSE, 1)
    c.perform()
    http_code = c.getinfo(pycurl.HTTP_CODE)
    c.close()
    if self.debug:
      logging.debug("*** Headers")
      logging.debug(h.getvalue())
      logging.debug("*** Data")
      logging.debug(d.getvalue())
    logging.debug("File POST http code: %s", http_code)
    if 400 <= http_code <= 499:
      raise RestCommunicationError("%s - HTTP code: %s" % (url, http_code))
+
+
def main():
  """Command-line entry point: parse flags, then upload or remove packages."""
  parser = optparse.OptionParser(USAGE)
  parser.add_option("-d", "--debug",
                    dest="debug",
                    default=False, action="store_true")
  parser.add_option("--remove",
                    dest="remove",
                    default=False, action="store_true",
                    help="Remove packages from catalogs instead of adding them")
  options, args = parser.parse_args()
  log_level = logging.DEBUG if options.debug else logging.INFO
  logging.basicConfig(level=log_level)
  logging.debug("args: %s", args)
  if not socket.gethostname().startswith('login'):
    logging.warning("This script is meant to be run on the login host.")
  uploader = Srv4Uploader(args, debug=options.debug)
  if options.remove:
    uploader.Remove()
  else:
    uploader.Upload()


if __name__ == '__main__':
  main()
Modified: csw/mgar/gar/v2-uwatch2/lib/python/database.py
===================================================================
--- csw/mgar/gar/v2-uwatch2/lib/python/database.py 2011-02-13 12:51:06 UTC (rev 13269)
+++ csw/mgar/gar/v2-uwatch2/lib/python/database.py 2011-02-13 12:55:58 UTC (rev 13270)
@@ -82,12 +82,17 @@
ldm.InitialDataImport()
ldm.SetDatabaseSchemaVersion()
else:
- raise DatabaseError(
+ msg = (
"Database schema does not match the application. "
"Database contains: %s, "
"the application expects: %s. "
- "Make sure your application sources are up to date."
% (ldm.GetDatabaseSchemaVersion(), DB_SCHEMA_VERSION))
+ if DB_SCHEMA_VERSION < ldm.GetDatabaseSchemaVersion():
+ msg += "Make sure your application sources are up to date."
+ elif DB_SCHEMA_VERSION > ldm.GetDatabaseSchemaVersion():
+ msg += ("Make sure your database is up to date. "
+ "Re-create it if necessary.")
+ raise DatabaseError(msg)
def _CheckAndMaybeFixFreshness(self, auto_fix):
ldm = LocalDatabaseManager()
@@ -260,14 +265,27 @@
def GetFileMtime(self):
if not self.file_mtime:
- stat_data = os.stat(SYSTEM_PKGMAP)
- self.file_mtime = stat_data.st_mtime
+ try:
+ stat_data = os.stat(SYSTEM_PKGMAP)
+ self.file_mtime = stat_data.st_mtime
+ except OSError, e:
+ logging.warning("Could not open %s: %s", SYSTEM_PKGMAP, e)
return self.file_mtime
def IsDatabaseUpToDate(self):
f_mtime_epoch = self.GetFileMtime()
d_mtime_epoch = self.GetDatabaseMtime()
- f_mtime = time.gmtime(int(f_mtime_epoch))
+
+ # On some systems where pkgdb runs, f_mtime_epoch can be None. To
+ # allow to run pkgdb, the absence of the SYSTEM_PKGMAP file must be
+ # tolerated. The GetDatabaseMtime function returns None if the file
+ # is absent. If f_mtime_epoch cannot be translated into a number,
+ # it's set to zero.
+ f_mtime = 0
+ try:
+ f_mtime = time.gmtime(int(f_mtime_epoch))
+ except TypeError, e:
+ logging.warning("Could not get file mtime: %s", e)
d_mtime = time.gmtime(int(d_mtime_epoch))
logging.debug("IsDatabaseUpToDate: f_mtime %s, d_time: %s", f_mtime, d_mtime)
# Rounding up to integer seconds. There is a race condition:
@@ -286,6 +304,8 @@
# Using the same stuff pkgdb is using.
logging.warning(
"Refreshing the database. It may take a long time, please be patient.")
+ logging.warning("If you need a way to make it faster, please see:")
+ logging.warning("http://wiki.opencsw.org/checkpkg#toc5")
infile_contents = common_constants.DEFAULT_INSTALL_CONTENTS_FILE
infile_pkginfo = None
logging.debug("Indexing.")
Modified: csw/mgar/gar/v2-uwatch2/lib/python/dependency_checks.py
===================================================================
--- csw/mgar/gar/v2-uwatch2/lib/python/dependency_checks.py 2011-02-13 12:51:06 UTC (rev 13269)
+++ csw/mgar/gar/v2-uwatch2/lib/python/dependency_checks.py 2011-02-13 12:55:58 UTC (rev 13270)
@@ -95,8 +95,11 @@
error_mgr.ReportError(
pkgname,
"deprecated-library",
- ("%s %s %s/%s"
- % (binary_info["path"], msg, resolved_path, soname)))
+ ("file=%s lib=%s/%s"
+ % (binary_info["path"], resolved_path, soname)))
+ messenger.Message(
+ "Binary %s links to a deprecated library %s/%s. %s"
+ % (binary_info["path"], resolved_path, soname, msg))
if not resolved:
orphan_sonames.append((soname, binary_info["path"]))
if path_list:
Modified: csw/mgar/gar/v2-uwatch2/lib/python/models.py
===================================================================
--- csw/mgar/gar/v2-uwatch2/lib/python/models.py 2011-02-13 12:51:06 UTC (rev 13269)
+++ csw/mgar/gar/v2-uwatch2/lib/python/models.py 2011-02-13 12:55:58 UTC (rev 13270)
@@ -4,6 +4,7 @@
import logging
import sqlobject
+import os.path
from sqlobject import sqlbuilder
import cPickle
@@ -24,20 +25,39 @@
name = sqlobject.UnicodeCol(length=255, unique=True, notNone=True)
type = sqlobject.ForeignKey('CatalogReleaseType', notNone=True)
+ def __unicode__(self):
+ return u"Catalog release: %s" % self.name
+
class OsRelease(sqlobject.SQLObject):
"Short name: SunOS5.9, long name: Solaris 9"
short_name = sqlobject.UnicodeCol(length=40, unique=True, notNone=True)
full_name = sqlobject.UnicodeCol(length=255, unique=True, notNone=True)
+ def __unicode__(self):
+ return u"OS release: %s" % self.full_name
+
class Architecture(sqlobject.SQLObject):
"One of: 'sparc', 'x86'."
name = sqlobject.UnicodeCol(length=40, unique=True, notNone=True)
+ def __unicode__(self):
+ return u"Architecture: %s" % self.name
+
class Maintainer(sqlobject.SQLObject):
"""The maintainer of the package, identified by the e-mail address."""
email = sqlobject.UnicodeCol(length=255, unique=True, notNone=True)
full_name = sqlobject.UnicodeCol(length=255, default=None)
+ def ObfuscatedEmail(self):
+ username, domain = self.email.split("@")
+ username = username[:-3] + "..."
+ return "@".join((username, domain))
+
+ def __unicode__(self):
+ return u"%s <%s>" % (
+ self.full_name or "Maintainer full name unknown",
+ self.ObfuscatedEmail())
+
class Host(sqlobject.SQLObject):
"Hostname, as returned by socket.getfqdn()"
fqdn = sqlobject.UnicodeCol(length=255, unique=True, notNone=True)
@@ -78,6 +98,9 @@
srv4_file = sqlobject.ForeignKey('Srv4FileStats')
basename_idx = sqlobject.DatabaseIndex('basename')
+ def __unicode__(self):
+ return u"File: %s" % os.path.join(self.path, self.basename)
+
class Srv4FileStatsBlob(sqlobject.SQLObject):
"""Holds pickled data structures.
@@ -176,7 +199,35 @@
CheckpkgOverride.sqlmeta.table,
CheckpkgOverride.q.srv4_file==self)))
+ def __unicode__(self):
+ return (
+ u"Package: %s-%s, %s"
+ % (self.catalogname, self.version_string, self.arch.name))
+ def GetStatsStruct(self):
+ return cPickle.loads(str(self.data_obj.pickle))
+
+ def GetRestRepr(self):
+ mimetype = "application/x-vnd.opencsw.pkg;type=srv4-detail"
+ data = {
+ 'catalogname': self.catalogname,
+ 'basename': self.basename,
+ 'md5_sum': self.md5_sum,
+ 'size': self.size,
+ 'maintainer_email': self.maintainer.email,
+ 'maintainer_full_name': self.maintainer.full_name,
+ 'version_string': self.version_string,
+ 'arch': self.arch.name,
+ 'pkgname': self.pkginst.pkgname,
+ 'mtime': unicode(self.mtime),
+ 'osrel': self.os_rel.short_name,
+ 'rev': self.rev,
+ 'filename_arch': self.filename_arch.name,
+ # 'in_catalogs': unicode([unicode(x) for x in self.in_catalogs]),
+ }
+ return mimetype, data
+
+
class CheckpkgErrorTagMixin(object):
def ToGarSyntax(self):
@@ -214,7 +265,10 @@
arch = sqlobject.ForeignKey('Architecture', notNone=True)
catrel = sqlobject.ForeignKey('CatalogRelease', notNone=True)
+ def __unicode__(self):
+ return u"Error: %s %s %s" % (self.pkgname, self.tag_name, self.tag_info)
+
class CheckpkgOverride(sqlobject.SQLObject):
# Overrides don't need to contain catalog parameters.
srv4_file = sqlobject.ForeignKey('Srv4FileStats', notNone=True)
@@ -222,6 +276,12 @@
tag_name = sqlobject.UnicodeCol(notNone=True)
tag_info = sqlobject.UnicodeCol(default=None)
+ def __unicode__(self):
+ return (u"Override: %s: %s %s" %
+ (self.pkgname,
+ self.tag_name,
+ self.tag_info or ""))
+
def DoesApply(self, tag):
"""Figures out if this override applies to the given tag."""
basket_a = {}
@@ -251,10 +311,36 @@
'arch', 'osrel', 'catrel', 'srv4file',
unique=True)
+ def __unicode__(self):
+ return (
+ u"%s is in catalog %s %s %s"
+ % (self.srv4file,
+ self.arch.name,
+ self.osrel.full_name,
+ self.catrel.name))
+
class Srv4DependsOn(sqlobject.SQLObject):
"""Models dependencies."""
srv4_file = sqlobject.ForeignKey('Srv4FileStats', notNone=True)
pkginst = sqlobject.ForeignKey('Pkginst', notNone=True)
dep_uniq_idx = sqlobject.DatabaseIndex(
'srv4_file', 'pkginst')
+
+
def GetCatPackagesResult(sqo_osrel, sqo_arch, sqo_catrel):
  """Selects the packages assigned to one catalog (osrel, arch, catrel).

  Joins Srv4FileStats with Srv4FileInCatalog and keeps only rows with
  use_to_generate_catalogs set, ordered by catalogname.

  Args:
    sqo_osrel: An OsRelease SQLObject.
    sqo_arch: An Architecture SQLObject.
    sqo_catrel: A CatalogRelease SQLObject.

  Returns:
    A sqlobject select result iterating over Srv4FileStats objects.
  """
  join = [
      sqlbuilder.INNERJOINOn(None,
        Srv4FileInCatalog,
        Srv4FileInCatalog.q.srv4file==Srv4FileStats.q.id),
  ]
  res = Srv4FileStats.select(
      sqlobject.AND(
        Srv4FileInCatalog.q.osrel==sqo_osrel,
        Srv4FileInCatalog.q.arch==sqo_arch,
        Srv4FileInCatalog.q.catrel==sqo_catrel,
        Srv4FileStats.q.use_to_generate_catalogs==True,
      ),
      join=join,
      ).orderBy('catalogname')
  return res
Modified: csw/mgar/gar/v2-uwatch2/lib/python/models_test.py
===================================================================
--- csw/mgar/gar/v2-uwatch2/lib/python/models_test.py 2011-02-13 12:51:06 UTC (rev 13269)
+++ csw/mgar/gar/v2-uwatch2/lib/python/models_test.py 2011-02-13 12:55:58 UTC (rev 13270)
@@ -1,5 +1,11 @@
+#!/usr/bin/env python2.6
+
import unittest
+import mox
+import test_base
import models
+import sqlobject
+import datetime
class CheckpkgErrorTagUnitTest(unittest.TestCase):
@@ -28,6 +34,52 @@
self.assertEquals(t1, t2)
class Srv4FileStatsUnitTest(test_base.SqlObjectTestMixin, mox.MoxTestBase):
  """Tests for the Srv4FileStats model, using the test database mixin."""

  def setUp(self):
    super(Srv4FileStatsUnitTest, self).setUp()
    self.dbc.InitialDataImport()
    # Arbitrary rows from the initial data import serve as fixtures.
    self.sqo_arch = models.Architecture.selectBy(id=1).getOne()
    self.sqo_osrel = models.OsRelease.selectBy(id=1).getOne()
    self.sqo_catrel = models.CatalogRelease.selectBy(id=1).getOne()
    self.pkginst = models.Pkginst(pkgname="CSWfoo")
    # NOTE(review): "joe at example.com" looks like mailing-list-archiver
    # mangling of "joe@example.com" -- confirm against the repository.
    self.maintainer = models.Maintainer(
        email='joe at example.com',
        full_name='Joe Bloggs')
    # A minimal stats row; the md5 sum doesn't need to be real here.
    self.p = models.Srv4FileStats(
        arch=self.sqo_arch,
        basename="foo.pkg",
        catalogname="foo",
        data_obj=None,
        filename_arch=self.sqo_arch,
        latest=True,
        maintainer=self.maintainer,
        md5_sum="not a real one",
        size=1L,
        mtime=datetime.datetime.now(),
        os_rel=self.sqo_osrel,
        pkginst=self.pkginst,
        registered=True,
        use_to_generate_catalogs=True,
        rev="2011.01.01",
        stats_version=0,
        version_string="1.0,REV=2011.01.01",
    )

  def testRemoveCheckpkgResults(self):
    # Instantiating the SQLObject implicitly inserts the row.
    error_tag = models.CheckpkgErrorTag(
        tag_name="foo",
        tag_info="foo_info",
        srv4_file=self.p,
        os_rel=self.sqo_osrel,
        arch=self.sqo_arch,
        catrel=self.sqo_catrel,
    )
    self.assertEqual(1, models.CheckpkgErrorTag.select().count())
    self.p.RemoveCheckpkgResults(self.sqo_osrel, self.sqo_arch, self.sqo_catrel)
    self.assertEqual(0, models.CheckpkgErrorTag.select().count())
+
+
if __name__ == '__main__':
unittest.main()
Property changes on: csw/mgar/gar/v2-uwatch2/lib/python/models_test.py
___________________________________________________________________
Added: svn:executable
+ *
Modified: csw/mgar/gar/v2-uwatch2/lib/python/opencsw.py
===================================================================
--- csw/mgar/gar/v2-uwatch2/lib/python/opencsw.py 2011-02-13 12:51:06 UTC (rev 13269)
+++ csw/mgar/gar/v2-uwatch2/lib/python/opencsw.py 2011-02-13 12:55:58 UTC (rev 13270)
@@ -306,8 +306,9 @@
if relevant_pkgs:
package_files.append(relevant_pkgs[-1])
if not package_files:
- raise PackageError("Could not find %s in %s"
- % (repr(software), repr(self.dir_path)))
+ logging.warning(
+ "Could not find any %s-* packages in %s",
+ repr(software), repr(self.dir_path))
logging.debug("The latest packages %s in %s are %s",
repr(software),
repr(self.dir_path),
Modified: csw/mgar/gar/v2-uwatch2/lib/python/opencsw_test.py
===================================================================
--- csw/mgar/gar/v2-uwatch2/lib/python/opencsw_test.py 2011-02-13 12:51:06 UTC (rev 13269)
+++ csw/mgar/gar/v2-uwatch2/lib/python/opencsw_test.py 2011-02-13 12:55:58 UTC (rev 13270)
@@ -116,6 +116,7 @@
parsed = opencsw.ParsePackageFileName(file_name)
self.assertEqual(parsed["arch"], "sparc")
self.assertEqual(parsed["catalogname"], "boost-jam")
+ self.assertEqual(parsed["vendortag"], "UNCOMMITTED")
def testParsePackageFileName_Nonsense(self):
"""Checks if the function can sustain a non-conformant string."""
Modified: csw/mgar/gar/v2-uwatch2/lib/python/package.py
===================================================================
--- csw/mgar/gar/v2-uwatch2/lib/python/package.py 2011-02-13 12:51:06 UTC (rev 13269)
+++ csw/mgar/gar/v2-uwatch2/lib/python/package.py 2011-02-13 12:55:58 UTC (rev 13270)
@@ -48,6 +48,7 @@
def __init__(self, pkg_path, debug=False):
super(CswSrv4File, self).__init__()
+ logging.debug("CswSrv4File(%s, debug=%s)", repr(pkg_path), debug)
self.pkg_path = pkg_path
self.workdir = None
self.gunzipped_path = None
@@ -81,6 +82,7 @@
# Causing the class to stat the .gz file. This call throws away the
# result, but the result will be cached as a object member.
self.GetMtime()
+ self.GetMd5sum()
base_name_gz = os.path.split(self.pkg_path)[1]
shutil.copy(self.pkg_path, self.GetWorkDir())
self.pkg_path = os.path.join(self.GetWorkDir(), base_name_gz)
Modified: csw/mgar/gar/v2-uwatch2/lib/python/package_checks.py
===================================================================
--- csw/mgar/gar/v2-uwatch2/lib/python/package_checks.py 2011-02-13 12:51:06 UTC (rev 13269)
+++ csw/mgar/gar/v2-uwatch2/lib/python/package_checks.py 2011-02-13 12:55:58 UTC (rev 13270)
@@ -156,6 +156,14 @@
},
}
+
# Prefixes under which packaged files are allowed to be installed.
# Consumed by CheckPrefixDirs; any file outside these trees is reported
# as bad-location-of-file.
ALLOWED_STARTING_PATHS = frozenset([
  "/opt/csw",
  "/etc/opt/csw",
  "/var/opt/csw",
])
+
+
def RemovePackagesUnderInstallation(paths_and_pkgs_by_soname,
pkgs_to_be_installed):
"""Emulates uninstallation of packages prior to installation
@@ -430,7 +438,7 @@
msg += "URL: %s" % OBSOLETE_DEPS[obsolete_pkg]["url"]
if not msg:
msg = None
- logger.info(msg)
+ messenger.Message(msg)
def CheckArchitectureVsContents(pkg_data, error_mgr, logger, messenger):
@@ -962,12 +970,17 @@
if binary_info["path"] in shared_libs:
if su.IsLibraryLinkable(binary_info["path"]):
# It is a shared library and other projects might link to it.
+ # Some libraries don't declare a soname; compile time linker defaults
+ # to their file name.
if "soname" in binary_info and binary_info["soname"]:
soname = binary_info["soname"]
else:
soname = os.path.split(binary_info["path"])[1]
linkable_shared_libs.append((soname, binary_info))
check_names = True
+ logging.debug("CheckSharedLibraryNamingPolicy(): "
+ "linkable shared libs of %s: %s"
+ % (pkgname, linkable_shared_libs))
if len(linkable_shared_libs) > 1:
sonames = sorted(set([x[0] for x in linkable_shared_libs]))
tmp = su.MakePackageNameBySonameCollection(sonames)
@@ -979,7 +992,7 @@
error_mgr.ReportError(
"non-uniform-lib-versions-in-package",
"sonames=%s"
- % (sonames))
+ % (",".join(sonames)))
messenger.Message(
"Package %s contains shared libraries, and their soname "
"versions are not in sync: %s. This means that "
@@ -1008,7 +1021,7 @@
pkgname)
check_names = False
- else:
+ else: # len(linkable_shared_libs) > 1
if pkgname not in multilib_pkgnames:
error_mgr.ReportError(
"shared-lib-pkgname-mismatch",
@@ -1083,10 +1096,13 @@
error_mgr.ReportError(
"shared-lib-package-contains-so-symlink",
"file=%s" % entry["path"])
- messenger.SuggestGarLine("# (If %s-devel doesn't exist yet)" % pkgname)
- messenger.SuggestGarLine("PACKAGES += %s-devel" % pkgname)
+ messenger.SuggestGarLine("# (If %s-dev doesn't exist yet)" % pkgname)
+ messenger.SuggestGarLine("PACKAGES += %s-dev" % pkgname)
messenger.SuggestGarLine(
- "PKGFILES_%s-devel += %s" % (pkgname, entry["path"]))
+ "PKGFILES_%s-dev += %s" % (pkgname, entry["path"]))
+ messenger.SuggestGarLine(
+ "CATALOGNAME_%s-dev = %s_dev"
+ % (pkgname, pkg_data["basic_stats"]["catalogname"]))
messenger.Message(
"The package contains shared libraries together with the "
"symlink of the form libfoo.so -> libfoo.so.1. "
@@ -1136,9 +1152,9 @@
% (binary_info["soname"], binary_info["base_name"]))
-def CheckDocDir(pkg_data, error_mgr, logger, messenger):
+def CheckLicenseFilePlacement(pkg_data, error_mgr, logger, messenger):
pkgname = pkg_data["basic_stats"]["pkgname"]
- docpath_re = re.compile(r"/opt/csw/share/doc/(?P<docname>[^/]+)/license")
+ docpath_re = re.compile(r"/opt/csw/share/doc/(?P<docname>[^/]+)/license$")
for pkgmap_entry in pkg_data["pkgmap"]:
if "path" not in pkgmap_entry: continue
if not pkgmap_entry["path"]: continue
@@ -1185,6 +1201,28 @@
% (pkgname, repr(pkgmap_entry["path"]), repr(pkgmap_entry["target"])))
def CheckPrefixDirs(pkg_data, error_mgr, logger, messenger):
  """Files are allowed to be in /opt/csw, /etc/opt/csw and /var/opt/csw.

  Reports 'bad-location-of-file' for every pkgmap entry whose path lies
  outside ALLOWED_STARTING_PATHS.  Entries without a path are skipped.
  """
  for pkgmap_entry in pkg_data["pkgmap"]:
    # Some pkgmap entries carry no path (or an empty one); skip them.
    file_path = pkgmap_entry.get("path")
    if not file_path:
      continue
    # A path is acceptable when it equals an allowed prefix exactly
    # (e.g. /opt/csw itself) or lies below one; the trailing slash in
    # the startswith test keeps e.g. /opt/cswbin from matching.
    allowed_found = any(
        file_path == prefix or file_path.startswith(prefix + "/")
        for prefix in ALLOWED_STARTING_PATHS)
    if not allowed_found:
      error_mgr.ReportError(
          "bad-location-of-file",
          "file=%s" % file_path)
+
+
def CheckSonameMustNotBeEqualToFileNameIfFilenameEndsWithSo(
    pkg_data, error_mgr, logger, messenger):
  # TODO: check not implemented yet; intentionally a no-op placeholder.
  pass
Modified: csw/mgar/gar/v2-uwatch2/lib/python/package_checks_test.py
===================================================================
--- csw/mgar/gar/v2-uwatch2/lib/python/package_checks_test.py 2011-02-13 12:51:06 UTC (rev 13269)
+++ csw/mgar/gar/v2-uwatch2/lib/python/package_checks_test.py 2011-02-13 12:55:58 UTC (rev 13270)
@@ -51,10 +51,13 @@
checkpkg_lib.IndividualCheckInterface)
def testDefault(self):
+ self.RunCheckpkgTest(self.CheckpkgTest)
+
+ def RunCheckpkgTest(self, callback):
self.logger_mock = stubs.LoggerStub()
self.SetMessenger()
self.SetErrorManagerMock()
- self.CheckpkgTest()
+ callback()
self.mox.ReplayAll()
getattr(pc, self.FUNCTION_NAME)(self.pkg_data,
self.error_mgr_mock,
@@ -646,8 +649,8 @@
self.error_mgr_mock.ReportError(
'CSWrsync',
'deprecated-library',
- u'opt/csw/bin/sparcv8/rsync Deprecated Berkeley DB location '
- u'/opt/csw/lib/libdb-4.7.so')
+ u'file=opt/csw/bin/sparcv8/rsync '
+ u'lib=/opt/csw/lib/libdb-4.7.so')
self.pkg_data = [self.pkg_data]
for i in range(1):
self.error_mgr_mock.NeedFile(
@@ -1406,7 +1409,7 @@
self.pkg_data = neon_stats[0]
self.error_mgr_mock.ReportError(
'non-uniform-lib-versions-in-package',
- "sonames=['libneon.so.26', 'libneon.so.27']")
+ "sonames=libneon.so.26,libneon.so.27")
class TestCheckSharedLibraryNamingPolicyBerkeley(CheckpkgUnitTestHelper, unittest.TestCase):
@@ -1415,16 +1418,50 @@
self.pkg_data = bdb48_stats[0]
-class TestCheckSharedLibraryPkgDoesNotHaveTheSoFile(CheckpkgUnitTestHelper, unittest.TestCase):
+class TestCheckSharedLibraryPkgDoesNotHaveTheSoFile(CheckpkgUnitTestHelper,
+ unittest.TestCase):
FUNCTION_NAME = 'CheckSharedLibraryPkgDoesNotHaveTheSoFile'
+
def CheckpkgTest(self):
self.pkg_data = neon_stats[0]
self.error_mgr_mock.ReportError(
- 'shared-lib-package-contains-so-symlink', 'file=/opt/csw/lib/libneon.so')
+ 'shared-lib-package-contains-so-symlink',
+ 'file=/opt/csw/lib/libneon.so')
self.error_mgr_mock.ReportError(
- 'shared-lib-package-contains-so-symlink', 'file=/opt/csw/lib/sparcv9/libneon.so')
+ 'shared-lib-package-contains-so-symlink',
+ 'file=/opt/csw/lib/sparcv9/libneon.so')
class TestCheckSharedLibraryPkgDoesNotHaveTheSoFileSuggestion(
    CheckpkgUnitTestHelper, unittest.TestCase):
  """Verifies the GAR lines suggested when .so symlinks are found."""
  FUNCTION_NAME = 'CheckSharedLibraryPkgDoesNotHaveTheSoFile'

  def SetMessenger(self):
    """Overriding this method to use mock instead of a stub."""
    self.messenger = self.mox.CreateMock(stubs.MessengerStub)

  def CheckpkgTest(self):
    self.pkg_data = neon_stats[0]
    self.error_mgr_mock.ReportError(
        'shared-lib-package-contains-so-symlink',
        'file=/opt/csw/lib/libneon.so')
    self.error_mgr_mock.ReportError(
        'shared-lib-package-contains-so-symlink',
        'file=/opt/csw/lib/sparcv9/libneon.so')
    # One suggestion block is expected per offending symlink.
    self.messenger.SuggestGarLine("# (If CSWneon-dev doesn't exist yet)")
    self.messenger.SuggestGarLine('PACKAGES += CSWneon-dev')
    self.messenger.SuggestGarLine(
        'PKGFILES_CSWneon-dev += /opt/csw/lib/libneon.so')
    self.messenger.SuggestGarLine('CATALOGNAME_CSWneon-dev = neon_dev')
    self.messenger.Message(mox.IsA(str))
    self.messenger.SuggestGarLine("# (If CSWneon-dev doesn't exist yet)")
    self.messenger.SuggestGarLine('PACKAGES += CSWneon-dev')
    self.messenger.SuggestGarLine(
        'PKGFILES_CSWneon-dev += /opt/csw/lib/sparcv9/libneon.so')
    self.messenger.SuggestGarLine('CATALOGNAME_CSWneon-dev = neon_dev')
    self.messenger.Message(mox.IsA(str))
+
+
class TestCheckSharedLibraryNameMustBeAsubstringOfSonameGood(
CheckpkgUnitTestHelper, unittest.TestCase):
FUNCTION_NAME = 'CheckSharedLibraryNameMustBeAsubstringOfSoname'
@@ -1444,8 +1481,9 @@
'soname=libneon.so.27 filename=foo.so.1')
-class TestCheckDocDirLicense(CheckpkgUnitTestHelper, unittest.TestCase):
- FUNCTION_NAME = 'CheckDocDir'
+class TestCheckLicenseFilePlacementLicense(CheckpkgUnitTestHelper,
+ unittest.TestCase):
+ FUNCTION_NAME = 'CheckLicenseFilePlacement'
def CheckpkgTest(self):
self.pkg_data = copy.deepcopy(neon_stats[0])
self.pkg_data["pkgmap"].append({
@@ -1459,9 +1497,23 @@
'in-package=/opt/csw/share/doc/alien/license')
-class TestCheckDocDirRandomFile(CheckpkgUnitTestHelper, unittest.TestCase):
class TestCheckLicenseFilePlacementLicenseDifferentSuffix(
    CheckpkgUnitTestHelper, unittest.TestCase):
  """A differently suffixed file should not trigger an error."""
  FUNCTION_NAME = 'CheckLicenseFilePlacement'
  def CheckpkgTest(self):
    self.pkg_data = copy.deepcopy(neon_stats[0])
    # license.html must not match the .../license$ pattern, so no error
    # is registered on the mock.
    self.pkg_data["pkgmap"].append({
        "class": "none", "type": "f", "line": "",
        "user": "root", "group": "bin", "mode": '0755',
        "path": "/opt/csw/share/doc/alien/license.html",
    })
+
+
+class TestCheckLicenseFilePlacementRandomFile(
+ CheckpkgUnitTestHelper, unittest.TestCase):
"A random file should not trigger the message; only license files."
- FUNCTION_NAME = 'CheckDocDir'
+ FUNCTION_NAME = 'CheckLicenseFilePlacement'
def CheckpkgTest(self):
self.pkg_data = copy.deepcopy(neon_stats[0])
self.pkg_data["pkgmap"].append({
@@ -1533,5 +1585,75 @@
self.error_mgr_mock.NeedFile('/opt/csw/lib/libpq.so.5', mox.IsA(str))
class TestCheckPrefixDirs(CheckpkgUnitTestHelper,
                          unittest.TestCase):
  """Tests for CheckPrefixDirs.

  Files under /opt/csw and /var/opt/csw are accepted; files under
  /opt/cswbin and /var/foo must be reported as bad-location-of-file.
  """
  FUNCTION_NAME = 'CheckPrefixDirs'

  def CheckpkgTest(self):
    # A file under /opt/csw: no error expected.
    self.pkg_data = copy.deepcopy(tree_stats[0])
    self.pkg_data["pkgmap"].append(
        {'class': 'none',
         'group': None,
         'line': None,
         'mode': None,
         'path': '/opt/csw/bin/foo',
         'type': 'f',
         'user': None,
         'target': None})

  def CheckpkgTest2(self):
    # /opt/cswbin is not /opt/csw/bin: an error is expected.
    self.pkg_data = copy.deepcopy(tree_stats[0])
    self.pkg_data["pkgmap"].append(
        {'class': 'none',
         'group': None,
         'line': None,
         'mode': None,
         'path': '/opt/cswbin/foo',
         'type': 'f',
         'user': None,
         'target': None})
    self.error_mgr_mock.ReportError(
        'bad-location-of-file',
        'file=/opt/cswbin/foo')

  def CheckpkgTest3(self):
    # /var/opt/csw is an allowed prefix: no error expected.
    self.pkg_data = copy.deepcopy(tree_stats[0])
    self.pkg_data["pkgmap"].append(
        {'class': 'none',
         'group': None,
         'line': None,
         'mode': None,
         'path': '/var/opt/csw/foo',
         'type': 'f',
         'user': None,
         'target': None})

  def CheckpkgTest4(self):
    # /var outside /var/opt/csw: an error is expected.
    self.pkg_data = copy.deepcopy(tree_stats[0])
    self.pkg_data["pkgmap"].append(
        {'class': 'none',
         'group': None,
         'line': None,
         'mode': None,
         'path': '/var/foo',
         'type': 'f',
         'user': None,
         'target': None})
    self.error_mgr_mock.ReportError(
        'bad-location-of-file',
        'file=/var/foo')

  # These three utility functions allow to run 3 tests in a single
  # class.
  def testTwo(self):
    self.RunCheckpkgTest(self.CheckpkgTest2)

  def testThree(self):
    self.RunCheckpkgTest(self.CheckpkgTest3)

  def testFour(self):
    self.RunCheckpkgTest(self.CheckpkgTest4)
+
+
if __name__ == '__main__':
unittest.main()
Modified: csw/mgar/gar/v2-uwatch2/lib/python/package_stats.py
===================================================================
--- csw/mgar/gar/v2-uwatch2/lib/python/package_stats.py 2011-02-13 12:51:06 UTC (rev 13269)
+++ csw/mgar/gar/v2-uwatch2/lib/python/package_stats.py 2011-02-13 12:55:58 UTC (rev 13270)
@@ -26,8 +26,11 @@
BAD_CONTENT_REGEXES = (
# Slightly obfuscating these by using the default concatenation of
# strings.
+ r'/export' r'/home',
r'/export' r'/medusa',
r'/opt' r'/build',
+ r'/usr' r'/local',
+ r'/usr' r'/share',
)
@@ -430,14 +433,14 @@
if not srv4.data_obj:
raise DatabaseError("Could not find the data object for %s (%s)"
% (srv4.basename, md5_sum))
- self.all_stats = cPickle.loads(str(srv4.data_obj.pickle))
+ self.all_stats = srv4.GetStatsStruct()
return self.all_stats
def StatsListFromCatalog(file_name_list, catalog_file_name=None, debug=False):
packages = [inspective_package.InspectiveCswSrv4File(x, debug) for x in file_name_list]
if catalog_file_name:
- catalog_obj = catalog.OpencswCatalog(catalog_file_name)
+ catalog_obj = catalog.OpencswCatalog(open(catalog_file_name, "rb"))
md5s_by_basename = catalog_obj.GetDataByBasename()
for pkg in packages:
basename = os.path.basename(pkg.pkg_path)
@@ -459,7 +462,7 @@
self.logger = logging
self.debug = debug
- def CollectStatsFromFiles(self, file_list, catalog_file):
+ def CollectStatsFromFiles(self, file_list, catalog_file, force_unpack=False):
args_display = file_list
if len(args_display) > 5:
args_display = args_display[:5] + ["...more..."]
@@ -485,7 +488,7 @@
# removes the temporary directory from the disk. This allows to process
# the whole catalog.
stats = stats_list.pop()
- stats.CollectStats()
+ stats.CollectStats(force=force_unpack)
data_list.append(stats.GetAllStats())
pbar.update(counter.next())
pbar.finish()
@@ Diff output truncated at 100000 characters. @@
This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site.
More information about the devel
mailing list