[csw-devel] SF.net SVN: gar:[19754] csw/mgar/gar/v2-yann

chninkel at users.sourceforge.net
Tue Nov 27 22:04:24 CET 2012


Revision: 19754
          http://gar.svn.sourceforge.net/gar/?rev=19754&view=rev
Author:   chninkel
Date:     2012-11-27 21:04:23 +0000 (Tue, 27 Nov 2012)
Log Message:
-----------
gar/v2-yann: updated from HEAD

Modified Paths:
--------------
    csw/mgar/gar/v2-yann/bin/pathfilter
    csw/mgar/gar/v2-yann/bin/stripbin
    csw/mgar/gar/v2-yann/categories/python/category.mk
    csw/mgar/gar/v2-yann/category.mk
    csw/mgar/gar/v2-yann/gar.conf.mk
    csw/mgar/gar/v2-yann/gar.lib.mk
    csw/mgar/gar/v2-yann/gar.mk
    csw/mgar/gar/v2-yann/gar.pkg.mk
    csw/mgar/gar/v2-yann/lib/map.solaris10u8
    csw/mgar/gar/v2-yann/lib/python/compare_pkgs.py
    csw/mgar/gar/v2-yann/lib/python/csw_upload_pkg.py
    csw/mgar/gar/v2-yann/lib/python/database.py
    csw/mgar/gar/v2-yann/lib/python/generate_catalog_file.py
    csw/mgar/gar/v2-yann/lib/python/integrate_catalogs.py
    csw/mgar/gar/v2-yann/lib/python/opencsw_test.py
    csw/mgar/gar/v2-yann/lib/python/package.py
    csw/mgar/gar/v2-yann/lib/python/pkgdb.py
    csw/mgar/gar/v2-yann/lib/python/rest.py
    csw/mgar/gar/v2-yann/lib/python/safe_remove_package.py
    csw/mgar/gar/v2-yann/lib/python/system_pkgmap.py
    csw/mgar/gar/v2-yann/lib/web/pkgdb_web.py
    csw/mgar/gar/v2-yann/lib/web/releases_web.py

Added Paths:
-----------
    csw/mgar/gar/v2-yann/bin/catlicense
    csw/mgar/gar/v2-yann/categories/apps
    csw/mgar/gar/v2-yann/categories/default/
    csw/mgar/gar/v2-yann/categories/devel
    csw/mgar/gar/v2-yann/categories/lang
    csw/mgar/gar/v2-yann/categories/lib
    csw/mgar/gar/v2-yann/categories/net
    csw/mgar/gar/v2-yann/categories/server
    csw/mgar/gar/v2-yann/categories/utils
    csw/mgar/gar/v2-yann/categories/xorg
    csw/mgar/gar/v2-yann/categories/xtra

Removed Paths:
-------------
    csw/mgar/gar/v2-yann/categories/apps/
    csw/mgar/gar/v2-yann/categories/devel/
    csw/mgar/gar/v2-yann/categories/lang/
    csw/mgar/gar/v2-yann/categories/lib/
    csw/mgar/gar/v2-yann/categories/net/
    csw/mgar/gar/v2-yann/categories/server/
    csw/mgar/gar/v2-yann/categories/utils/
    csw/mgar/gar/v2-yann/categories/xorg/
    csw/mgar/gar/v2-yann/categories/xtra/

Property Changed:
----------------
    csw/mgar/gar/v2-yann/


Property changes on: csw/mgar/gar/v2-yann
___________________________________________________________________
Modified: svn:mergeinfo
   - /csw/mgar/gar/v2:4936-6678
/csw/mgar/gar/v2-bwalton:9784-10011
/csw/mgar/gar/v2-checkpkg:7722-7855
/csw/mgar/gar/v2-checkpkg-override-relocation:10585-10737
/csw/mgar/gar/v2-checkpkg-stats:8454-8649
/csw/mgar/gar/v2-collapsed-modulations:6895
/csw/mgar/gar/v2-defaultchange:13903-14022
/csw/mgar/gar/v2-dirpackage:8125-8180
/csw/mgar/gar/v2-fortran:10883-12516
/csw/mgar/gar/v2-git/v2-relocate:7617
/csw/mgar/gar/v2-migrateconf:7082-7211
/csw/mgar/gar/v2-noexternals:11592-11745
/csw/mgar/gar/v2-raised-buildlevel:15906-15949
/csw/mgar/gar/v2-relocate:5028-11738
/csw/mgar/gar/v2-skayser:6087-6132
/csw/mgar/gar/v2-solaris11:18134-18236
/csw/mgar/gar/v2-sqlite:10434-10449
/csw/mgar/gar/v2-uwatch2:12141-13270
   + /csw/mgar/gar/v2:4936-6678,19287-19753
/csw/mgar/gar/v2-bwalton:9784-10011
/csw/mgar/gar/v2-checkpkg:7722-7855
/csw/mgar/gar/v2-checkpkg-override-relocation:10585-10737
/csw/mgar/gar/v2-checkpkg-stats:8454-8649
/csw/mgar/gar/v2-collapsed-modulations:6895
/csw/mgar/gar/v2-defaultchange:13903-14022
/csw/mgar/gar/v2-dirpackage:8125-8180
/csw/mgar/gar/v2-fortran:10883-12516
/csw/mgar/gar/v2-git/v2-relocate:7617
/csw/mgar/gar/v2-migrateconf:7082-7211
/csw/mgar/gar/v2-noexternals:11592-11745
/csw/mgar/gar/v2-raised-buildlevel:15906-15949
/csw/mgar/gar/v2-relocate:5028-11738
/csw/mgar/gar/v2-skayser:6087-6132
/csw/mgar/gar/v2-solaris11:18134-18236
/csw/mgar/gar/v2-sqlite:10434-10449
/csw/mgar/gar/v2-uwatch2:12141-13270

Copied: csw/mgar/gar/v2-yann/bin/catlicense (from rev 19753, csw/mgar/gar/v2/bin/catlicense)
===================================================================
--- csw/mgar/gar/v2-yann/bin/catlicense	                        (rev 0)
+++ csw/mgar/gar/v2-yann/bin/catlicense	2012-11-27 21:04:23 UTC (rev 19754)
@@ -0,0 +1,14 @@
+#!/bin/sh
+
+if [ -f "$1" ]; then
+  cat "$1"
+  shift
+fi
+
+while [ $# -ge 1 ]; do
+  if [ -f "$1" ]; then
+    echo "\n---\n"
+    cat "$1"
+  fi
+  shift
+done
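
The new catlicense helper concatenates whichever of its arguments exist as
files, printing a "---" separator line between them; the gar.pkg.mk change
further down uses it to assemble a single license file from several sources.
A minimal usage sketch (the file names are hypothetical):

    catlicense COPYING COPYING.OpenSSL > work/merged.license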

Modified: csw/mgar/gar/v2-yann/bin/pathfilter
===================================================================
--- csw/mgar/gar/v2-yann/bin/pathfilter	2012-11-27 17:39:49 UTC (rev 19753)
+++ csw/mgar/gar/v2-yann/bin/pathfilter	2012-11-27 21:04:23 UTC (rev 19754)
@@ -21,19 +21,41 @@
 
 my ($hasinclude, $hasexclude);
 my @isaexec;
-my @selection;
+my @selection_args;
 my $result = GetOptions(
 	'h|help' => \$help,
 	'e|isaexec=s' => \@isaexec,
-	'i|include=s' => sub { push @selection, [ 'i', $_[1] ]; $hasinclude = 1 },
-	'I=s' => sub { push @selection, [ 'i', quotemeta( $_[1] ) ]; $hasinclude = 1 },
-	'x|exclude=s' => sub { push @selection, [ 'x', $_[1] ]; $hasexclude = 1 },
-	'X=s' => sub { push @selection, [ 'x', quotemeta( $_[1] ) ]; $hasexclude = 1 },
+	'i|include=s' => sub { push @selection_args, [ 'i', $_[1] ]; $hasinclude = 1 },
+	'I=s' => sub { push @selection_args, [ 'i', quotemeta( $_[1] ) ]; $hasinclude = 1 },
+	'x|exclude=s' => sub { push @selection_args, [ 'x', $_[1] ]; $hasexclude = 1 },
+	'X=s' => sub { push @selection_args, [ 'x', quotemeta( $_[1] ) ]; $hasexclude = 1 },
 ) or pod2usage( 1 );
 
 # Exclude everything by default if there are only include rules
-push @selection, [ 'x', '.*' ] if( $hasinclude && !$hasexclude );
+push @selection_args, [ 'x', '.*' ] if( $hasinclude && !$hasexclude );
 
+# @selection = map { [ $_->[0], qr/^$_->[1]$/ ] } @selection;
+
+# This routine anchors all regexps at start and end and combines sequential includes/excludes into a single regex
+my @selection;
+my @seltemp;
+my $mode;
+foreach my $c (@selection_args) {
+  my ($type, $re) = @$c;
+  if( $mode && $mode ne $type ) {
+    # flush
+    my $mre = '^(' . join( '|', @seltemp ) . ')$';
+    push @selection, [ $mode, qr/$mre/ ];
+    @seltemp = ();
+    $mode = $type;
+  }
+  $mode = $type;
+  push @seltemp, $re;
+}
+
+my $mre = '^(' . join( '|', @seltemp ) . ')$';
+push @selection, [ $mode, qr/$mre/ ];
+
 pod2usage(-verbose => 2) if $help;
 
 my %p;
@@ -67,9 +89,9 @@
   foreach my $selector (@selection) {
     my ($type, $regex) = @$selector;
     if( $type eq 'i' ) {
-      last SELECTION if( $path =~ /^$regex$/ );
+      last SELECTION if( $path =~ /$regex/ );
     } elsif( $type eq 'x' ) {
-      next NEXTLINE if( $path =~ /^$regex$/ );
+      next NEXTLINE if( $path =~ /$regex/ );
     } else {
       croak( "The type '$type' is unknown (either 'x' or 'i' is allowed)." );
     }
@@ -114,20 +136,50 @@
 }
 
 # Process isaexec substitutions
-# Usage: -e /opt/csw/bin/mytool=/opt/csw/bin/sparcv8/mytool
-#   f none /opt/csw/bin/mytool 0755 root bin
+# -e /opt/csw/bin/foo=/opt/csw/bin/sparcv8/foo
+#   f none /opt/csw/bin/foo 0755 root bin
 # ->
-#   l none /opt/csw/bin/mytool=/opt/csw/bin/isaexec
-#   f none /opt/csw/bin/sparcv8/mytool=/opt/csw/bin/mytool
+#   l none /opt/csw/bin/foo=/opt/csw/bin/isaexec
+#   f none /opt/csw/bin/sparcv8/foo=/opt/csw/bin/foo
+#
+# --
+#
+# -e /opt/csw/bin/foo=/opt/csw/bin/sparcv8/foo
+# -e /opt/csw/bin/bar=/opt/csw/bin/sparcv8/bar
+#   f none /opt/csw/bin/foo 0755 root bin			SAME
+#   l none /opt/csw/bin/bar=/opt/csw/bin/foo 0755 root bin
+# ->
+#   l none /opt/csw/bin/foo=/opt/csw/bin/isaexec		SAME
+#   f none /opt/csw/bin/sparcv8/foo=/opt/csw/bin/foo		SAME
+#   l none /opt/csw/bin/bar=/opt/csw/bin/isaexec
+#   l none /opt/csw/bin/sparcv8/bar=/opt/csw/bin/sparcv8/foo
+#
+
+my %isaexec_map;
 foreach my $e (@isaexec) {
   my ($isaexec_path, $new_path) = split( /=/, $e );
+  $isaexec_map{$isaexec_path} = $new_path;
+}
 
+foreach my $e (@isaexec) {
+  my ($isaexec_path, $new_path) = split( /=/, $e );
+
   # Don't do isaexec replacement if the path has not been selected.
   next if( !exists $p{$isaexec_path} );
 
   $p{$new_path} = [ @{$p{$isaexec_path}} ];
-  $p{$new_path}->[2] = $new_path . '=' . $isaexec_path;
-  $p{$isaexec_path}->[0] = 'l';
+
+  # If the thing we try to isaexec is a symlink itself, we also need to replace the target
+  if( $p{$isaexec_path}->[0] eq 'l' ) {
+    # The file to be replaced by isaexec is already a hardlink; remove the target
+    my ($target) = ($p{$isaexec_path}->[2] =~ /=(.*)/);
+    $p{$isaexec_path}->[2] =~ s/=.*//;
+    $p{$new_path}->[2] = $new_path . '=' . (exists $isaexec_map{$target} ? $isaexec_map{$target} : $isaexec_path);
+  } else {
+    # Make it a hardlink
+    $p{$isaexec_path}->[0] = 'l';
+    $p{$new_path}->[2] = $new_path . '=' . $isaexec_path;
+  }
   $p{$isaexec_path}->[2] .= '=/opt/csw/bin/isaexec';
 }
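
For illustration, a rough sketch of a pathfilter invocation; it is assumed
here that the prototype is filtered on stdin/stdout (see "perldoc
bin/pathfilter") and the paths are made up. With this change, consecutive
-i/-I and -x/-X rules are collapsed into single anchored regexes before
matching, and -e now also handles targets that are already links:

    pathfilter -I /opt/csw/bin/foo -I /opt/csw/bin/bar \
        -e /opt/csw/bin/foo=/opt/csw/bin/sparcv8/foo \
        < prototype.in > prototype.out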
 

Modified: csw/mgar/gar/v2-yann/bin/stripbin
===================================================================
--- csw/mgar/gar/v2-yann/bin/stripbin	2012-11-27 17:39:49 UTC (rev 19753)
+++ csw/mgar/gar/v2-yann/bin/stripbin	2012-11-27 21:04:23 UTC (rev 19754)
@@ -28,7 +28,7 @@
 		my $perm = (stat $file)[2] & 07777;
 		print "making file temporarily writable ... " unless( $perm & 0200 );
                 chmod($perm | 0200, $file);
-		system "strip", $file and die "Failed.";
+		system "/usr/ccs/bin/strip", $file and die "Failed.";
 		chmod($perm, $file);
 		print "Done.\n";
 	}

Copied: csw/mgar/gar/v2-yann/categories/apps (from rev 19753, csw/mgar/gar/v2/categories/apps)
===================================================================
--- csw/mgar/gar/v2-yann/categories/apps	                        (rev 0)
+++ csw/mgar/gar/v2-yann/categories/apps	2012-11-27 21:04:23 UTC (rev 19754)
@@ -0,0 +1 @@
+link default
\ No newline at end of file

Copied: csw/mgar/gar/v2-yann/categories/devel (from rev 19753, csw/mgar/gar/v2/categories/devel)
===================================================================
--- csw/mgar/gar/v2-yann/categories/devel	                        (rev 0)
+++ csw/mgar/gar/v2-yann/categories/devel	2012-11-27 21:04:23 UTC (rev 19754)
@@ -0,0 +1 @@
+link default
\ No newline at end of file

Copied: csw/mgar/gar/v2-yann/categories/lang (from rev 19753, csw/mgar/gar/v2/categories/lang)
===================================================================
--- csw/mgar/gar/v2-yann/categories/lang	                        (rev 0)
+++ csw/mgar/gar/v2-yann/categories/lang	2012-11-27 21:04:23 UTC (rev 19754)
@@ -0,0 +1 @@
+link default
\ No newline at end of file

Copied: csw/mgar/gar/v2-yann/categories/lib (from rev 19753, csw/mgar/gar/v2/categories/lib)
===================================================================
--- csw/mgar/gar/v2-yann/categories/lib	                        (rev 0)
+++ csw/mgar/gar/v2-yann/categories/lib	2012-11-27 21:04:23 UTC (rev 19754)
@@ -0,0 +1 @@
+link default
\ No newline at end of file

Copied: csw/mgar/gar/v2-yann/categories/net (from rev 19753, csw/mgar/gar/v2/categories/net)
===================================================================
--- csw/mgar/gar/v2-yann/categories/net	                        (rev 0)
+++ csw/mgar/gar/v2-yann/categories/net	2012-11-27 21:04:23 UTC (rev 19754)
@@ -0,0 +1 @@
+link default
\ No newline at end of file

Modified: csw/mgar/gar/v2-yann/categories/python/category.mk
===================================================================
--- csw/mgar/gar/v2-yann/categories/python/category.mk	2012-11-27 17:39:49 UTC (rev 19753)
+++ csw/mgar/gar/v2-yann/categories/python/category.mk	2012-11-27 21:04:23 UTC (rev 19754)
@@ -30,4 +30,9 @@
 MASTER_SITES ?= $(PYPI_MIRROR)
 PACKAGES ?= CSWpy-$(DASHED_NAME)
 
+# for use in any references by specific recipes so it can be replaced easily
+# across the tree.  this could later be parameterized for use by multiple
+# versions of python too.
+SITE_PACKAGES = $(libdir)/python2.6/site-packages
+
 include gar/gar.mk

Copied: csw/mgar/gar/v2-yann/categories/server (from rev 19753, csw/mgar/gar/v2/categories/server)
===================================================================
--- csw/mgar/gar/v2-yann/categories/server	                        (rev 0)
+++ csw/mgar/gar/v2-yann/categories/server	2012-11-27 21:04:23 UTC (rev 19754)
@@ -0,0 +1 @@
+link default
\ No newline at end of file

Copied: csw/mgar/gar/v2-yann/categories/utils (from rev 19753, csw/mgar/gar/v2/categories/utils)
===================================================================
--- csw/mgar/gar/v2-yann/categories/utils	                        (rev 0)
+++ csw/mgar/gar/v2-yann/categories/utils	2012-11-27 21:04:23 UTC (rev 19754)
@@ -0,0 +1 @@
+link default
\ No newline at end of file

Copied: csw/mgar/gar/v2-yann/categories/xorg (from rev 19753, csw/mgar/gar/v2/categories/xorg)
===================================================================
--- csw/mgar/gar/v2-yann/categories/xorg	                        (rev 0)
+++ csw/mgar/gar/v2-yann/categories/xorg	2012-11-27 21:04:23 UTC (rev 19754)
@@ -0,0 +1 @@
+link default
\ No newline at end of file

Copied: csw/mgar/gar/v2-yann/categories/xtra (from rev 19753, csw/mgar/gar/v2/categories/xtra)
===================================================================
--- csw/mgar/gar/v2-yann/categories/xtra	                        (rev 0)
+++ csw/mgar/gar/v2-yann/categories/xtra	2012-11-27 21:04:23 UTC (rev 19754)
@@ -0,0 +1 @@
+link default
\ No newline at end of file

Modified: csw/mgar/gar/v2-yann/category.mk
===================================================================
--- csw/mgar/gar/v2-yann/category.mk	2012-11-27 17:39:49 UTC (rev 19753)
+++ csw/mgar/gar/v2-yann/category.mk	2012-11-27 21:04:23 UTC (rev 19754)
@@ -21,6 +21,12 @@
 # of gar/ symlinks in each build directory.
 GARDIR := $(dir $(lastword $(MAKEFILE_LIST)))
 
+$(if $(findstring $(CATEGORIES),apps devel lang lib net server utils xorg xtra),\
+  $(warning The categories with no special meaning have been renamed to 'default'; please remove the CATEGORIES line, as it is no longer necessary for the default case)\
+)
+
+CATEGORIES ?= default
+
 ifeq (,$(wildcard $(GARDIR)/categories/$(CATEGORIES)/category.mk))
   $(error The category '$(CATEGORIES)' is invalid. Valid categories are: $(patsubst $(GARDIR)/categories/%,%,$(wildcard $(GARDIR)/categories/*)))
 endif

Modified: csw/mgar/gar/v2-yann/gar.conf.mk
===================================================================
--- csw/mgar/gar/v2-yann/gar.conf.mk	2012-11-27 17:39:49 UTC (rev 19753)
+++ csw/mgar/gar/v2-yann/gar.conf.mk	2012-11-27 21:04:23 UTC (rev 19754)
@@ -18,7 +18,7 @@
 
 # On these platforms packages are built.
 # They will include binaries for all ISAs that are specified for the platform.
-PACKAGING_PLATFORMS ?= solaris9-sparc solaris9-i386
+PACKAGING_PLATFORMS ?= solaris10-sparc solaris10-i386
 
 # This is the platform we are currently building. It is either set when
 # invoked from "gmake platforms" or when you build a package on a host
@@ -690,7 +690,7 @@
 RUNPATH_ISALIST ?= $(EXTRA_RUNPATH_DIRS) $(EXTRA_LIB) $(filter-out $(libpath_install),$(libdir_install)) $(libpath_install)
 endif
 
-LINKER_MAP_RELEASE-5.10 ?= solaris10u8
+LINKER_MAP_RELEASE-5.10 ?= solaris10
 LINKER_MAP_RELEASE ?= $(LINKER_MAP_RELEASE-$(GAROSREL))
 
 LINKER_MAPS ?= $(foreach MAP,$(LINKER_MAP_RELEASE) $(EXTRA_LINKER_MAPS) $(EXTRA_LINKER_MAPS-$(GAROSREL)),-M $(abspath $(GARDIR)/lib/map.$(LINKER_MAP_RELEASE)))
@@ -752,8 +752,9 @@
 GNOME_MIRROR = $(GNOME_ROOT)/$(GNOME_PROJ)/$(GNOME_SUBV)/
 
 # SourceForge
-SF_PROJ     ?= $(NAME)
-SF_MIRRORS  ?= http://downloads.sourceforge.net/$(SF_PROJ)/
+$(if $(SF_PROJ),$(warning SF_PROJ is deprecated, please use SF_PROJECT instead))
+SF_PROJECT  ?= $(or $(SF_PROJ),$(NAME))
+SF_MIRRORS  ?= http://downloads.sourceforge.net/$(SF_PROJECT)/
 # Keep this for compatibility
 SF_MIRROR    = $(firstword $(SF_MIRRORS))
 SF_PROJECT_SHOWFILE ?= http://sourceforge.net/project/showfiles.php?group_id
@@ -771,9 +772,10 @@
 GNU_SITE     = http://ftp.gnu.org
 GNU_GNUROOT  = $(GNU_SITE)/gnu
 GNU_NGNUROOT = $(GNU_SITE)/non-gnu
-GNU_PROJ    ?= $(NAME)
-GNU_MIRROR   = $(GNU_GNUROOT)/$(GNU_PROJ)/
-GNU_NMIRROR  = $(GNU_NGNUROOT)/$(GNU_PROJ)/
+$(if $(GNU_PROJ),$(warning GNU_PROJ is deprecated, please use GNU_PROJECT instead))
+GNU_PROJECT ?= $(or $(GNU_PROJ),$(NAME))
+GNU_MIRROR   = $(GNU_GNUROOT)/$(GNU_PROJECT)/
+GNU_NMIRROR  = $(GNU_NGNUROOT)/$(GNU_PROJECT)/
 
 # CPAN
 CPAN_SITES  += http://search.cpan.org/CPAN

Modified: csw/mgar/gar/v2-yann/gar.lib.mk
===================================================================
--- csw/mgar/gar/v2-yann/gar.lib.mk	2012-11-27 17:39:49 UTC (rev 19753)
+++ csw/mgar/gar/v2-yann/gar.lib.mk	2012-11-27 21:04:23 UTC (rev 19754)
@@ -31,6 +31,8 @@
 # add these 'dynamic script' targets to our fetch list
 DYNURLS := $(foreach DYN,$(DYNSCRIPTS),dynscr://$(DYN))
 
+$(foreach M,$(MASTER_SITES),$(if $(filter %/,$M),,$(error MASTER_SITES must contain only URLs ending in a / whereas this one does not: $M)))
+
 URLS := $(foreach SITE,$(FILE_SITES) $(MASTER_SITES),$(addprefix $(SITE),$(DISTFILES))) $(foreach SITE,$(FILE_SITES) $(PATCH_SITES) $(MASTER_SITES),$(addprefix $(SITE),$(ALLFILES_PATCHFILES))) $(DYNURLS)
 
 define gitsubst

Modified: csw/mgar/gar/v2-yann/gar.mk
===================================================================
--- csw/mgar/gar/v2-yann/gar.mk	2012-11-27 17:39:49 UTC (rev 19753)
+++ csw/mgar/gar/v2-yann/gar.mk	2012-11-27 21:04:23 UTC (rev 19754)
@@ -811,7 +811,7 @@
 
 # Support for cswpycompile, skip pre-compiled python files (.pyc, .pyo)
 # during the merge phase.
-_PYCOMPILE_FILES = /opt/csw/lib/python/site-packages/.*\.py
+_PYCOMPILE_FILES = /opt/csw/lib/python.*/site-packages/.*\.py
 MERGE_EXCLUDE_PYCOMPILE ?= $(if $(PYCOMPILE), $(addsuffix c,$(_PYCOMPILE_FILES)) $(addsuffix o,$(_PYCOMPILE_FILES)))
 
 MERGE_EXCLUDE_INFODIR ?= $(sharedstatedir)/info/dir

Modified: csw/mgar/gar/v2-yann/gar.pkg.mk
===================================================================
--- csw/mgar/gar/v2-yann/gar.pkg.mk	2012-11-27 17:39:49 UTC (rev 19753)
+++ csw/mgar/gar/v2-yann/gar.pkg.mk	2012-11-27 21:04:23 UTC (rev 19754)
@@ -254,6 +254,7 @@
 		$(if $(PYCOMPILE),$(foreach FILE,$(_PYCOMPILE_FILES),$$F[1] = "cswpycompile" if( $$F[2] =~ m(^$(FILE)$$) );))\
 		$(foreach FILE,$(TEXINFO),$$F[1] = "cswtexinfo" if( $$F[2] =~ m(^$(FILE)$$) );)\
 		$(foreach FILE,$(TEXHASH),$$F[1] = "cswtexhash" if( $$F[2] =~ m(^$(FILE)$$) );)\
+		$(foreach FILE,$(SSLCERT),$$F[1] = "cswsslcert" if( $$F[2] =~ m(^$(FILE)$$) );)\
 		$(if $(AP2_MODS), at F = ("e", "build", $$F[2], "?", "?", "?") if ($$F[2] =~ m(^/opt/csw/apache2/ap2mod/.*));) \
 		$(if $(PHP5_EXT), at F = ("e", "build", $$F[2], "?", "?", "?") if ($$F[2] =~ m(^/opt/csw/php5/extensions/.*));) \
 		$$F[1] = "cswcptemplates" if( $$F[2] =~ m(^/opt/csw/etc/templates/.+$$) and $$F[0] eq "f" ); \
@@ -275,12 +276,16 @@
 # per the above note.  (See bacula for an example of where this is
 # required.)
 
+# NOTE: sslcert must run before initsmf/inetd. Certs should be
+# in place before services are started.
+
 _CSWCLASSES  = cswusergroup ugfiles
 _CSWCLASSES += cswmigrateconf cswcpsampleconf cswpreserveconf cswcptemplates
 _CSWCLASSES += cswetcservices
 _CSWCLASSES += cswetcshells
 _CSWCLASSES += cswcrontab
 _CSWCLASSES += cswpycompile
+_CSWCLASSES += cswsslcert
 _CSWCLASSES += cswinetd
 _CSWCLASSES += cswinitsmf
 _CSWCLASSES += cswtexinfo
@@ -483,7 +488,7 @@
 
 # Pulled in from pkglib/csw_prototype.gspec
 $(PROTOTYPE): $(WORKDIR) merge
-	$(_DBG)cswproto -c $(GARDIR)/etc/commondirs-$(GARCH) -r $(PKGROOT) $(PKGROOT)=$(if $(ALLOW_RELOCATE),,'/') >$@ 
+	$(_DBG)cswproto $(if $(INCLUDE_COMMONDIRS),,-c $(GARDIR)/etc/commondirs-$(GARCH)) -r $(PKGROOT) $(PKGROOT)=$(if $(ALLOW_RELOCATE),,'/') >$@ 
 
 # pathfilter lives in bin/pathfilter and takes care of including/excluding paths from
 # a prototype (see "perldoc bin/pathfilter"). We employ it here to:
@@ -703,8 +708,9 @@
 	$(if $(ALLOW_RELOCATE),echo "BASEDIR=$(RELOCATE_PREFIX)" >>$@)
 
 
-# findlicensefile - Find an existing file for a given license name
-#
+# findlicensefile - Find an existing file for a given relative license file name
+# Arguments:
+#  $(1)  A filename to be used for licenses
 define findlicensefile
 $(strip 
   $(if $(1),$(firstword $(realpath 
@@ -714,33 +720,31 @@
 )
 endef
 
-define licensefile
-$(strip 
-  $(or 
-    $(call findlicensefile,$(or $(LICENSE_$(1)),$(LICENSE_FULL_$(1)))),
-    $(call findlicensefile,$(or $(LICENSE),$(LICENSE_FULL))),
-  ) 
+# licensefiles - Find the existing license files for a given package name
+define licensefiles
+$(foreach L,$(or $(LICENSE_$(1)),$(LICENSE_FULL_$(1)),$(LICENSE),$(LICENSE_FULL)),\
+  $(or $(call findlicensefile,$L),$(if $(_LICENSE_IS_DEFAULT),,$(error Cannot find license file $L for package $(1))))\
 )
 endef
 
 merge-license-%: $(WORKDIR)
 	$(_DBG)$(if $(and $(LICENSE_$*),$(LICENSE_FULL_$*)),$(error Both LICENSE_$* and LICENSE_FULL_$* have been specified where only one is allowed)) \
 		$(if $(and $(filter $*,$(_PKG_SPECS)),$(or $(LICENSE),$(LICENSE_FULL),$(LICENSE_$*),$(LICENSE_FULL_$*))), \
-		LICENSEFILE=$(or $(call licensefile,$*),$(if $(_LICENSE_IS_DEFAULT),,$(error Cannot find license file for package $*))); \
+		LICENSEFILES="$(call licensefiles,$*)"; \
 		LICENSEDIR=$(call licensedir,$*); \
-		$(if $(LICENSE_TEXT_$*)$(LICENSE_TEXT),\
+		$(if $(or $(LICENSE_TEXT_$*),$(LICENSE_TEXT)),\
 		  umask 022 && mkdir -p $(PKGROOT)$$LICENSEDIR && \
 		  echo "$(or $(LICENSE_TEXT_$*),$(LICENSE_TEXT))" > $(PKGROOT)$$LICENSEDIR/license;\
 		  echo "$(or $(LICENSE_TEXT_$*),$(LICENSE_TEXT))" > $(WORKDIR)/$*.copyright;\
 		,\
-		  if [ -n "$$LICENSEFILE" ]; then \
+		  if [ -n "$$LICENSEFILES" ]; then \
 		    $(if $(or $(LICENSE_FULL),$(LICENSE_FULL_$*)), \
-		      if [ -f "$$LICENSEFILE" ]; then cp $$LICENSEFILE $(WORKDIR)/$*.copyright; fi;, \
+		      catlicense $$LICENSEFILES > $(WORKDIR)/$*.copyright;, \
 		      echo "Please see $$LICENSEDIR/license for license information." > $(WORKDIR)/$*.copyright; \
 		    ) \
 		    umask 022 && mkdir -p $(PKGROOT)$$LICENSEDIR && \
 		    rm -f $(PKGROOT)$$LICENSEDIR/license && \
-		    cp $$LICENSEFILE $(PKGROOT)$$LICENSEDIR/license; \
+		    catlicense $$LICENSEFILES > $(PKGROOT)$$LICENSEDIR/license; \
 		  fi \
 		) \
 	)
@@ -1024,13 +1028,17 @@
 # this will also make it visible to the build environment. Some software builds
 # use hard-coded non-GNU make which then errs out on -I (unknown option).
 
+_PROPAGATE_ENV += PARALLELMFLAGS
+_PROPAGATE_ENV += PARALLELMODULATIONS
+_PROPAGATE_ENV += PATH
+
 platforms: _PACKAGING_PLATFORMS=$(if $(ARCHALL),$(firstword $(PACKAGING_PLATFORMS)),$(PACKAGING_PLATFORMS))
 platforms:
 	$(foreach P,$(_PACKAGING_PLATFORMS),\
 		$(if $(PACKAGING_HOST_$P),\
 			$(if $(filter $(THISHOST),$(PACKAGING_HOST_$P)),\
 				$(MAKE) GAR_PLATFORM=$P _package && ,\
-				$(SSH) -t $(PACKAGING_HOST_$P) "PATH=$$PATH:/opt/csw/bin $(MAKE) -I $(GARDIR) -C $(CURDIR) GAR_PLATFORM=$P _package" && \
+				$(SSH) -t $(PACKAGING_HOST_$P) "$(foreach V,$(_PROPAGATE_ENV),$(if $($V),$V=$($V))) $(MAKE) -I $(GARDIR) -C $(CURDIR) GAR_PLATFORM=$P _package" && \
 			),\
 			$(error *** No host has been defined for platform $P)\
 		)\
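
With the _PROPAGATE_ENV addition above, "gmake platforms" now forwards any of
PARALLELMFLAGS, PARALLELMODULATIONS and PATH that are set locally, instead of
only appending /opt/csw/bin to PATH. A rough sketch of the remote command this
expands to (host name, paths and values are hypothetical):

    ssh -t build10s "PATH=/usr/bin:/opt/csw/bin PARALLELMFLAGS=-j4 \
        gmake -I /path/to/gar -C /path/to/pkg/trunk \
        GAR_PLATFORM=solaris10-sparc _package"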

Modified: csw/mgar/gar/v2-yann/lib/map.solaris10u8
===================================================================
--- csw/mgar/gar/v2-yann/lib/map.solaris10u8	2012-11-27 17:39:49 UTC (rev 19753)
+++ csw/mgar/gar/v2-yann/lib/map.solaris10u8	2012-11-27 21:04:23 UTC (rev 19754)
@@ -2,6 +2,6 @@
 #   http://src.opensolaris.org/source/xref/onnv/onnv-gate/usr/src/lib/libc/port/mapfile-vers#301
 libc.so - SUNW_1.22.5 SUNWprivate_1.1 $ADDVERS=SUNW_1.22.5;
 #  http://src.opensolaris.org/source/xref/onnv/onnv-gate/usr/src/lib/libresolv2/common/mapfile-vers
-libresolv.so - SUNW_2.2 SUNWprivate_2.1 $ADDVERS=SUNW_2.2;
+libresolv.so - SUNW_2.2.1 SUNWprivate_2.1 $ADDVERS=SUNW_2.2.1;
 # http://src.opensolaris.org/source/xref/onnv/onnv-gate/usr/src/lib/libnsl/common/mapfile-vers
 libnsl.so - SUNW_1.9.1 SUNWprivate_1.5 $ADDVERS=SUNW_1.9.1;

Modified: csw/mgar/gar/v2-yann/lib/python/compare_pkgs.py
===================================================================
--- csw/mgar/gar/v2-yann/lib/python/compare_pkgs.py	2012-11-27 17:39:49 UTC (rev 19753)
+++ csw/mgar/gar/v2-yann/lib/python/compare_pkgs.py	2012-11-27 21:04:23 UTC (rev 19754)
@@ -1,4 +1,4 @@
-#!/usr/bin/env python2.6
+#!/opt/csw/bin/python2.6
 # coding=utf-8
 # vim:set sw=2 ts=2 sts=2 expandtab:
 #

Modified: csw/mgar/gar/v2-yann/lib/python/csw_upload_pkg.py
===================================================================
--- csw/mgar/gar/v2-yann/lib/python/csw_upload_pkg.py	2012-11-27 17:39:49 UTC (rev 19753)
+++ csw/mgar/gar/v2-yann/lib/python/csw_upload_pkg.py	2012-11-27 21:04:23 UTC (rev 19754)
@@ -1,4 +1,4 @@
-#!/opt/csw/bin/python2.6
+#!/usr/bin/env python2.6
 
 """csw_upload_pkg.py - uploads packages to the database.
 
@@ -54,6 +54,15 @@
 
 This amounts to 3x2x4 = 24 package catalogs total.
 
+= Removing packages from the catalog =
+
+The --remove option works the same way as a regular upload, except that it
+removes the assignments of a given package to catalogs instead of adding them.
+
+When removing packages from catalogs, files on disk are passed as arguments.
+On the buildfarm, all files are available under the /home/mirror/opencsw
+directory.
+
 For more information, see:
 http://wiki.opencsw.org/automated-release-process#toc0
 """
@@ -82,7 +91,8 @@
 
   def __init__(self, filenames, rest_url, os_release=None, debug=False,
       output_to_screen=True,
-      username=None, password=None):
+      username=None, password=None,
+      catrel=DEFAULT_CATREL):
     super(Srv4Uploader, self).__init__()
     if filenames:
       filenames = self.SortFilenames(filenames)
@@ -91,10 +101,14 @@
     self.debug = debug
     self.os_release = os_release
     self.rest_url = rest_url
-    self._rest_client = rest.RestClient(self.rest_url)
+    self._rest_client = rest.RestClient(
+        self.rest_url,
+        username=username,
+        password=password)
     self.output_to_screen = output_to_screen
     self.username = username
     self.password = password
+    self.catrel = catrel
 
   def _SetAuth(self, c):
     """Set basic HTTP auth options on given Curl object."""
@@ -166,12 +180,12 @@
       arch = file_metadata['arch']
       metadata_by_md5[md5_sum] = file_metadata
       catalogs = self._MatchSrv4ToCatalogs(
-          filename, DEFAULT_CATREL, arch, osrel, md5_sum)
+          filename, self.catrel, arch, osrel, md5_sum)
       for unused_catrel, cat_arch, cat_osrel in catalogs:
         planned_modifications.append(
             (filename, md5_sum,
              arch, osrel, cat_arch, cat_osrel))
-    # The plan:
+    # The plan: 
     # - Create groups of files to be inserted into each of the catalogs
     # - Invoke checkpkg to check every target catalog
     checkpkg_sets = self._CheckpkgSets(planned_modifications)
@@ -184,6 +198,66 @@
           file_metadata = metadata_by_md5[md5_sum]
           self._InsertIntoCatalog(filename, arch, osrel, file_metadata)
 
+  def Remove(self):
+    for filename in self.filenames:
+      self._RemoveFile(filename)
+
+  def _RemoveFile(self, filename):
+    md5_sum = self._GetFileMd5sum(filename)
+    file_in_allpkgs, file_metadata = self._GetSrv4FileMetadata(md5_sum)
+    if not file_metadata:
+      logging.warning("Could not find metadata for file %s", repr(filename))
+      return
+    osrel = file_metadata['osrel']
+    arch = file_metadata['arch']
+    catalogs = self._MatchSrv4ToCatalogs(
+        filename, DEFAULT_CATREL, arch, osrel, md5_sum)
+    for unused_catrel, cat_arch, cat_osrel in sorted(catalogs):
+      self._RemoveFromCatalog(filename, cat_arch, cat_osrel, file_metadata)
+
+  def _RemoveFromCatalog(self, filename, arch, osrel, file_metadata):
+    print("Removing %s (%s %s) from catalog %s %s %s"
+          % (file_metadata["catalogname"],
+             file_metadata["arch"],
+             file_metadata["osrel"],
+             DEFAULT_CATREL, arch, osrel))
+    md5_sum = self._GetFileMd5sum(filename)
+    basename = os.path.basename(filename)
+    parsed_basename = opencsw.ParsePackageFileName(basename)
+    # TODO: Move this bit to a separate class (RestClient)
+    url = (
+        "%s%s/catalogs/%s/%s/%s/%s/"
+        % (self.rest_url,
+           RELEASES_APP,
+           DEFAULT_CATREL,
+           arch,
+           osrel,
+           md5_sum))
+    logging.debug("DELETE @ URL: %s %s", type(url), url)
+    c = pycurl.Curl()
+    d = StringIO()
+    h = StringIO()
+    c.setopt(pycurl.URL, str(url))
+    c.setopt(pycurl.CUSTOMREQUEST, "DELETE")
+    c.setopt(pycurl.WRITEFUNCTION, d.write)
+    c.setopt(pycurl.HEADERFUNCTION, h.write)
+    c.setopt(pycurl.HTTPHEADER, ["Expect:"]) # Fixes the HTTP 417 error
+    c = self._SetAuth(c)
+    if self.debug:
+      c.setopt(c.VERBOSE, 1)
+    c.perform()
+    http_code = c.getinfo(pycurl.HTTP_CODE)
+    logging.debug(
+        "DELETE curl getinfo: %s %s %s",
+        type(http_code),
+        http_code,
+        c.getinfo(pycurl.EFFECTIVE_URL))
+    c.close()
+    if not (http_code >= 200 and http_code <= 299):
+      raise RestCommunicationError(
+          "%s - HTTP code: %s, content: %s"
+          % (url, http_code, d.getvalue()))
+
   def _GetFileMd5sum(self, filename):
     if filename not in self.md5_by_filename:
       logging.debug("_GetFileMd5sum(%s): Reading the file", filename)
@@ -280,57 +354,12 @@
           % (file_metadata["catalogname"],
              file_metadata["arch"],
              file_metadata["osrel"],
-             DEFAULT_CATREL, arch, osrel))
+             self.catrel, arch, osrel))
     md5_sum = self._GetFileMd5sum(filename)
     basename = os.path.basename(filename)
     parsed_basename = opencsw.ParsePackageFileName(basename)
     logging.debug("parsed_basename: %s", parsed_basename)
-    url = (
-        "%s%s/catalogs/%s/%s/%s/%s/"
-        % (self.rest_url,
-           RELEASES_APP,
-           DEFAULT_CATREL,
-           arch,
-           osrel,
-           md5_sum))
-    logging.debug("URL: %s %s", type(url), url)
-    c = pycurl.Curl()
-    d = StringIO()
-    h = StringIO()
-    # Bogus data to upload
-    s = StringIO()
-    c.setopt(pycurl.URL, str(url))
-    c.setopt(pycurl.PUT, 1)
-    c.setopt(pycurl.UPLOAD, 1)
-    c.setopt(pycurl.INFILESIZE_LARGE, s.len)
-    c.setopt(pycurl.READFUNCTION, s.read)
-    c.setopt(pycurl.WRITEFUNCTION, d.write)
-    c.setopt(pycurl.HEADERFUNCTION, h.write)
-    c.setopt(pycurl.HTTPHEADER, ["Expect:"]) # Fixes the HTTP 417 error
-    c = self._SetAuth(c)
-    if self.debug:
-      c.setopt(c.VERBOSE, 1)
-    c.perform()
-    http_code = c.getinfo(pycurl.HTTP_CODE)
-    logging.debug(
-        "curl getinfo: %s %s %s",
-        type(http_code),
-        http_code,
-        c.getinfo(pycurl.EFFECTIVE_URL))
-    c.close()
-    # if self.debug:
-    #   logging.debug("*** Headers")
-    #   logging.debug(h.getvalue())
-    #   logging.debug("*** Data")
-    if http_code >= 400 and http_code <= 599:
-      if not self.debug:
-        # In debug mode, all headers are printed to screen, and we aren't
-        # interested in the response body.
-        logging.fatal("Response: %s %s", http_code, d.getvalue())
-      raise RestCommunicationError("%s - HTTP code: %s" % (url, http_code))
-    else:
-      logging.debug("Response: %s %s", http_code, d.getvalue())
-    return http_code
+    return self._rest_client.AddSvr4ToCatalog(self.catrel, arch, osrel, md5_sum)
 
   def _GetSrv4FileMetadata(self, md5_sum):
     logging.debug("_GetSrv4FileMetadata(%s)", repr(md5_sum))
@@ -450,7 +479,7 @@
     args_by_cat = {}
     for arch, osrel in checkpkg_sets:
       print ("Checking %s package(s) against catalog %s %s %s"
-             % (len(checkpkg_sets[(arch, osrel)]), DEFAULT_CATREL, arch, osrel))
+             % (len(checkpkg_sets[(arch, osrel)]), self.catrel, arch, osrel))
       md5_sums = []
       basenames = []
       for filename, md5_sum in checkpkg_sets[(arch, osrel)]:
@@ -462,7 +491,7 @@
       # if it stays that way.
       args_by_cat[(arch, osrel)] = [
           checkpkg_executable,
-          "--catalog-release", DEFAULT_CATREL,
+          "--catalog-release", self.catrel,
           "--os-release", osrel,
           "--architecture", arch,
       ] + md5_sums
@@ -480,7 +509,7 @@
         print "To see errors, run:"
         print " ", " ".join(args_by_cat[(arch, osrel)])
       print ("Packages have not been submitted to the %s catalog."
-             % DEFAULT_CATREL)
+             % self.catrel)
     return not checks_failed_for_catalogs
 
 
@@ -489,6 +518,10 @@
   parser.add_option("-d", "--debug",
       dest="debug",
       default=False, action="store_true")
+  parser.add_option("--remove",
+      dest="remove",
+      default=False, action="store_true",
+      help="Remove packages from catalogs instead of adding them")
   parser.add_option("--os-release",
       dest="os_release",
       help="If specified, only uploads to the specified OS release.")
@@ -500,6 +533,12 @@
       dest="filename_check",
       default=True, action="store_false",
       help="Don't check the filename set (e.g. for a missing architecture)")
+  parser.add_option("--catalog-release",
+      dest="catrel",
+      default=DEFAULT_CATREL,
+      help=("Uploads to a specified named catalog. "
+            "Note that the server side only allows uploads to a limited "
+            "set of catalogs."))
   options, args = parser.parse_args()
   if options.debug:
     logging.basicConfig(level=logging.DEBUG)
@@ -529,20 +568,15 @@
     else:
       print "Continuing anyway."
 
-  username = os.environ["LOGNAME"]
-  authfile = os.path.join('/etc/opt/csw/releases/auth', username)
-
-  try:
-    with open(authfile, 'r') as af:
-      password = af.read().strip()
-  except IOError, e:
-    logging.warning("Error reading %s: %s", authfile, e)
-    password = getpass.getpass("{0}'s pkg release password> ".format(username))
-
+  username, password = rest.GetUsernameAndPassword()
   uploader = Srv4Uploader(args,
                           options.rest_url,
                           os_release=os_release,
                           debug=options.debug,
                           username=username,
-                          password=password)
-  uploader.Upload()
+                          password=password,
+                          catrel=options.catrel)
+  if options.remove:
+    uploader.Remove()
+  else:
+    uploader.Upload()
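
Two usage sketches for the options added above, using the installed command
name csw-upload-pkg (as referenced in safe_remove_package.py); the package
file name is hypothetical, and "kiel" is one of the named catalogs known to
the database (the server side decides which catalogs accept uploads):

    # upload into a named catalog instead of the default one
    csw-upload-pkg --catalog-release kiel foo-1.0,REV=2012.11.27-SunOS5.10-sparc-CSW.pkg.gz

    # drop the same file's catalog assignments instead of adding them
    csw-upload-pkg --remove foo-1.0,REV=2012.11.27-SunOS5.10-sparc-CSW.pkg.gz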

Modified: csw/mgar/gar/v2-yann/lib/python/database.py
===================================================================
--- csw/mgar/gar/v2-yann/lib/python/database.py	2012-11-27 17:39:49 UTC (rev 19753)
+++ csw/mgar/gar/v2-yann/lib/python/database.py	2012-11-27 21:04:23 UTC (rev 19754)
@@ -287,7 +287,7 @@
       logging.warning("Could not get file mtime: %s", e)
     d_mtime = time.gmtime(int(d_mtime_epoch))
     logging.debug("IsDatabaseUpToDate: f_mtime %s, d_time: %s", f_mtime, d_mtime)
-    # Rounding up to integer seconds.  There is a race condition: 
+    # Rounding up to integer seconds.  There is a race condition:
     # pkgadd finishes at 100.1
     # checkpkg reads /var/sadm/install/contents at 100.2
     # new pkgadd runs and finishes at 100.3

Modified: csw/mgar/gar/v2-yann/lib/python/generate_catalog_file.py
===================================================================
--- csw/mgar/gar/v2-yann/lib/python/generate_catalog_file.py	2012-11-27 17:39:49 UTC (rev 19753)
+++ csw/mgar/gar/v2-yann/lib/python/generate_catalog_file.py	2012-11-27 21:04:23 UTC (rev 19754)
@@ -81,8 +81,9 @@
     if os.path.exists(out_file):
       raise Error("File %s already exists." % out_file)
     lines = []
-    for pkg_data in self.catalog:
-      lines.append(self.ComposeCatalogLine(pkg_data))
+    if self.catalog:  # the catalog might be None
+      for pkg_data in self.catalog:
+        lines.append(self.ComposeCatalogLine(pkg_data))
     with open(out_file, "w") as fd:
       fd.write("\n".join(lines))
 
@@ -91,9 +92,10 @@
     if os.path.exists(out_file):
       raise Error("File %s already exists." % out_file)
     lines = []
-    for pkg_data in self.catalog:
-      pkg_stats = self.pkgcache.GetPkgstats(pkg_data["md5_sum"])
-      lines.append(pkg_stats["pkginfo"]["NAME"])
+    if self.catalog:
+      for pkg_data in self.catalog:
+        pkg_stats = self.pkgcache.GetPkgstats(pkg_data["md5_sum"])
+        lines.append(pkg_stats["pkginfo"]["NAME"])
     with open(out_file, "w") as fd:
       fd.write("\n".join(lines))
 

Modified: csw/mgar/gar/v2-yann/lib/python/integrate_catalogs.py
===================================================================
--- csw/mgar/gar/v2-yann/lib/python/integrate_catalogs.py	2012-11-27 17:39:49 UTC (rev 19753)
+++ csw/mgar/gar/v2-yann/lib/python/integrate_catalogs.py	2012-11-27 21:04:23 UTC (rev 19754)
@@ -23,28 +23,47 @@
 import rest
 import sys
 import urllib2
+import re
 
 
 CATALOG_MOD_TMPL = """#!/bin/bash
 # Catalog modification (not integration yet): $catrel_from -> $catrel_to
 # Generated by $prog
 
+
+if ! grep buildfarm ~/.netrc
+then
+  touch ~/.netrc
+  chmod 0600 ~/.netrc
+  echo >> ~/.netrc \
+    "machine buildfarm.opencsw.org login \${LOGNAME} password \$(cat /etc/opt/csw/releases/auth/\${LOGNAME})"
+fi
+
 set -x
 
-PKGDB=bin/pkgdb
+readonly CURL="curl --netrc"
+readonly REST_URL=http://buildfarm.opencsw.org/releases/
 
+function _add_to_cat {
+  \${CURL} -X PUT \${REST_URL}catalogs/$1/$2/$3/$4/
+}
+
+function _del_from_cat {
+  \${CURL} -X DELETE \${REST_URL}catalogs/$1/$2/$3/$4/
+}
+
 #for catalogname in $sorted($diffs_by_catalogname):
 #if "new_pkgs" in $diffs_by_catalogname[$catalogname]:
 function new_pkg_$catalogname {
 #for arch, osrel, new_pkg in $diffs_by_catalogname[$catalogname]["new_pkgs"]:
   # adding $new_pkg["basename"]
-  \${PKGDB} add-to-cat $osrel $arch $catrel_to $new_pkg["md5_sum"]
+  _add_to_cat $catrel_to $arch $osrel $new_pkg["md5_sum"]
 #end for
 }
 function undo_new_pkg_$catalogname {
 #for arch, osrel, new_pkg in $diffs_by_catalogname[$catalogname]["new_pkgs"]:
-  # adding $new_pkg["basename"]
-  \${PKGDB} del-from-cat $osrel $arch $catrel_to $new_pkg["md5_sum"]
+  # UNDO adding $new_pkg["basename"]
+  _del_from_cat $catrel_to $arch $osrel $new_pkg["md5_sum"]
 #end for
 }
 #end if
@@ -52,13 +71,13 @@
 function remove_pkg_$catalogname {
 #for arch, osrel, rem_pkg in $diffs_by_catalogname[$catalogname]["removed_pkgs"]:
   # removing $rem_pkg["basename"]
-  \${PKGDB} del-from-cat $osrel $arch $catrel_to $rem_pkg["md5_sum"]
+  _del_from_cat $catrel_to $arch $osrel $rem_pkg["md5_sum"]
 #end for
 }
 function undo_remove_pkg_$catalogname {
 #for arch, osrel, rem_pkg in $diffs_by_catalogname[$catalogname]["removed_pkgs"]:
-  # removing $rem_pkg["basename"]
-  \${PKGDB} add-to-cat $osrel $arch $catrel_to $rem_pkg["md5_sum"]
+  # UNDO removing $rem_pkg["basename"]
+  _add_to_cat $catrel_to $arch $osrel $rem_pkg["md5_sum"]
 #end for
 }
 #end if
@@ -75,15 +94,15 @@
   # WARNING: DOWNGRADE
 #end if
   # $catalogname $up_pkg_pair["direction"] from $up_pkg_pair["from"]["version"] to $up_pkg_pair["to"]["version"]
-  \${PKGDB} del-from-cat $osrel $arch $catrel_to $up_pkg_pair["from"]["md5_sum"]
-  \${PKGDB} add-to-cat $osrel $arch $catrel_to $up_pkg_pair["to"]["md5_sum"]
+  _del_from_cat $catrel_to $arch $osrel $up_pkg_pair["from"]["md5_sum"]
+  _add_to_cat $catrel_to $arch $osrel $up_pkg_pair["to"]["md5_sum"]
 #end for
 }
 function undo_upgrade_$catalogname {
 #for arch, osrel, up_pkg_pair in $diffs_by_catalogname[$catalogname]["updated_pkgs"]:
   # UNDO of $catalogname $up_pkg_pair["direction"] from $up_pkg_pair["from"]["version"] to $up_pkg_pair["to"]["version"]
-  \${PKGDB} del-from-cat $osrel $arch $catrel_to $up_pkg_pair["to"]["md5_sum"]
-  \${PKGDB} add-to-cat $osrel $arch $catrel_to $up_pkg_pair["from"]["md5_sum"]
+  _del_from_cat $catrel_to $arch $osrel $up_pkg_pair["to"]["md5_sum"]
+  _add_to_cat $catrel_to $arch $osrel $up_pkg_pair["from"]["md5_sum"]
 #end for
 }
 #end if
@@ -116,7 +135,6 @@
 #end for
 """
 
-
 class Error(Exception):
   """Generic error."""
 
@@ -202,10 +220,12 @@
       # By passing the catalogs (as arguments) in reverse order, we get
       # packages to be updated in new_pkgs, and so forth.
       for pkg in new_pkgs:
-        catalogname_d = diffs_by_catalogname.setdefault(pkg["catalogname"], {})
+        catalogname_d = diffs_by_catalogname.setdefault(
+            (pkg["catalogname"]), {})
         catalogname_d.setdefault("new_pkgs", []).append((arch, osrel, pkg))
       for pkg in removed_pkgs:
-        catalogname_d = diffs_by_catalogname.setdefault(pkg["catalogname"], {})
+        catalogname_d = diffs_by_catalogname.setdefault(
+            (pkg["catalogname"]), {})
         catalogname_d.setdefault("removed_pkgs", []).append((arch, osrel, pkg))
       for pkg_pair in updated_pkgs:
         update_decision_by_type = {
@@ -215,7 +235,8 @@
         if (update_decision_by_type[pkg_pair["type"]]
             and (pkg_pair["direction"] == "upgrade" or include_downgrades)):
           pkg = pkg_pair["from"]
-          catalogname_d = diffs_by_catalogname.setdefault(pkg["catalogname"], {})
+          catalogname_d = diffs_by_catalogname.setdefault(
+              (pkg["catalogname"]), {})
           catalogname_d.setdefault("updated_pkgs", []).append((arch, osrel, pkg_pair))
   return diffs_by_catalogname
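
The generated modification script now talks to the releases REST interface
directly through curl --netrc instead of going through bin/pkgdb. Expanded,
a single _add_to_cat or _del_from_cat call is roughly equivalent to the
following (catalog, arch, OS release and md5 sum are placeholders):

    curl --netrc -X PUT    http://buildfarm.opencsw.org/releases/catalogs/kiel/sparc/SunOS5.10/<md5_sum>/
    curl --netrc -X DELETE http://buildfarm.opencsw.org/releases/catalogs/kiel/sparc/SunOS5.10/<md5_sum>/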
 

Modified: csw/mgar/gar/v2-yann/lib/python/opencsw_test.py
===================================================================
--- csw/mgar/gar/v2-yann/lib/python/opencsw_test.py	2012-11-27 17:39:49 UTC (rev 19753)
+++ csw/mgar/gar/v2-yann/lib/python/opencsw_test.py	2012-11-27 21:04:23 UTC (rev 19754)
@@ -406,10 +406,16 @@
     self.assertEquals("stuf_with_some_dashes",
                       opencsw.PkgnameToCatName("STUFwith-some-dashes"))
 
-  def testPkgnameToCatName5(self):
+  def testPkgnameToCatNameDifferentiatesByDigit(self):
     self.assertNotEquals(opencsw.PkgnameToCatName("SUNWi4rf"),
                          opencsw.PkgnameToCatName("SUNWi7rf"))
 
+  def testPkgnameToCatNameDigit(self):
+    self.assertEquals("sunw_i8rf", opencsw.PkgnameToCatName("SUNWi8rf"))
+
+  def testPkgnameToCatNameTwoDigits(self):
+    self.assertEquals("sunw_i13rf", opencsw.PkgnameToCatName("SUNWi13rf"))
+
   def test_4(self):
     pkginfo_dict = opencsw.ParsePkginfo(TEST_PKGINFO.splitlines())
     expected = "sunw_bash-11.10.0,REV=2005.01.08.01.09-SunOS5.10-i386-SUNW.pkg"

Modified: csw/mgar/gar/v2-yann/lib/python/package.py
===================================================================
--- csw/mgar/gar/v2-yann/lib/python/package.py	2012-11-27 17:39:49 UTC (rev 19753)
+++ csw/mgar/gar/v2-yann/lib/python/package.py	2012-11-27 21:04:23 UTC (rev 19754)
@@ -280,7 +280,7 @@
       basedir = pkginfo[basedir_id]
     else:
       basedir = ""
-    # The convention in checkpkg is to not include the leading slash in paths. 
+    # The convention in checkpkg is to not include the leading slash in paths.
     basedir = basedir.lstrip("/")
     return basedir
 

Modified: csw/mgar/gar/v2-yann/lib/python/pkgdb.py
===================================================================
--- csw/mgar/gar/v2-yann/lib/python/pkgdb.py	2012-11-27 17:39:49 UTC (rev 19753)
+++ csw/mgar/gar/v2-yann/lib/python/pkgdb.py	2012-11-27 21:04:23 UTC (rev 19754)
@@ -1,4 +1,4 @@
-#!/usr/bin/env python2.6
+#!/opt/csw/bin/python2.6
 # coding=utf-8
 #
 # $Id$
@@ -81,9 +81,10 @@
   "unstable",
   "dublin",
   "kiel",
+  "bratislava",
 ])
 CATALOGS_ALLOWED_TO_BE_IMPORTED = frozenset([
-  "current",
+  "unstable",
 ])
 
 
@@ -621,7 +622,7 @@
             os.unlink(existing_path)
           else:
             logging.debug("Not unlinking %s", existing_path)
-        logging.debug("Existing files: %s", len(existing_files))
+        logging.debug("Number of existing files: %s", len(existing_files))
         for pkg in pkgs:
           src_path = os.path.join(allpkgs_dir, pkg.basename)
           if not os.path.exists(src_path):

Modified: csw/mgar/gar/v2-yann/lib/python/rest.py
===================================================================
--- csw/mgar/gar/v2-yann/lib/python/rest.py	2012-11-27 17:39:49 UTC (rev 19753)
+++ csw/mgar/gar/v2-yann/lib/python/rest.py	2012-11-27 21:04:23 UTC (rev 19754)
@@ -1,13 +1,16 @@
 #!/usr/bin/env python2.6
 
+import os
+from StringIO import StringIO
 import cjson
 import gdbm
 import logging
 import urllib2
+import pycurl
 
 DEFAULT_URL = "http://buildfarm.opencsw.org"
+RELEASES_APP = "/releases"
 
-
 class Error(Exception):
   """Generic error."""
 
@@ -16,12 +19,20 @@
   """Wrong arguments passed."""
 
 
+class RestCommunicationError(Error):
+  """An error during REST request processing."""
+
+
 class RestClient(object):
 
   PKGDB_APP = "/pkgdb/rest"
 
-  def __init__(self, rest_url=DEFAULT_URL):
+  def __init__(self, rest_url=DEFAULT_URL, username=None, password=None,
+      debug=False):
     self.rest_url = rest_url
+    self.username = username
+    self.password = password
+    self.debug = debug
 
   def GetPkgByMd5(self, md5_sum):
     url = self.rest_url + self.PKGDB_APP + "/srv4/%s/" % md5_sum
@@ -86,7 +97,108 @@
     data = urllib2.urlopen(url).read()
     return cjson.decode(data)
 
+  def Srv4ByCatalogAndPkgname(self, catrel, arch, osrel, pkgname):
+    """Returns a srv4 data structure or None if not found."""
+    url = self.rest_url + self.PKGDB_APP + (
+        "/catalogs/%s/%s/%s/pkgnames/%s/"
+        % (catrel, arch, osrel, pkgname))
+    logging.debug("Srv4ByCatalogAndPkgname(): GET %s", url)
+    # The server is no longer returning 404 when the package is absent.  If
+    # a HTTP error code is returned, we're letting the application fail.
+    data = urllib2.urlopen(url).read()
+    return cjson.decode(data)
 
+  def _SetAuth(self, c):
+    """Set basic HTTP auth options on given Curl object."""
+    if self.username:
+      logging.debug("Using basic AUTH for user %s", self.username)
+      c.setopt(pycurl.HTTPAUTH, pycurl.HTTPAUTH_ANY)
+      c.setopt(pycurl.USERPWD, "%s:%s" % (self.username, self.password))
+    else:
+      logging.debug("User and password not set, not using HTTP AUTH")
+    return c
+
+  def RemoveSvr4FromCatalog(self, catrel, arch, osrel, md5_sum):
+    url = (
+        "%s%s/catalogs/%s/%s/%s/%s/"
+        % (self.rest_url,
+           RELEASES_APP,
+           catrel, arch, osrel,
+           md5_sum))
+    logging.debug("DELETE @ URL: %s %s", type(url), url)
+    c = pycurl.Curl()
+    d = StringIO()
+    h = StringIO()
+    c.setopt(pycurl.URL, str(url))
+    c.setopt(pycurl.CUSTOMREQUEST, "DELETE")
+    c.setopt(pycurl.WRITEFUNCTION, d.write)
+    c.setopt(pycurl.HEADERFUNCTION, h.write)
+    c.setopt(pycurl.HTTPHEADER, ["Expect:"]) # Fixes the HTTP 417 error
+    c = self._SetAuth(c)
+    if self.debug:
+      c.setopt(c.VERBOSE, 1)
+    c.perform()
+    http_code = c.getinfo(pycurl.HTTP_CODE)
+    logging.debug(
+        "DELETE curl getinfo: %s %s %s",
+        type(http_code),
+        http_code,
+        c.getinfo(pycurl.EFFECTIVE_URL))
+    c.close()
+    if not (http_code >= 200 and http_code <= 299):
+      raise RestCommunicationError(
+          "%s - HTTP code: %s, content: %s"
+          % (url, http_code, d.getvalue()))
+
+  def AddSvr4ToCatalog(self, catrel, arch, osrel, md5_sum):
+    url = (
+        "%s%s/catalogs/%s/%s/%s/%s/"
+        % (self.rest_url,
+           RELEASES_APP,
+           catrel,
+           arch,
+           osrel,
+           md5_sum))
+    logging.debug("URL: %s %s", type(url), url)
+    c = pycurl.Curl()
+    d = StringIO()
+    h = StringIO()
+    # Bogus data to upload
+    s = StringIO()
+    c.setopt(pycurl.URL, str(url))
+    c.setopt(pycurl.PUT, 1)
+    c.setopt(pycurl.UPLOAD, 1)
+    c.setopt(pycurl.INFILESIZE_LARGE, s.len)
+    c.setopt(pycurl.READFUNCTION, s.read)
+    c.setopt(pycurl.WRITEFUNCTION, d.write)
+    c.setopt(pycurl.HEADERFUNCTION, h.write)
+    c.setopt(pycurl.HTTPHEADER, ["Expect:"]) # Fixes the HTTP 417 error
+    c = self._SetAuth(c)
+    if self.debug:
+      c.setopt(c.VERBOSE, 1)
+    c.perform()
+    http_code = c.getinfo(pycurl.HTTP_CODE)
+    logging.debug(
+        "curl getinfo: %s %s %s",
+        type(http_code),
+        http_code,
+        c.getinfo(pycurl.EFFECTIVE_URL))
+    c.close()
+    # if self.debug:
+    #   logging.debug("*** Headers")
+    #   logging.debug(h.getvalue())
+    #   logging.debug("*** Data")
+    if http_code >= 400 and http_code <= 599:
+      if not self.debug:
+        # In debug mode, all headers are printed to screen, and we aren't
+        # interested in the response body.
+        logging.fatal("Response: %s %s", http_code, d.getvalue())
+      raise RestCommunicationError("%s - HTTP code: %s" % (url, http_code))
+    else:
+      logging.debug("Response: %s %s", http_code, d.getvalue())
+    return http_code
+
+
 class CachedPkgstats(object):
   """Class responsible for holding and caching package stats.
 
@@ -120,3 +232,15 @@
               "pkgname": pkgstats["basic_stats"]["pkgname"]}
       self.deps[md5] = cjson.encode(data)
       return data
+
+def GetUsernameAndPassword():
+  username = os.environ["LOGNAME"]
+  password = None
+  authfile = os.path.join('/etc/opt/csw/releases/auth', username)
+  try:
+    with open(authfile, 'r') as af:
+      password = af.read().strip()
+  except IOError, e:
+    logging.warning("Error reading %s: %s", authfile, e)
+    password = getpass.getpass("{0}'s pkg release password> ".format(username))
+  return username, password

Modified: csw/mgar/gar/v2-yann/lib/python/safe_remove_package.py
===================================================================
--- csw/mgar/gar/v2-yann/lib/python/safe_remove_package.py	2012-11-27 17:39:49 UTC (rev 19753)
+++ csw/mgar/gar/v2-yann/lib/python/safe_remove_package.py	2012-11-27 21:04:23 UTC (rev 19754)
@@ -6,7 +6,8 @@
 
   - checks all the catalogs and gets the md5 sums for each catalog
   - checks for reverse dependencies; if there are any, stops
-  - when there are no rev deps, prints a csw-upload-pkg --remove call
+  - when there are no rev deps, makes a REST call to remove the package
+
 """
 
 import optparse
@@ -18,12 +19,34 @@
 import sys
 import os
 import cjson
-import subprocess
 
+USAGE = """%prog --os-releases=SunOS5.10,SunOS5.11 -c <catalogname>
 
+A practical usage example - let's say we have a list of packages to remove in
+a file named 'pkg-list.txt'. We'll also have a cache of packages already
+removed in packages_dropped_cache.txt. The following call will remove the
+listed packages:
+
+for p in $(cat pkg-list.txt)
+do
+	if ! ggrep "^$p\$" packages_dropped_cache.txt > /dev/null
+  then
+    ./safe_remove_package.py \\
+        --os-releases=SunOS5.10,SunOS5.11 \\
+        -c "$p"
+  fi
+done
+"""
+
+
+UNSTABLE = "unstable"
+
 class Error(Exception):
   """A generic error."""
 
+class DataError(Exception):
+  """Wrong data encountered."""
+
 class RevDeps(object):
 
   def __init__(self):
@@ -43,7 +66,6 @@
     catalog = self.rest_client.GetCatalog(*key)
     rev_deps = {}
     for pkg_simple in catalog:
-      # pprint.pprint(pkg_simple)
       md5 = pkg_simple["md5_sum"]
       # pkg = self.cp.GetPkgstats(md5)
       short_data = self.cp.GetDeps(md5)
@@ -68,14 +90,20 @@
 
 class PackageRemover(object):
 
+  def CachePackageIsGone(self, catalogname):
+    with open("packages_dropped_cache.txt", "ab") as fd:
+      fd.write("{0}\n".format(catalogname))
+
   def RemovePackage(self, catalogname, execute=False, os_releases=None):
     if not os_releases:
       os_releases = common_constants.OS_RELS
-    # Get md5 sums
-    rest_client = rest.RestClient()
+    username, password = rest.GetUsernameAndPassword()
+    rest_client = rest.RestClient(username=username, password=password)
     rd = RevDeps()
     rev_deps = {}
-    to_remove = {}
+    # md5 sums to remove
+    to_remove = []
+    found_anywhere = False
     for osrel in os_releases:
       if osrel not in common_constants.OS_RELS:
         logging.warning(
@@ -86,41 +114,52 @@
         logging.info("%s is an obsolete OS release. Skipping.", osrel)
         continue
       for arch in common_constants.PHYSICAL_ARCHITECTURES:
-        pkg_simple = rest_client.Srv4ByCatalogAndCatalogname("unstable", arch, osrel, catalogname)
+        pkg_simple = rest_client.Srv4ByCatalogAndCatalogname(UNSTABLE, arch, osrel, catalogname)
+        if not pkg_simple:
+          # Maybe we were given a pkgname instead of a catalogname? We can try
+          # that before failing.
+          pkg_simple = rest_client.Srv4ByCatalogAndPkgname(
+              UNSTABLE, arch, osrel, catalogname)
+          if not pkg_simple:
+            msg = "{0} was not in the unstable {1} {2} catalog."
+            logging.debug(msg.format(repr(catalogname), arch, osrel))
+            continue
+        if pkg_simple:
+          found_anywhere = True
         md5 = pkg_simple["md5_sum"]
         pkg = rd.cp.GetPkgstats(md5)
-        key = "unstable", arch, osrel
-        cat_rev_deps = rd.RevDeps("unstable", arch, osrel, md5)
+        key = UNSTABLE, arch, osrel
+        cat_rev_deps = rd.RevDeps(UNSTABLE, arch, osrel, md5)
         if cat_rev_deps:
           rev_deps[key] = cat_rev_deps
-        f = (
-            "/home/mirror/opencsw/unstable/%s/%s/%s"
-            % (arch, osrel.replace("SunOS", ""), pkg["basic_stats"]["pkg_basename"]))
-        files = to_remove.setdefault(osrel, [])
-        files.append(f)
+        to_remove.append((UNSTABLE, arch, osrel, md5))
+    if not found_anywhere:
+      self.CachePackageIsGone(catalogname)
     if rev_deps:
-      print "Reverse dependencies found. Bailing out."
-      pprint.pprint(rev_deps)
+      print "Not removing, rev-deps present: ",
+      print pkg_simple["catalogname"], ":", " ; ".join(
+          ["%s %s %s %s"
+            % (x[0], x[1], x[2], ",".join(y[1] for y in rev_deps[x]))
+            for x in rev_deps])
     else:
-      for osrel in to_remove:
-        args = ["csw-upload-pkg", "--remove", "--os-release",
-            osrel] + to_remove[osrel]
-        print " ".join(args)
+      for catrel, arch, osrel, md5_sum in to_remove:
+        print "# [%s]" % pkg_simple["catalogname"], catrel, arch, osrel, md5_sum
         if execute:
-          subprocess.call(args)
+          rest_client.RemoveSvr4FromCatalog(catrel, arch, osrel, md5_sum)
+      if found_anywhere:
+        self.CachePackageIsGone(catalogname)
 
 
-
 def main():
-  parser = optparse.OptionParser()
+  parser = optparse.OptionParser(USAGE)
   parser.add_option("-c", "--catalogname", dest="catalogname")
   parser.add_option("--os-releases", dest="os_releases",
                     help=("Comma separated OS releases, e.g. "
                           "SunOS5.9,SunOS5.10"))
   parser.add_option("--debug", dest="debug", action="store_true")
-  parser.add_option("--execute", dest="execute", action="store_true",
-                    help=("Don't just display, but execute and remove the "
-                          "packages."))
+  parser.add_option("--dry-run", dest="dry_run",
+                    default=False, action="store_true",
+                    help=("Don't apply changes (no REST calls)."))
   options, args = parser.parse_args()
   debug_level = logging.INFO
   if options.debug:
@@ -130,7 +169,7 @@
   if options.os_releases:
     os_releases = options.os_releases.split(",")
   pr = PackageRemover()
-  pr.RemovePackage(options.catalogname, options.execute, os_releases)
+  pr.RemovePackage(options.catalogname, not options.dry_run, os_releases)
 
 
 if __name__ == '__main__':
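
Note that the --execute flag is replaced by --dry-run: the script now removes
packages by default, and --dry-run only prints the planned removals without
making REST calls. A short sketch (the catalogname "foo" is hypothetical):

    # preview what would be removed from the unstable catalogs
    ./safe_remove_package.py --os-releases=SunOS5.10,SunOS5.11 -c foo --dry-run

    # actually remove it (uses the release credentials from /etc/opt/csw/releases/auth)
    ./safe_remove_package.py --os-releases=SunOS5.10,SunOS5.11 -c foo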

Modified: csw/mgar/gar/v2-yann/lib/python/system_pkgmap.py
===================================================================
--- csw/mgar/gar/v2-yann/lib/python/system_pkgmap.py	2012-11-27 17:39:49 UTC (rev 19753)
+++ csw/mgar/gar/v2-yann/lib/python/system_pkgmap.py	2012-11-27 21:04:23 UTC (rev 19754)
@@ -209,7 +209,7 @@
       pkgnames.extend(parts[6:])
     elif file_type == '?':
       # Does not follow the specfication.  A specimen:
-      # /opt/csw/gcc3/lib/gcc/sparc-sun-solaris2.8/3.4.6/include 
+      # /opt/csw/gcc3/lib/gcc/sparc-sun-solaris2.8/3.4.6/include
       # ? none CSWgcc3g77 CSWgcc3core
       logging.warning("File type of %s is '?', assuming it's a directory.",
                       parts[0])
@@ -301,7 +301,7 @@
   def _GetArch(self):
     return self._GetUname("-p")
 
-  def GetDataStructure(self, srv4_pkgcontent_stream, srv4_pkginfo_stream, 
+  def GetDataStructure(self, srv4_pkgcontent_stream, srv4_pkginfo_stream,
                        ips_pkgcontent_stream, ips_pkginfo_stream,
                        osrel, arch, show_progress=False):
     """Gets the data structure to be pickled.
@@ -316,7 +316,7 @@
     }
     if ips_pkginfo_stream and ips_pkgcontent_stream:
       data["contents"].extend(self._ParsePkgContents(ips_pkgcontent_stream, self._ParseIpsPkgContentsLine, show_progress))
-      data["pkginfo"].update(self._ParsePkgInfos(ips_pkgcontent_stream, self._ParseIpsPkgListLine, show_progress))
+      data["pkginfo"].update(self._ParsePkgInfos(ips_pkginfo_stream, self._ParseIpsPkgListLine, show_progress))
 
     return data
 
@@ -328,12 +328,12 @@
     if self.osrel in common_constants.IPS_OS_RELS:
       ips_pkgcontents_stream = self._GetIpsPkgcontentStream()
       ips_pkginfos_stream = self._GetIpsPkginfosStream()
-    else: 
+    else:
       ips_pkgcontents_stream = None
       ips_pkginfos_stream = None
-      
-    data = self.GetDataStructure(srv4_pkgcontents_stream, srv4_pkginfos_stream, 
-                                 ips_pkgcontents_stream, ips_pkginfos_stream, 
+
+    data = self.GetDataStructure(srv4_pkgcontents_stream, srv4_pkginfos_stream,
+                                 ips_pkgcontents_stream, ips_pkginfos_stream,
                                  self.osrel, self.arch, show_progress)
     return data
 
@@ -367,7 +367,7 @@
       stdout, stderr = pkginfo_proc.communicate()
       ret = pkginfo_proc.wait()
       pkginfo_stream = stdout.splitlines()
-    
+
     return pkginfo_stream
 
   def _GetIpsPkginfosStream(self):
@@ -437,6 +437,8 @@
     for sqo_srv4 in res:
       for srv4_in_cat in sqo_srv4.in_catalogs:
         srv4_in_cat.destroySelf()
+      sqo_srv4.RemoveAllCswFiles()
+      sqo_srv4.destroySelf()
 
   def ImportFromFile(self, in_fd, show_progress=False):
     logging.debug("Unpickling data")
@@ -506,6 +508,13 @@
       self.fake_srv4_cache[key] = sqo_srv4
     return self.fake_srv4_cache[key]
 
+  def _GetPbar(self, show_progress):
+    if show_progress:
+      pbar = progressbar.ProgressBar()
+    else:
+      pbar = mute_progressbar.MuteProgressBar()
+    return pbar
+
   def _ImportFiles(self, data, include_prefixes=None, show_progress=False):
     logging.debug("_ImportFiles()")
     osrel = data["osrel"]
@@ -521,10 +530,7 @@
       progressbar_divisor = 1
     update_period = 1L
     count = itertools.count()
-    if show_progress:
-      pbar = progressbar.ProgressBar()
-    else:
-      pbar = mute_progressbar.MuteProgressBar()
+    pbar = self._GetPbar(show_progress)
     pbar.maxval = len(contents) / progressbar_divisor
     pbar.start()
     cleaned_pkgs = set()
@@ -574,12 +580,19 @@
             srv4_file=sqo_srv4)
         srv4_files_to_catalog.add(sqo_srv4)
     pbar.finish()
-    logging.debug(
-        "Registering all the fake srv4 files in all catalogs.")
+    logging.info(
+        "Registering the fake svr4 files (of system packages) "
+        "in all catalogs.")
+    count = itertools.count()
+    pbar = self._GetPbar(show_progress)
+    pbar.maxval = len(srv4_files_to_catalog)
+    pbar.start()
     for sqo_srv4 in srv4_files_to_catalog:
       for sqo_catrel in m.CatalogRelease.select():
         catalog.AddSrv4ToCatalog(
             sqo_srv4, osrel, arch, sqo_catrel.name)
+      pbar.update(count.next())
+    pbar.finish()
 
   def ComposeFakeSrv4Md5(self, pkgname, osrel, arch):
     """Returns a fake md5 sum of a fake srv4 package.

Modified: csw/mgar/gar/v2-yann/lib/web/pkgdb_web.py
===================================================================
--- csw/mgar/gar/v2-yann/lib/web/pkgdb_web.py	2012-11-27 17:39:49 UTC (rev 19753)
+++ csw/mgar/gar/v2-yann/lib/web/pkgdb_web.py	2012-11-27 21:04:23 UTC (rev 19754)
@@ -38,6 +38,7 @@
   r'/catalognames/([^/]+)/', 'Catalogname',
 )
 urls_rest = (
+  r'/rest/catalogs/', 'RestCatalogList',
   r'/rest/catalogs/([^/]+)/(sparc|i386)/(SunOS[^/]+)/', 'Catalogs',
   r'/rest/catalogs/([^/]+)/(sparc|i386)/(SunOS[^/]+)/pkgname-by-filename',
       'PkgnameByFilename',
@@ -439,7 +440,22 @@
     except sqlobject.dberrors.OperationalError, e:
       raise web.internalerror(e)
 
+class RestCatalogList(object):
+  def GET(self):
+    archs = models.Architecture.select()
+    osrels = models.OsRelease.select()
+    catrels = models.CatalogRelease.select()
+    catalogs = []
+    for catrel in catrels:
+      for arch in archs:
+        if arch.name in ('all',): continue
+        for osrel in osrels:
+          if osrel.full_name == 'unspecified': continue
+          key = [osrel.short_name, arch.name, catrel.name]
+          catalogs.append(key)
+    return cjson.encode(catalogs)
 
+
 web.webapi.internalerror = web.debugerror
 
 

Modified: csw/mgar/gar/v2-yann/lib/web/releases_web.py
===================================================================
--- csw/mgar/gar/v2-yann/lib/web/releases_web.py	2012-11-27 17:39:49 UTC (rev 19753)
+++ csw/mgar/gar/v2-yann/lib/web/releases_web.py	2012-11-27 21:04:23 UTC (rev 19754)
@@ -34,6 +34,7 @@
 
 OPENCSW_ROOT = "/home/mirror/opencsw-official"
 ALLPKGS_DIR = os.path.join(OPENCSW_ROOT, "allpkgs")
+CAN_UPLOAD_TO_CATALOGS = frozenset(["unstable", "kiel", "bratislava"])
 
 def ConnectToDatabase():
   configuration.SetUpSqlobjectConnection()
@@ -146,7 +147,7 @@
     stuck and I don't know why.
     """
     configuration.SetUpSqlobjectConnection()
-    if catrel_name != 'unstable':
+    if catrel_name not in CAN_UPLOAD_TO_CATALOGS:
       # Updates via web are allowed only for the unstable catalog.
       # We should return an error message instead.
       raise web.notfound()
