[csw-devel] SF.net SVN: gar:[6894] csw/mgar/gar

dmichelsen at users.sourceforge.net dmichelsen at users.sourceforge.net
Sun Oct 18 19:29:04 CEST 2009


Revision: 6894
          http://gar.svn.sourceforge.net/gar/?rev=6894&view=rev
Author:   dmichelsen
Date:     2009-10-18 17:29:04 +0000 (Sun, 18 Oct 2009)

Log Message:
-----------
mGAR v2: Fork collapsed modulations

Added Paths:
-----------
    csw/mgar/gar/v2-collapsed-modulations/
    csw/mgar/gar/v2-collapsed-modulations/gar.lib.mk
    csw/mgar/gar/v2-collapsed-modulations/gar.mk

Removed Paths:
-------------
    csw/mgar/gar/v2-collapsed-modulations/gar.lib.mk
    csw/mgar/gar/v2-collapsed-modulations/gar.mk

Deleted: csw/mgar/gar/v2-collapsed-modulations/gar.lib.mk
===================================================================
--- csw/mgar/gar/v2/gar.lib.mk	2009-10-09 11:59:38 UTC (rev 6808)
+++ csw/mgar/gar/v2-collapsed-modulations/gar.lib.mk	2009-10-18 17:29:04 UTC (rev 6894)
@@ -1,753 +0,0 @@
-# vim: ft=make ts=4 sw=4 noet
-#
-# $Id$
-#
-# Copyright (C) 2001 Nick Moffitt
-# 
-# Redistribution and/or use, with or without modification, is
-# permitted.  This software is without warranty of any kind.  The
-# author(s) shall not be liable in the event that use of the
-# software causes damage.
-#
-
-# cookies go here, so we have to be able to find them for
-# dependency checking.
-VPATH += $(COOKIEDIR)
-
-# convenience variable to make the cookie.
-MAKECOOKIE = mkdir -p $(COOKIEDIR)/$(@D) && date >> $(COOKIEDIR)/$@
-
-URLSTRIP = $(subst ://,//,$(1))
-
-# if you need to proxy git:// connections, set GIT_USE_PROXY.  There is a
-# default proxy script that works with the (squid?) proxy at the BO buildfarm.
-# override GIT_PROXY_SCRIPT to something else if you need to.
-GIT_MAYBEPROXY = $(if $(GIT_USE_PROXY),GIT_PROXY_COMMAND=$(GIT_PROXY_SCRIPT))
-GIT_TREEISH = $(if $(GIT_TREEISH_$(1)),$(GIT_TREEISH_$(1)),HEAD)
-
-#################### FETCH RULES ####################
-
-URLS = $(call URLSTRIP,$(foreach SITE,$(FILE_SITES) $(MASTER_SITES),$(addprefix $(SITE),$(DISTFILES))) $(foreach SITE,$(FILE_SITES) $(PATCH_SITES) $(MASTER_SITES),$(addprefix $(SITE),$(PATCHFILES) $(foreach M,$(MODULATIONS),$(PATCHFILES_$M)))))
-
-# if the caller has defined _postinstall, etc targets for a package, add
-# these 'dynamic script' targets to our fetch list
-URLS += $(foreach DYN,$(DYNSCRIPTS),dynscr//$(DYN))
-
-ifdef GIT_REPOS
-URLS += $(foreach R,$(GIT_REPOS),gitrepo//$(call GITPROJ,$(R)) $(subst http,git-http,$(call URLSTRIP,$(R))))
-endif
-
-# Download the file if and only if it doesn't have a preexisting
-# checksum file.  Loop through available URLs and stop when you
-# get one that doesn't return an error code.
-$(DOWNLOADDIR)/%:  
-	@if test -f $(COOKIEDIR)/checksum-$*; then : ; else \
-		echo " ==> Grabbing $@"; \
-		for i in $(filter %/$*,$(URLS)); do  \
-			echo " 	==> Trying $$i"; \
-			$(MAKE) -s $$i || continue; \
-			mv $(PARTIALDIR)/$* $@; \
-			break; \
-		done; \
-		if test -r $@ ; then : ; else \
-			echo '(!!!) Failed to download $@!' 1>&2; \
-			false; \
-		fi; \
-	fi
-
-gitrepo//%:
-	@( if [ -d $(GARCHIVEDIR)/$(call GITPROJ,$*) ]; then \
-		( cd $(GARCHIVEDIR)/$(call GITPROJ,$*); \
-			$(GIT_MAYBEPROXY) git --bare fetch ) && \
-		gln -s $(GARCHIVEDIR)/$(call GITPROJ,$*)/ $(PARTIALDIR)/$(call GITPROJ,$*); \
-	   else \
-		false; \
-	  fi )
-
-# the git remote add commands are so that we can later do a fetch
-# to update the code.
-# we possibly proxy the git:// references depending on GIT_USE_PROXY
-git-http//%:
-	@git clone --bare http://$* $(PARTIALDIR)/$(call GITPROJ,$*)
-	@( cd $(PARTIALDIR)/$(call GITPROJ,$*); \
-		git remote add origin http://$*; \
-		git config remote.origin.fetch $(if $(GIT_REFS_$(call GITPROJ,$*)),$(GIT_REFS_$(call GITPROJ,$*)),$(GIT_DEFAULT_TRACK)); )
-
-git//%:
-	@$(GIT_MAYBEPROXY) git clone --bare git://$* $(PARTIALDIR)/$(call GITPROJ,$*)
-	@( cd $(PARTIALDIR)/$(call GITPROJ,$*); \
-		git remote add origin git://$*; \
-		git config remote.origin.fetch $(if $(GIT_REFS_$(call GITPROJ,$*)),$(GIT_REFS_$(call GITPROJ,$*)),$(GIT_DEFAULT_TRACK)); )
-
-# create ADMSCRIPTS 'on the fly' from variables defined by the caller
-# This version is private and should only be called from the non-private
-# version directly below
-_dynscr//%:
-	$($(subst .,_,$*))
-
-dynscr//%:
-	$(MAKE) --no-print-directory -n _$@ > $(PARTIALDIR)/$*
-
-# download an http URL (colons omitted)
-http//%: 
-	@wget $(WGET_OPTS) -T 30 -c -P $(PARTIALDIR) http://$*
-
-https//%: 
-	@wget $(WGET_OPTS) -T 30 -c -P $(PARTIALDIR) https://$*
-
-# download an ftp URL (colons omitted)
-#ftp//%: 
-#	@wget -T 30 -c --passive-ftp -P $(PARTIALDIR) ftp://$*
-ftp//%: 
-	@wget $(WGET_OPTS) -T 30 -c -P $(PARTIALDIR) ftp://$*
-
-# link to a local copy of the file
-# (absolute path)
-file///%: 
-	@if test -f /$*; then \
-		gln -sf /$* $(PARTIALDIR)/$(notdir $*); \
-	else \
-		false; \
-	fi
-
-# link to a local copy of the file
-# (relative path)
-file//%: 
-	@if test -f $*; then \
-		gln -sf "$(CURDIR)/$*" $(PARTIALDIR)/$(notdir $*); \
-	else \
-		false; \
-	fi
-
-# Using Jeff Waugh's rsync rule.
-# DOES NOT PRESERVE SYMLINKS!
-rsync//%: 
-	@rsync -azvLP rsync://$* $(PARTIALDIR)/
-
-# Using Jeff Waugh's scp rule
-scp//%:
-	@scp -C $* $(PARTIALDIR)/
-
-# Fetch a SVN repo via http
-svn-http//%:
-	@svn co $(SVNHTTP_CO_ARGS) http://$* $(PARTIALDIR)/$(notdir $*)
-
-svn-https//%:
-	@svn co $(SVNHTTP_CO_ARGS) https://$* $(PARTIALDIR)/$(notdir $*)
-
-#################### CHECKSUM RULES ####################
-
-# check a given file's checksum against $(CHECKSUM_FILE) and
-# error out if it mentions the file without an "OK".
-# The removal of the download prefix is for legacy checksums. For newstyle
-# checksums without path this is not necessary.
-checksum-%: $(CHECKSUM_FILE) 
-	@echo " ==> Running checksum on $*"
-	@if gegrep -- '[ /]$*$$' $(CHECKSUM_FILE); then \
-		if cat $(CHECKSUM_FILE) | sed -e 's!download/!!' | (cd $(DOWNLOADDIR); LC_ALL="C" LANG="C" gmd5sum -c 2>&1) | \
-			ggrep -- '$*' | ggrep -v ':[ ]\+OK'; then \
-			echo '(!!!) $* failed checksum test!' 1>&2; \
-			false; \
-		else \
-			echo 'file $* passes checksum test!'; \
-			$(MAKECOOKIE); \
-		fi \
-	else \
-		echo '(!!!) $* not in $(CHECKSUM_FILE) file!' 1>&2; \
-		false; \
-	fi
-
-#################### CHECKNEW RULES ####################
-
-UPSTREAM_MASTER_SITES ?= $(MASTER_SITES)
-UW_ARGS = $(addprefix -u ,$(UPSTREAM_MASTER_SITES))
-SF_ARGS = $(addprefix -s ,$(UPSTREAM_USE_SF))
-
-define files2check
-$(if $(UFILES_REGEX),$(shell http_proxy=$(http_proxy) ftp_proxy=$(ftp_proxy) $(GARBIN)/upstream_watch $(UW_ARGS) $(SF_ARGS) $(addsuffix ',$(addprefix ',$(UFILES_REGEX)))))
-endef
-
-check-upstream-and-mail: FILES2CHECK = $(call files2check)
-check-upstream-and-mail:
-	@if [ -n '$(FILES2CHECK)' ]; then \
-		NEW_FILES=""; \
-		PACKAGE_UP_TO_DATE=0; \
-		for FILE in $(FILES2CHECK) ""; do \
-			[ -n "$$FILE" ] || continue; \
-			if test -f $(COOKIEDIR)/checknew-$$FILE ; then \
-				PACKAGE_UP_TO_DATE=1; \
-			else \
-				if echo $(DISTFILES) | grep -w $$FILE >/dev/null; then \
-					PACKAGE_UP_TO_DATE=1; \
-					echo "$(GARNAME) : Package is up-to-date. Current version is $$FILE" ; \
-				else \
-					NEW_FILES="$$FILE $$NEW_FILES"; \
-				fi; \
-			fi; \
-			$(MAKE) checknew-$$FILE >/dev/null; \
-		done; \
-		if test -z "$$NEW_FILES" ; then \
-			if [ ! -n '$(UFILES_REGEX)' ]; then \
-				echo "$(GARNAME) : Warning UFILES_REGEX is not set : $(UFILES_REGEX)" ; \
-#				{ echo ""; \
-#				  echo "Hello dear $(GARNAME) maintainer,"; \
-#				  echo ""; \
-#				  echo "The upstream notification job has detected that $(GARNAME) is not configured for automatic upstream file update detection."; \
-#				  echo ""; \
-#				  echo "Please consider updating your package. Documentation is available from this link : http://www.opencsw.org" ; \
-#				  echo ""; \
-#				  echo "--"; \
-#				  echo "Kindest regards"; \
-#				  echo "upstream notification job"; } | $(GARBIN)/mail2maintainer -s '[svn] $(GARNAME) upstream update notification' $(GARNAME); \
-			else \
-				if [ "$$PACKAGE_UP_TO_DATE" -eq "0" ]; then \
-					echo "$(GARNAME) : Warning no files to check ! $(FILES2CHECK)" ; \
-					echo "$(GARNAME) :     UPSTREAM_MASTER_SITES is $(UPSTREAM_MASTER_SITES)" ; \
-					echo "$(GARNAME) :     DISTNAME is $(DISTNAME)" ; \
-					echo "$(GARNAME) :     UFILES_REGEX is : $(UFILES_REGEX)" ; \
-					echo "$(GARNAME) : Please check configuration" ; \
-				fi; \
-			fi; \
-		else \
-			echo "$(GARNAME) : new upstream files available: $$NEW_FILES"; \
-			{	echo ""; \
-				echo "Hello dear $(GARNAME) maintainer,"; \
-				echo ""; \
-				echo "The upstream notification job has detected the availability of new files for $(GARNAME)."; \
-				echo ""; \
-				echo "The following upstream file(s):"; \
-				echo "    $$NEW_FILES"; \
-				echo ""; \
-				echo "is/are available at the following url(s):"; \
-				echo "    $(UPSTREAM_MASTER_SITES)"; \
-				echo ""; \
-				echo "Please consider updating your package." ; \
-				echo ""; \
-				echo "--"; \
-				echo "Kindest regards"; \
-				echo "upstream notification job"; } | $(GARBIN)/mail2maintainer -s '[svn] $(GARNAME) upstream update notification' $(GARNAME); \
-		fi; \
-	fi
-		
-check-upstream: FILES2CHECK = $(call files2check)
-check-upstream: 
-	@if [ -n '$(FILES2CHECK)' ]; then \
-		NEW_FILES=""; \
-		PACKAGE_UP_TO_DATE=0; \
-		for FILE in $(FILES2CHECK) ""; do \
-			[ -n "$$FILE" ] || continue; \
-			if test -f $(COOKIEDIR)/checknew-$$FILE ; then \
-				PACKAGE_UP_TO_DATE=1; \
-			else \
-				if echo $(DISTFILES) | grep -w $$FILE >/dev/null; then \
-					PACKAGE_UP_TO_DATE=1; \
-					echo "$(GARNAME) : Package is up-to-date. Current version is $$FILE" ; \
-				else \
-					NEW_FILES="$$FILE $$NEW_FILES"; \
-				fi; \
-			fi; \
-			$(MAKE) checknew-$$FILE >/dev/null; \
-		done; \
-		if test -z "$$NEW_FILES" ; then \
-			if [ ! -n '$(UFILES_REGEX)' ]; then \
-				echo "$(GARNAME) : Warning UFILES_REGEX is not set : $(UFILES_REGEX)" ; \
-			else \
-				if [ "$$PACKAGE_UP_TO_DATE" -eq "0" ]; then \
-					echo "$(GARNAME) : Warning no files to check ! $(FILES2CHECK)" ; \
-					echo "$(GARNAME) :     UPSTREAM_MASTER_SITES is $(UPSTREAM_MASTER_SITES)" ; \
-					echo "$(GARNAME) :     DISTNAME is $(DISTNAME)" ; \
-					echo "$(GARNAME) :     UFILES_REGEX is : $(UFILES_REGEX)" ; \
-					echo "$(GARNAME) : Please check configuration" ; \
-				fi; \
-			fi; \
-		else \
-			echo "$(GARNAME) : new upstream files available: $$NEW_FILES"; \
-		fi; \
-	fi
-	
-checknew-%:
-	@$(MAKECOOKIE)
-
-
-#################### GARCHIVE RULES ####################
-
-# while we're here, let's just handle how to back up our
-# checksummed files
-
-$(GARCHIVEDIR)/%: $(GARCHIVEDIR)
-	@if [ -h $(DOWNLOADDIR)/$* ]; then :; else \
-		gcp -Lr $(DOWNLOADDIR)/$* $@; \
-	fi
-
-
-#################### EXTRACT RULES ####################
-
-TAR_ARGS = --no-same-owner
-
-# rule to extract uncompressed tarballs
-tar-extract-%:
-	@echo " ==> Extracting $(DOWNLOADDIR)/$*"
-	@gtar $(TAR_ARGS) -xf $(DOWNLOADDIR)/$* -C $(EXTRACTDIR)
-	@$(MAKECOOKIE)
-
-# rule to extract files with tar xzf
-tar-gz-extract-%:
-	@echo " ==> Extracting $(DOWNLOADDIR)/$*"
-	@gzip -dc $(DOWNLOADDIR)/$* | gtar $(TAR_ARGS) -xf - -C $(EXTRACTDIR)
-	@$(MAKECOOKIE)
-
-# rule to extract files with tar and bzip
-tar-bz-extract-%:
-	@echo " ==> Extracting $(DOWNLOADDIR)/$*"
-	@bzip2 -dc $(DOWNLOADDIR)/$* | gtar $(TAR_ARGS) -xf - -C $(EXTRACTDIR)
-	@$(MAKECOOKIE)
-
-# extract compressed single files
-bz-extract-%:
-	@echo " ==> Decompressing $(DOWNLOADDIR)/$*"
-	@cp $(DOWNLOADDIR)/$* $(WORKDIR)/
-	@bzip2 -d $(WORKDIR)/$*
-	@$(MAKECOOKIE)
-
-gz-extract-%:
-	@echo " ==> Decompressing $(DOWNLOADDIR)/$*"
-	@cp $(DOWNLOADDIR)/$* $(WORKDIR)/
-	@gzip -d $(WORKDIR)/$*
-	@$(MAKECOOKIE)
-
-# extra dependency rule for git repos, that will allow the user
-# to supply an alternate target at their discretion
-git-extract-%:
-	@echo " ===> Extracting Git Repo $(DOWNLOADDIR)/$* (Treeish: $(call GIT_TREEISH,$*))"
-	git --bare archive --prefix=$(GARNAME)-$(GARVERSION)/ --remote=file://$(abspath $(DOWNLOADDIR))/$*/ $(call GIT_TREEISH,$*) | gtar -xf - -C $(EXTRACTDIR)
-	@$(MAKECOOKIE)
-
-# rule to extract files with unzip
-zip-extract-%:
-	@echo " ==> Extracting $(DOWNLOADDIR)/$*"
-	@unzip $(DOWNLOADDIR)/$* -d $(EXTRACTDIR)
-	@$(MAKECOOKIE)
-
-# this is a null extract rule for files which are constant and
-# unchanged (not archives)
-cp-extract-%:
-	@echo " ==> Copying $(DOWNLOADDIR)/$*"
-	@cp -rp $(DOWNLOADDIR)/$* $(WORKDIR)/
-	@$(MAKECOOKIE)
-
-#gets the meat of a .deb into $(WORKSRC)
-deb-bin-extract-%:
-	@echo " ==> Extracting $(DOWNLOADDIR)/$*"
-	@ar x $(DOWNLOADDIR)/$*
-	@rm debian-binary && \
-		mv *.tar.gz $(DOWNLOADDIR) && \
-			mkdir $(WORKSRC) && \
-				gtar $(TAR_ARGS) -xvz -C $(WORKSRC) \
-					-f $(DOWNLOADDIR)/data.tar.gz
-	@$(MAKECOOKIE)
-
-### EXTRACT FILE TYPE MAPPINGS ###
-# These rules specify which of the above extract action rules to use for a
-# given file extension.  Often support for a given extract type can be handled
-# by simply adding a rule here.
-
-extract-archive-%.tar: tar-extract-%.tar
-	@$(MAKECOOKIE)
-
-extract-archive-%.tar.gz: tar-gz-extract-%.tar.gz
-	@$(MAKECOOKIE)
-
-extract-archive-%.tar.Z: tar-gz-extract-%.tar.Z
-	@$(MAKECOOKIE)
-
-extract-archive-%.tgz: tar-gz-extract-%.tgz
-	@$(MAKECOOKIE)
-
-extract-archive-%.taz: tar-gz-extract-%.taz
-	@$(MAKECOOKIE)
-
-extract-archive-%.tar.bz: tar-bz-extract-%.tar.bz
-	@$(MAKECOOKIE)
-
-extract-archive-%.tar.bz2: tar-bz-extract-%.tar.bz2
-	@$(MAKECOOKIE)
-
-extract-archive-%.tbz: tar-bz-extract-%.tbz
-	@$(MAKECOOKIE)
-
-extract-archive-%.zip: zip-extract-%.zip
-	@$(MAKECOOKIE)
-
-extract-archive-%.ZIP: zip-extract-%.ZIP
-	@$(MAKECOOKIE)
-
-extract-archive-%.deb: deb-bin-extract-%.deb
-	@$(MAKECOOKIE)
-
-extract-archive-%.bz2: bz-extract-%.bz2
-	@$(MAKECOOKIE)
-
-extract-archive-%.gz: gz-extract-%.gz
-	@$(MAKECOOKIE)
-
-extract-archive-%.git: git-extract-%.git
-	@$(MAKECOOKIE)
-
-# anything we don't know about, we just assume is already
-# uncompressed and unarchived in plain format
-extract-archive-%: cp-extract-%
-	@$(MAKECOOKIE)
-
-#################### PATCH RULES ####################
-
-PATCHDIR ?= $(WORKSRC)
-PATCHDIRLEVEL ?= 1
-PATCHDIRFUZZ ?= 2
-GARPATCH = gpatch -d$(PATCHDIR) -p$(PATCHDIRLEVEL) -F$(PATCHDIRFUZZ)
-BASEWORKSRC = $(shell basename $(WORKSRC))
-
-# apply bzipped patches
-bz-patch-%:
-	@echo " ==> Applying patch $(DOWNLOADDIR)/$*"
-	@bzip2 -dc $(DOWNLOADDIR)/$* | $(GARPATCH)
-	@$(MAKECOOKIE)
-
-# apply gzipped patches
-gz-patch-%:
-	@echo " ==> Applying patch $(DOWNLOADDIR)/$*"
-	@gzip -dc $(DOWNLOADDIR)/$* | $(GARPATCH)
-	@$(MAKECOOKIE)
-
-# apply normal patches
-normal-patch-%:
-	@echo " ==> Applying patch $(DOWNLOADDIR)/$*"
-	$(GARPATCH) < $(DOWNLOADDIR)/$*
-	@$(MAKECOOKIE)
-
-# This is used by makepatch
-%/gar-base.diff:
-	@echo " ==> Creating patch $@"
-	@EXTRACTDIR=$(SCRATCHDIR) COOKIEDIR=$(SCRATCHDIR)-$(COOKIEDIR) $(MAKE) extract
-	@PATCHDIR=$(SCRATCHDIR)/$(BASEWORKSRC) COOKIEDIR=$(SCRATCHDIR)-$(COOKIEDIR) $(MAKE) patch
-	@mv $(SCRATCHDIR)/$(BASEWORKSRC) $(WORKSRC_FIRSTMOD).orig
-	@( cd $(WORKDIR_FIRSTMOD); \
-		if gdiff --speed-large-files --minimal -Nru $(BASEWORKSRC).orig $(BASEWORKSRC) > gar-base.diff; then :; else \
-			cd $(CURDIR); \
-			mv -f $(WORKDIR_FIRSTMOD)/gar-base.diff $@; \
-		fi )
-
-### PATCH FILE TYPE MAPPINGS ###
-# These rules specify which of the above patch action rules to use for a given
-# file extension.  Often support for a given patch format can be handled by
-# simply adding a rule here.
-
-patch-extract-%.bz: bz-patch-%.bz
-	@$(MAKECOOKIE)
-
-patch-extract-%.bz2: bz-patch-%.bz2
-	@$(MAKECOOKIE)
-
-patch-extract-%.gz: gz-patch-%.gz
-	@$(MAKECOOKIE)
-
-patch-extract-%.Z: gz-patch-%.Z
-	@$(MAKECOOKIE)
-
-patch-extract-%.diff: normal-patch-%.diff
-	@$(MAKECOOKIE)
-
-patch-extract-%.patch: normal-patch-%.patch
-	@$(MAKECOOKIE)
-
-patch-extract-%: normal-patch-%
-	@$(MAKECOOKIE)
-
-#################### CONFIGURE RULES ####################
-
-TMP_DIRPATHS = --prefix=$(prefix) --exec_prefix=$(exec_prefix) --bindir=$(bindir) --sbindir=$(sbindir) --libexecdir=$(libexecdir) --datadir=$(datadir) --sysconfdir=$(sysconfdir) --sharedstatedir=$(sharedstatedir) --localstatedir=$(localstatedir) --libdir=$(libdir) --infodir=$(infodir) --lispdir=$(lispdir) --includedir=$(includedir) --mandir=$(mandir)
-
-NODIRPATHS += --lispdir
-
-DIRPATHS = $(filter-out $(addsuffix %,$(NODIRPATHS)), $(TMP_DIRPATHS))
-
-# configure a package that has an autoconf-style configure
-# script.
-configure-%/configure:
-	@echo " ==> Running configure in $*"
-	cd $* && $(CONFIGURE_ENV) ./configure $(CONFIGURE_ARGS)
-	@$(MAKECOOKIE)
-
-configure-%/autogen.sh:
-	@echo " ==> Running autogen.sh in $*"
-	@cd $* && $(CONFIGURE_ENV) ./autogen.sh $(CONFIGURE_ARGS)
-	@$(MAKECOOKIE)
-
-# configure a package that uses imake
-# FIXME: untested and likely not the right way to handle the
-# arguments
-configure-%/Imakefile: 
-	@echo " ==> Running xmkmf in $*"
-	@cd $* && $(CONFIGURE_ENV) xmkmf $(CONFIGURE_ARGS)
-	@$(MAKECOOKIE)
-
-configure-%/setup.rb:
-	@echo " ==> Running setup.rb config in $*"
-	@( cd $* ; $(CONFIGURE_ENV) ruby ./setup.rb config $(CONFIGURE_ARGS) )
-	@$(MAKECOOKIE)
-
-#################### BUILD RULES ####################
-
-# build from a standard gnu-style makefile's default rule.
-build-%/Makefile:
-	@echo " ==> Running make in $*"
-	@$(BUILD_ENV) $(MAKE) $(PARALLELMFLAGS) $(foreach TTT,$(BUILD_OVERRIDE_DIRS),$(TTT)="$($(TTT))") -C $* $(BUILD_ARGS)
-	@$(MAKECOOKIE)
-
-build-%/makefile:
-	@echo " ==> Running make in $*"
-	@$(BUILD_ENV) $(MAKE) $(PARALLELMFLAGS) $(foreach TTT,$(BUILD_OVERRIDE_DIRS),$(TTT)="$($(TTT))") -C $* $(BUILD_ARGS)
-	@$(MAKECOOKIE)
-
-build-%/GNUmakefile:
-	@echo " ==> Running make in $*"
-	@$(BUILD_ENV) $(MAKE) $(PARALLELMFLAGS) $(foreach TTT,$(BUILD_OVERRIDE_DIRS),$(TTT)="$($(TTT))") -C $* $(BUILD_ARGS)
-	@$(MAKECOOKIE)
-
-build-%/Jamfile:
-	@echo " ==> Running bjam in $*"
-	@( cd $* ; $(BUILD_ENV) bjam $(JAMFLAGS) $(BUILD_ARGS) )
-	@$(MAKECOOKIE)
-
-# Ruby makefiles
-build-%/Rakefile:
-	@echo " ==> Running rake in $*"
-	@( cd $* ; $(BUILD_ENV) rake $(RAKEFLAGS) $(BUILD_ARGS) )
-	@$(MAKECOOKIE)
-
-build-%/rakefile:
-	@echo " ==> Running rake in $*"
-	@( cd $* ; $(BUILD_ENV) rake $(RAKEFLAGS) $(BUILD_ARGS) )
-	@$(MAKECOOKIE)
-
-build-%/setup.rb:
-	@echo " ==> Running setup.rb setup in $*"
-	@( cd $* ; $(BUILD_ENV) ruby ./setup.rb setup $(BUILD_ARGS) )
-	@$(MAKECOOKIE)
-
-# This can be: build, build_py, build_ext, build_clib, build_scripts
-# See setup.py --help-commands for details
-PYBUILD_CMD ?= build
-build-%/setup.py:
-	@echo " ==> Running setup.py $(PYBUILD_TYPE) in $*"
-	@( cd $* ; $(BUILD_ENV) python ./setup.py $(PYBUILD_CMD) $(BUILD_ARGS) )
-	@$(MAKECOOKIE)
-
-#################### TEST RULES ####################
-
-TEST_TARGET ?= test
-
-# Run tests on pre-built sources
-test-%/Makefile:
-	@echo " ==> Running make $(TEST_TARGET) in $*"
-	@$(TEST_ENV) $(MAKE) $(foreach TTT,$(TEST_OVERRIDE_DIRS),$(TTT)="$($(TTT))") -C $* $(TEST_ARGS) $(TEST_TARGET)
-	@$(MAKECOOKIE)
-
-test-%/makefile:
-	@echo " ==> Running make $(TEST_TARGET) in $*"
-	@$(TEST_ENV) $(MAKE) $(foreach TTT,$(TEST_OVERRIDE_DIRS),$(TTT)="$($(TTT))") -C $* $(TEST_ARGS) $(TEST_TARGET)
-	@$(MAKECOOKIE)
-
-test-%/GNUmakefile:
-	@echo " ==> Running make $(TEST_TARGET) in $*"
-	@$(TEST_ENV) $(MAKE) $(foreach TTT,$(TEST_OVERRIDE_DIRS),$(TTT)="$($(TTT))") -C $* $(TEST_ARGS) $(TEST_TARGET)
-	@$(MAKECOOKIE)
-
-# Ruby makefiles
-test-%/Rakefile:
-	@echo " ==> Running rake $(TEST_TARGET) in $*"
-	@( cd $* ; $(TEST_ENV) rake $(RAKEFLAGS) $(TEST_ARGS) $(TEST_TARGET) )
-	@$(MAKECOOKIE)
-
-test-%/rakefile:
-	@echo " ==> Running rake $(TEST_TARGET) in $*"
-	@( cd $* ; $(TEST_ENV) rake $(RAKEFLAGS) $(TEST_ARGS) $(TEST_TARGET) )
-	@$(MAKECOOKIE)
-
-test-%/setup.py:
-	@echo " ==> Running setup.py test in $*"
-	@( cd $* ; $(TEST_ENV) python ./setup.py test $(TEST_ARGS) )
-	@$(MAKECOOKIE)
-
-################# INSTALL RULES ####################
-
-# just run make install and hope for the best.
-install-%/Makefile:
-	@echo " ==> Running make install in $*"
-	@$(INSTALL_ENV) $(MAKE) DESTDIR=$(DESTDIR) $(foreach TTT,$(INSTALL_OVERRIDE_VARS),$(TTT)="$(INSTALL_OVERRIDE_VAR_$(TTT))") $(foreach TTT,$(INSTALL_OVERRIDE_DIRS),$(TTT)="$(DESTDIR)$($(TTT))") -C $* $(INSTALL_ARGS) install
-	@$(MAKECOOKIE)
-
-install-%/makefile:
-	@echo " ==> Running make install in $*"
-	@$(INSTALL_ENV) $(MAKE) DESTDIR=$(DESTDIR) $(foreach TTT,$(INSTALL_OVERRIDE_VARS),$(TTT)="$(INSTALL_OVERRIDE_VAR_$(TTT))") $(foreach TTT,$(INSTALL_OVERRIDE_DIRS),$(TTT)="$(DESTDIR)$($(TTT))") -C $* $(INSTALL_ARGS) install
-	@$(MAKECOOKIE)
-
-install-%/GNUmakefile:
-	@echo " ==> Running make install in $*"
-	@$(INSTALL_ENV) $(MAKE) DESTDIR=$(DESTDIR) $(foreach TTT,$(INSTALL_OVERRIDE_VARS),$(TTT)="$(INSTALL_OVERRIDE_VAR_$(TTT))") $(foreach TTT,$(INSTALL_OVERRIDE_DIRS),$(TTT)="$(DESTDIR)$($(TTT))") -C $* $(INSTALL_ARGS) install
-	@$(MAKECOOKIE)
-
-# Ruby makefiles
-install-%/Rakefile:
-	@echo " ==> Running rake install in $*"
-	@( cd $* ; $(INSTALL_ENV) rake $(RAKEFLAGS) $(INSTALL_ARGS) )
-	@$(MAKECOOKIE)
-
-install-%/rakefile:
-	@echo " ==> Running rake install in $*"
-	@( cd $* ; $(INSTALL_ENV) rake $(RAKEFLAGS) $(INSTALL_ARGS) )
-	@$(MAKECOOKIE)
-
-install-%/setup.rb:
-	@echo " ==> Running setup.rb install in $*"
-	@( cd $* ; $(INSTALL_ENV) ruby ./setup.rb install --prefix=$(DESTDIR) )
-	@$(MAKECOOKIE)
-
-# This can be: install, install_lib, install_headers, install_scripts,
-# or install_data.  See setup.py --help-commands for details.
-PYINSTALL_CMD ?= install
-install-%/setup.py:
-	@echo " ==> Running setup.py $(PYINSTALL_CMD) in $*"
-	@( cd $* ; $(INSTALL_ENV) python ./setup.py $(PYINSTALL_CMD) $(INSTALL_ARGS) )
-	@$(MAKECOOKIE)
-
-# pkg-config scripts
-install-%-config:
-	mkdir -p $(STAGINGDIR)/$(GARNAME)
-	cp -f $(DESTDIR)$(bindir)/$*-config $(STAGINGDIR)/$(GARNAME)/
-	$(MAKECOOKIE)
-
-######################################
-# Use a manifest file of the format:
-# src:dest[:mode[:owner[:group]]]
-#   as in...
-# ${WORKSRC}/nwall:${bindir}/nwall:2755:root:tty
-# ${WORKSRC}/src/foo:${sharedstatedir}/foo
-# ${WORKSRC}/yoink:${sysconfdir}/yoink:0600
-
-# Okay, so for the benefit of future generations, this is how it
-# works:
-#
-# First of all, we have this file with colon-separated lines.
-# The $(shell cat foo) routine turns it into a space-separated
-# list of words.  The foreach iterates over this list, putting a
-# colon-separated record in $(ZORCH) on each pass through.
-#
-# Next, we have the macro $(MANIFEST_LINE), which splits a record
-# into a space-separated list, and $(MANIFEST_SIZE), which
-# determines how many elements are in such a list.  These are
-# purely for convenience, and could be inserted inline if need
-# be.
-MANIFEST_LINE = $(subst :, ,$(ZORCH)) 
-MANIFEST_SIZE = $(words $(MANIFEST_LINE))
-
-# So the install command takes a variable number of parameters,
-# and our records have from two to five elements.  Gmake can't do
-# any sort of arithmetic, so we can't do any really intelligent
-# indexing into the list of parameters.
-# 
-# Since the last three elements of the $(MANIFEST_LINE) are what
-# we're interested in, we make a parallel list with the parameter
-# switch text (note the dummy elements at the beginning):
-MANIFEST_FLAGS = notused notused --mode= --owner= --group=
-
-# The following environment variables are set before the
-# installation boogaloo begins.  This ensures that WORKSRC is
-# available to the manifest and that all of the location
-# variables are suitable for *installation* (that is, using
-# DESTDIR)
-
-MANIFEST_ENV += WORKSRC=$(WORKSRC)
-# This was part of the "implicit DESTDIR" regime.  However:
-# http://gar.lnx-bbc.org/wiki/ImplicitDestdirConsideredHarmful
-#MANIFEST_ENV += $(foreach TTT,prefix exec_prefix bindir sbindir libexecdir datadir sysconfdir sharedstatedir localstatedir libdir infodir lispdir includedir mandir,$(TTT)=$(DESTDIR)$($(TTT)))
-
-# ...and then we join a slice of it with the corresponding slice
-# of the $(MANIFEST_LINE), starting at 3 and going to
-# $(MANIFEST_SIZE).  That's where all the real magic happens,
-# right there!
-#
-# following that, we just splat elements one and two of
-# $(MANIFEST_LINE) on the end, since they're the ones that are
-# always there.  Slap a semicolon on the end, and you've got a
-# completed iteration through the foreach!  Beaujolais!
-
-# FIXME: using -D may not be the right thing to do!
-install-$(MANIFEST_FILE):
-	@echo " ==> Installing from $(MANIFEST_FILE)"
-	$(MANIFEST_ENV) ; $(foreach ZORCH,$(shell cat $(MANIFEST_FILE)), ginstall -Dc $(join $(wordlist 3,$(MANIFEST_SIZE),$(MANIFEST_FLAGS)),$(wordlist 3,$(MANIFEST_SIZE),$(MANIFEST_LINE))) $(word 1,$(MANIFEST_LINE)) $(word 2,$(MANIFEST_LINE)) ;)
-	@$(MAKECOOKIE)
-
-#################### DEPENDENCY RULES ####################
-
-# These two lines are here to grandfather in all the packages that use
-# BUILDDEPS
-IMGDEPS += build
-build_DEPENDS = $(BUILDDEPS)
-
-# Standard deps install into the standard install dir.  For the
-# BBC, we set the includedir to the build tree and the libdir to
-# the install tree.  Most dependencies work this way.
-
-$(GARDIR)/%/$(COOKIEDIR)/install:
-	@echo ' ==> Building $* as a dependency'
-	@$(MAKE) -C $(GARDIR)/$* install DESTIMG=$(DESTIMG)
-
-# builddeps need to have everything put in the build DESTIMG
-#$(GARDIR)/%/$(COOKIEROOTDIR)/build.d/install:
-#	@echo ' ==> Building $* as a build dependency'
-#	@$(MAKE) -C $(GARDIR)/$* install	DESTIMG=build
-
-# Source Deps grab the source code for another package
-# XXX: nobody uses this, but it should really be more like
-# $(GARDIR)/%/cookies/patch:
-srcdep-$(GARDIR)/%:
-	@echo ' ==> Grabbing source for $* as a dependency'
-	@$(MAKE) -C $(GARDIR)/$* patch-p extract-p > /dev/null 2>&1 || \
-	 $(MAKE) -C $(GARDIR)/$* patch
-
-# Image deps create dependencies on package installations in
-# images other than the current package's DESTIMG.
-IMGDEP_TARGETS = $(foreach TTT,$($*_DEPENDS),$(subst xyzzy,$(TTT),$(GARDIR)/xyzzy/$(COOKIEROOTDIR)/$*.d/install))
-imgdep-%:
-	@test -z "$(strip $(IMGDEP_TARGETS))" || $(MAKE) DESTIMG="$*" $(IMGDEP_TARGETS)
-
-# Igor's info and man gzipper rule
-gzip-info-man: gzip-info gzip-man
-
-gzip-info:
-	gfind $(DESTDIR) -type f -iname *.info* -not -iname *.gz | \
-		gxargs -r gzip --force
-
-gzip-man:
-	gfind $(DESTDIR) -type f -iname *.[1-8] -size +2 -print | \
-		gxargs -r gzip --force
-
-compile-elisp:
-	@(for d in $(ELISP_DIRS); do \
-		echo " ===> Compiling .el files in $$d"; \
-		cd $(PKGROOT)/$$d; \
-		for f in `find . -name "*el"`; do \
-			bf=`basename $$f`; \
-			bd=`dirname $$f`; \
-			cd $$bd; \
-			emacs -L $(PKGROOT)/$$d -L $(PKGROOT)/$$d/$$bd $(EXTRA_EMACS_ARGS) -batch -f batch-byte-compile "$$bf"; \
-			cd $(PKGROOT)/$$d; \
-		done; \
-	done)
-
-include $(addprefix $(GARDIR)/,$(EXTRA_LIBS))
-
-# Mmm, yesssss.  cookies my preciousssss!  Mmm, yes downloads it
-# is!  We mustn't have nasty little gmakeses deleting our
-# precious cookieses now must we?
-.PRECIOUS: $(DOWNLOADDIR)/% $(COOKIEDIR)/% $(FILEDIR)/%

Copied: csw/mgar/gar/v2-collapsed-modulations/gar.lib.mk (from rev 6855, csw/mgar/gar/v2/gar.lib.mk)
===================================================================
--- csw/mgar/gar/v2-collapsed-modulations/gar.lib.mk	                        (rev 0)
+++ csw/mgar/gar/v2-collapsed-modulations/gar.lib.mk	2009-10-18 17:29:04 UTC (rev 6894)
@@ -0,0 +1,753 @@
+# vim: ft=make ts=4 sw=4 noet
+#
+# $Id$
+#
+# Copyright (C) 2001 Nick Moffitt
+# 
+# Redistribution and/or use, with or without modification, is
+# permitted.  This software is without warranty of any kind.  The
+# author(s) shall not be liable in the event that use of the
+# software causes damage.
+#
+
+# cookies go here, so we have to be able to find them for
+# dependency checking.
+VPATH += $(COOKIEDIR)
+
+# convenience variable to make the cookie.
+MAKECOOKIE = mkdir -p $(COOKIEDIR)/$(@D) && date >> $(COOKIEDIR)/$@
+
+URLSTRIP = $(subst ://,//,$(1))
+
+# if you need to proxy git:// connections, set GIT_USE_PROXY.  There is a
+# default proxy script that works with the (squid?) proxy at the BO buildfarm.
+# override GIT_PROXY_SCRIPT to something else if you need to.
+GIT_MAYBEPROXY = $(if $(GIT_USE_PROXY),GIT_PROXY_COMMAND=$(GIT_PROXY_SCRIPT))
+GIT_TREEISH = $(if $(GIT_TREEISH_$(1)),$(GIT_TREEISH_$(1)),HEAD)
+
+#################### FETCH RULES ####################
+
+URLS = $(call URLSTRIP,$(foreach SITE,$(FILE_SITES) $(MASTER_SITES),$(addprefix $(SITE),$(DISTFILES))) $(foreach SITE,$(FILE_SITES) $(PATCH_SITES) $(MASTER_SITES),$(addprefix $(SITE),$(PATCHFILES) $(foreach M,$(MODULATIONS),$(PATCHFILES_$M)))))
+
+# if the caller has defined _postinstall, etc targets for a package, add
+# these 'dynamic script' targets to our fetch list
+URLS += $(foreach DYN,$(DYNSCRIPTS),dynscr//$(DYN))
+
+ifdef GIT_REPOS
+URLS += $(foreach R,$(GIT_REPOS),gitrepo//$(call GITPROJ,$(R)) $(subst http,git-http,$(call URLSTRIP,$(R))))
+endif
+
+# Download the file if and only if it doesn't have a preexisting
+# checksum file.  Loop through available URLs and stop when you
+# get one that doesn't return an error code.
+$(DOWNLOADDIR)/%:  
+	@if test -f $(COOKIEDIR)/checksum-$*; then : ; else \
+		echo " ==> Grabbing $@"; \
+		for i in $(filter %/$*,$(URLS)); do  \
+			echo " 	==> Trying $$i"; \
+			$(MAKE) -s $$i || continue; \
+			mv $(PARTIALDIR)/$* $@; \
+			break; \
+		done; \
+		if test -r $@ ; then : ; else \
+			echo '(!!!) Failed to download $@!' 1>&2; \
+			false; \
+		fi; \
+	fi
+
+gitrepo//%:
+	@( if [ -d $(GARCHIVEDIR)/$(call GITPROJ,$*) ]; then \
+		( cd $(GARCHIVEDIR)/$(call GITPROJ,$*); \
+			$(GIT_MAYBEPROXY) git --bare fetch ) && \
+		gln -s $(GARCHIVEDIR)/$(call GITPROJ,$*)/ $(PARTIALDIR)/$(call GITPROJ,$*); \
+	   else \
+		false; \
+	  fi )
+
+# the git remote add commands are so that we can later do a fetch
+# to update the code.
+# we possibly proxy the git:// references depending on GIT_USE_PROXY
+git-http//%:
+	@git clone --bare http://$* $(PARTIALDIR)/$(call GITPROJ,$*)
+	@( cd $(PARTIALDIR)/$(call GITPROJ,$*); \
+		git remote add origin http://$*; \
+		git config remote.origin.fetch $(if $(GIT_REFS_$(call GITPROJ,$*)),$(GIT_REFS_$(call GITPROJ,$*)),$(GIT_DEFAULT_TRACK)); )
+
+git//%:
+	@$(GIT_MAYBEPROXY) git clone --bare git://$* $(PARTIALDIR)/$(call GITPROJ,$*)
+	@( cd $(PARTIALDIR)/$(call GITPROJ,$*); \
+		git remote add origin git://$*; \
+		git config remote.origin.fetch $(if $(GIT_REFS_$(call GITPROJ,$*)),$(GIT_REFS_$(call GITPROJ,$*)),$(GIT_DEFAULT_TRACK)); )
+
+# create ADMSCRIPTS 'on the fly' from variables defined by the caller
+# This version is private and should only be called from the non-private
+# version directly below
+_dynscr//%:
+	$($(subst .,_,$*))
+
+dynscr//%:
+	$(MAKE) --no-print-directory -n _$@ > $(PARTIALDIR)/$*
+
+# download an http URL (colons omitted)
+http//%: 
+	@wget $(WGET_OPTS) -T 30 -c -P $(PARTIALDIR) http://$*
+
+https//%: 
+	@wget $(WGET_OPTS) -T 30 -c -P $(PARTIALDIR) https://$*
+
+# download an ftp URL (colons omitted)
+#ftp//%: 
+#	@wget -T 30 -c --passive-ftp -P $(PARTIALDIR) ftp://$*
+ftp//%: 
+	@wget $(WGET_OPTS) -T 30 -c -P $(PARTIALDIR) ftp://$*
+
+# link to a local copy of the file
+# (absolute path)
+file///%: 
+	@if test -f /$*; then \
+		gln -sf /$* $(PARTIALDIR)/$(notdir $*); \
+	else \
+		false; \
+	fi
+
+# link to a local copy of the file
+# (relative path)
+file//%: 
+	@if test -f $*; then \
+		gln -sf "$(CURDIR)/$*" $(PARTIALDIR)/$(notdir $*); \
+	else \
+		false; \
+	fi
+
+# Using Jeff Waugh's rsync rule.
+# DOES NOT PRESERVE SYMLINKS!
+rsync//%: 
+	@rsync -azvLP rsync://$* $(PARTIALDIR)/
+
+# Using Jeff Waugh's scp rule
+scp//%:
+	@scp -C $* $(PARTIALDIR)/
+
+# Fetch a SVN repo via http
+svn-http//%:
+	@svn co $(SVNHTTP_CO_ARGS) http://$* $(PARTIALDIR)/$(notdir $*)
+
+svn-https//%:
+	@svn co $(SVNHTTP_CO_ARGS) https://$* $(PARTIALDIR)/$(notdir $*)
+
+#################### CHECKSUM RULES ####################
+
+# check a given file's checksum against $(CHECKSUM_FILE) and
+# error out if it mentions the file without an "OK".
+# The removal of the download prefix is for legacy checksums. For newstyle
+# checksums without path this is not necessary.
+checksum-%: $(CHECKSUM_FILE) 
+	@echo " ==> Running checksum on $*"
+	@if ggrep -- '[ /]$*$$' $(CHECKSUM_FILE); then \
+		if cat $(CHECKSUM_FILE) | sed -e 's!download/!!' | (cd $(DOWNLOADDIR); LC_ALL="C" LANG="C" gmd5sum -c 2>&1) | \
+			ggrep -- '$*' | ggrep -v ':[ ]\+OK'; then \
+			echo '(!!!) $* failed checksum test!' 1>&2; \
+			false; \
+		else \
+			echo 'file $* passes checksum test!'; \
+			$(MAKECOOKIE); \
+		fi \
+	else \
+		echo '(!!!) $* not in $(CHECKSUM_FILE) file!' 1>&2; \
+		false; \
+	fi
+
+#################### CHECKNEW RULES ####################
+
+UPSTREAM_MASTER_SITES ?= $(MASTER_SITES)
+UW_ARGS = $(addprefix -u ,$(UPSTREAM_MASTER_SITES))
+SF_ARGS = $(addprefix -s ,$(UPSTREAM_USE_SF))
+
+define files2check
+$(if $(UFILES_REGEX),$(shell http_proxy=$(http_proxy) ftp_proxy=$(ftp_proxy) $(GARBIN)/upstream_watch $(UW_ARGS) $(SF_ARGS) $(addsuffix ',$(addprefix ',$(UFILES_REGEX)))))
+endef
+
+check-upstream-and-mail: FILES2CHECK = $(call files2check)
+check-upstream-and-mail:
+	@if [ -n '$(FILES2CHECK)' ]; then \
+		NEW_FILES=""; \
+		PACKAGE_UP_TO_DATE=0; \
+		for FILE in $(FILES2CHECK) ""; do \
+			[ -n "$$FILE" ] || continue; \
+			if test -f $(COOKIEDIR)/checknew-$$FILE ; then \
+				PACKAGE_UP_TO_DATE=1; \
+			else \
+				if echo $(DISTFILES) | grep -w $$FILE >/dev/null; then \
+					PACKAGE_UP_TO_DATE=1; \
+					echo "$(GARNAME) : Package is up-to-date. Current version is $$FILE" ; \
+				else \
+					NEW_FILES="$$FILE $$NEW_FILES"; \
+				fi; \
+			fi; \
+			$(MAKE) checknew-$$FILE >/dev/null; \
+		done; \
+		if test -z "$$NEW_FILES" ; then \
+			if [ ! -n '$(UFILES_REGEX)' ]; then \
+				echo "$(GARNAME) : Warning UFILES_REGEX is not set : $(UFILES_REGEX)" ; \
+#				{ echo ""; \
+#				  echo "Hello dear $(GARNAME) maintainer,"; \
+#				  echo ""; \
+#				  echo "The upstream notification job has detected that $(GARNAME) is not configured for automatic upstream file update detection."; \
+#				  echo ""; \
+#				  echo "Please consider updating your package. Documentation is available from this link : http://www.opencsw.org" ; \
+#				  echo ""; \
+#				  echo "--"; \
+#				  echo "Kindest regards"; \
+#				  echo "upstream notification job"; } | $(GARBIN)/mail2maintainer -s '[svn] $(GARNAME) upstream update notification' $(GARNAME); \
+			else \
+				if [ "$$PACKAGE_UP_TO_DATE" -eq "0" ]; then \
+					echo "$(GARNAME) : Warning no files to check ! $(FILES2CHECK)" ; \
+					echo "$(GARNAME) :     UPSTREAM_MASTER_SITES is $(UPSTREAM_MASTER_SITES)" ; \
+					echo "$(GARNAME) :     DISTNAME is $(DISTNAME)" ; \
+					echo "$(GARNAME) :     UFILES_REGEX is : $(UFILES_REGEX)" ; \
+					echo "$(GARNAME) : Please check configuration" ; \
+				fi; \
+			fi; \
+		else \
+			echo "$(GARNAME) : new upstream files available: $$NEW_FILES"; \
+			{	echo ""; \
+				echo "Hello dear $(GARNAME) maintainer,"; \
+				echo ""; \
+				echo "The upstream notification job has detected the availability of new files for $(GARNAME)."; \
+				echo ""; \
+				echo "The following upstream file(s):"; \
+				echo "    $$NEW_FILES"; \
+				echo ""; \
+				echo "is/are available at the following url(s):"; \
+				echo "    $(UPSTREAM_MASTER_SITES)"; \
+				echo ""; \
+				echo "Please consider updating your package." ; \
+				echo ""; \
+				echo "--"; \
+				echo "Kindest regards"; \
+				echo "upstream notification job"; } | $(GARBIN)/mail2maintainer -s '[svn] $(GARNAME) upstream update notification' $(GARNAME); \
+		fi; \
+	fi
+		
+check-upstream: FILES2CHECK = $(call files2check)
+check-upstream: 
+	@if [ -n '$(FILES2CHECK)' ]; then \
+		NEW_FILES=""; \
+		PACKAGE_UP_TO_DATE=0; \
+		for FILE in $(FILES2CHECK) ""; do \
+			[ -n "$$FILE" ] || continue; \
+			if test -f $(COOKIEDIR)/checknew-$$FILE ; then \
+				PACKAGE_UP_TO_DATE=1; \
+			else \
+				if echo $(DISTFILES) | grep -w $$FILE >/dev/null; then \
+					PACKAGE_UP_TO_DATE=1; \
+					echo "$(GARNAME) : Package is up-to-date. Current version is $$FILE" ; \
+				else \
+					NEW_FILES="$$FILE $$NEW_FILES"; \
+				fi; \
+			fi; \
+			$(MAKE) checknew-$$FILE >/dev/null; \
+		done; \
+		if test -z "$$NEW_FILES" ; then \
+			if [ ! -n '$(UFILES_REGEX)' ]; then \
+				echo "$(GARNAME) : Warning UFILES_REGEX is not set : $(UFILES_REGEX)" ; \
+			else \
+				if [ "$$PACKAGE_UP_TO_DATE" -eq "0" ]; then \
+					echo "$(GARNAME) : Warning no files to check ! $(FILES2CHECK)" ; \
+					echo "$(GARNAME) :     UPSTREAM_MASTER_SITES is $(UPSTREAM_MASTER_SITES)" ; \
+					echo "$(GARNAME) :     DISTNAME is $(DISTNAME)" ; \
+					echo "$(GARNAME) :     UFILES_REGEX is : $(UFILES_REGEX)" ; \
+					echo "$(GARNAME) : Please check configuration" ; \
+				fi; \
+			fi; \
+		else \
+			echo "$(GARNAME) : new upstream files available: $$NEW_FILES"; \
+		fi; \
+	fi
+	
+checknew-%:
+	@$(MAKECOOKIE)
+
+
+#################### GARCHIVE RULES ####################
+
+# while we're here, let's just handle how to back up our
+# checksummed files
+
+$(GARCHIVEDIR)/%: $(GARCHIVEDIR)
+	@if [ -h $(DOWNLOADDIR)/$* ]; then :; else \
+		gcp -Lr $(DOWNLOADDIR)/$* $@; \
+	fi
+
+
+#################### EXTRACT RULES ####################
+
+TAR_ARGS = --no-same-owner
+
+# rule to extract uncompressed tarballs
+tar-extract-%:
+	@echo " ==> Extracting $(DOWNLOADDIR)/$*"
+	@gtar $(TAR_ARGS) -xf $(DOWNLOADDIR)/$* -C $(EXTRACTDIR)
+	@$(MAKECOOKIE)
+
+# rule to extract files with tar xzf
+tar-gz-extract-%:
+	@echo " ==> Extracting $(DOWNLOADDIR)/$*"
+	@gzip -dc $(DOWNLOADDIR)/$* | gtar $(TAR_ARGS) -xf - -C $(EXTRACTDIR)
+	@$(MAKECOOKIE)
+
+# rule to extract files with tar and bzip
+tar-bz-extract-%:
+	@echo " ==> Extracting $(DOWNLOADDIR)/$*"
+	@bzip2 -dc $(DOWNLOADDIR)/$* | gtar $(TAR_ARGS) -xf - -C $(EXTRACTDIR)
+	@$(MAKECOOKIE)
+
+# extract compressed single files
+bz-extract-%:
+	@echo " ==> Decompressing $(DOWNLOADDIR)/$*"
+	@cp $(DOWNLOADDIR)/$* $(WORKDIR)/
+	@bzip2 -d $(WORKDIR)/$*
+	@$(MAKECOOKIE)
+
+gz-extract-%:
+	@echo " ==> Decompressing $(DOWNLOADDIR)/$*"
+	@cp $(DOWNLOADDIR)/$* $(WORKDIR)/
+	@gzip -d $(WORKDIR)/$*
+	@$(MAKECOOKIE)
+
+# extra dependency rule for git repos, that will allow the user
+# to supply an alternate target at their discretion
+git-extract-%:
+	@echo " ===> Extracting Git Repo $(DOWNLOADDIR)/$* (Treeish: $(call GIT_TREEISH,$*))"
+	git --bare archive --prefix=$(GARNAME)-$(GARVERSION)/ --remote=file://$(abspath $(DOWNLOADDIR))/$*/ $(call GIT_TREEISH,$*) | gtar -xf - -C $(EXTRACTDIR)
+	@$(MAKECOOKIE)
+
+# rule to extract files with unzip
+zip-extract-%:
+	@echo " ==> Extracting $(DOWNLOADDIR)/$*"
+	@unzip $(DOWNLOADDIR)/$* -d $(EXTRACTDIR)
+	@$(MAKECOOKIE)
+
+# this is a null extract rule for files which are constant and
+# unchanged (not archives)
+cp-extract-%:
+	@echo " ==> Copying $(DOWNLOADDIR)/$*"
+	@cp -rp $(DOWNLOADDIR)/$* $(WORKDIR)/
+	@$(MAKECOOKIE)
+
+#gets the meat of a .deb into $(WORKSRC)
+deb-bin-extract-%:
+	@echo " ==> Extracting $(DOWNLOADDIR)/$*"
+	@ar x $(DOWNLOADDIR)/$*
+	@rm debian-binary && \
+		mv *.tar.gz $(DOWNLOADDIR) && \
+			mkdir $(WORKSRC) && \
+				gtar $(TAR_ARGS) -xvz -C $(WORKSRC) \
+					-f $(DOWNLOADDIR)/data.tar.gz
+	@$(MAKECOOKIE)
+
+### EXTRACT FILE TYPE MAPPINGS ###
+# These rules specify which of the above extract action rules to use for a
+# given file extension.  Often support for a given extract type can be handled
+# by simply adding a rule here.
+
+extract-archive-%.tar: tar-extract-%.tar
+	@$(MAKECOOKIE)
+
+extract-archive-%.tar.gz: tar-gz-extract-%.tar.gz
+	@$(MAKECOOKIE)
+
+extract-archive-%.tar.Z: tar-gz-extract-%.tar.Z
+	@$(MAKECOOKIE)
+
+extract-archive-%.tgz: tar-gz-extract-%.tgz
+	@$(MAKECOOKIE)
+
+extract-archive-%.taz: tar-gz-extract-%.taz
+	@$(MAKECOOKIE)
+
+extract-archive-%.tar.bz: tar-bz-extract-%.tar.bz
+	@$(MAKECOOKIE)
+
+extract-archive-%.tar.bz2: tar-bz-extract-%.tar.bz2
+	@$(MAKECOOKIE)
+
+extract-archive-%.tbz: tar-bz-extract-%.tbz
+	@$(MAKECOOKIE)
+
+extract-archive-%.zip: zip-extract-%.zip
+	@$(MAKECOOKIE)
+
+extract-archive-%.ZIP: zip-extract-%.ZIP
+	@$(MAKECOOKIE)
+
+extract-archive-%.deb: deb-bin-extract-%.deb
+	@$(MAKECOOKIE)
+
+extract-archive-%.bz2: bz-extract-%.bz2
+	@$(MAKECOOKIE)
+
+extract-archive-%.gz: gz-extract-%.gz
+	@$(MAKECOOKIE)
+
+extract-archive-%.git: git-extract-%.git
+	@$(MAKECOOKIE)
+
+# anything we don't know about, we just assume is already
+# uncompressed and unarchived in plain format
+extract-archive-%: cp-extract-%
+	@$(MAKECOOKIE)
+
+#################### PATCH RULES ####################
+
+PATCHDIR ?= $(WORKSRC)
+PATCHDIRLEVEL ?= 1
+PATCHDIRFUZZ ?= 2
+GARPATCH = gpatch -d$(PATCHDIR) -p$(PATCHDIRLEVEL) -F$(PATCHDIRFUZZ)
+BASEWORKSRC = $(shell basename $(WORKSRC))
+
+# apply bzipped patches
+bz-patch-%:
+	@echo " ==> Applying patch $(DOWNLOADDIR)/$*"
+	@bzip2 -dc $(DOWNLOADDIR)/$* | $(GARPATCH)
+	@$(MAKECOOKIE)
+
+# apply gzipped patches
+gz-patch-%:
+	@echo " ==> Applying patch $(DOWNLOADDIR)/$*"
+	@gzip -dc $(DOWNLOADDIR)/$* | $(GARPATCH)
+	@$(MAKECOOKIE)
+
+# apply normal patches
+normal-patch-%:
+	@echo " ==> Applying patch $(DOWNLOADDIR)/$*"
+	$(GARPATCH) < $(DOWNLOADDIR)/$*
+	@$(MAKECOOKIE)
+
+# This is used by makepatch
+%/gar-base.diff:
+	@echo " ==> Creating patch $@"
+	@EXTRACTDIR=$(SCRATCHDIR) COOKIEDIR=$(SCRATCHDIR)-$(COOKIEDIR) $(MAKE) extract
+	@PATCHDIR=$(SCRATCHDIR)/$(BASEWORKSRC) COOKIEDIR=$(SCRATCHDIR)-$(COOKIEDIR) $(MAKE) patch
+	@mv $(SCRATCHDIR)/$(BASEWORKSRC) $(WORKSRC_FIRSTMOD).orig
+	@( cd $(WORKDIR_FIRSTMOD); \
+		if gdiff --speed-large-files --minimal -Nru $(BASEWORKSRC).orig $(BASEWORKSRC) > gar-base.diff; then :; else \
+			cd $(CURDIR); \
+			mv -f $(WORKDIR_FIRSTMOD)/gar-base.diff $@; \
+		fi )
+
+### PATCH FILE TYPE MAPPINGS ###
+# These rules specify which of the above patch action rules to use for a given
+# file extension.  Often support for a given patch format can be handled by
+# simply adding a rule here.
+
+patch-extract-%.bz: bz-patch-%.bz
+	@$(MAKECOOKIE)
+
+patch-extract-%.bz2: bz-patch-%.bz2
+	@$(MAKECOOKIE)
+
+patch-extract-%.gz: gz-patch-%.gz
+	@$(MAKECOOKIE)
+
+patch-extract-%.Z: gz-patch-%.Z
+	@$(MAKECOOKIE)
+
+patch-extract-%.diff: normal-patch-%.diff
+	@$(MAKECOOKIE)
+
+patch-extract-%.patch: normal-patch-%.patch
+	@$(MAKECOOKIE)
+
+patch-extract-%: normal-patch-%
+	@$(MAKECOOKIE)
+
+#################### CONFIGURE RULES ####################
+
+TMP_DIRPATHS = --prefix=$(prefix) --exec_prefix=$(exec_prefix) --bindir=$(bindir) --sbindir=$(sbindir) --libexecdir=$(libexecdir) --datadir=$(datadir) --sysconfdir=$(sysconfdir) --sharedstatedir=$(sharedstatedir) --localstatedir=$(localstatedir) --libdir=$(libdir) --infodir=$(infodir) --lispdir=$(lispdir) --includedir=$(includedir) --mandir=$(mandir)
+
+NODIRPATHS += --lispdir
+
+DIRPATHS = $(filter-out $(addsuffix %,$(NODIRPATHS)), $(TMP_DIRPATHS))
+
+# configure a package that has an autoconf-style configure
+# script.
+configure-%/configure:
+	@echo " ==> Running configure in $*"
+	cd $* && $(CONFIGURE_ENV) ./configure $(CONFIGURE_ARGS)
+	@$(MAKECOOKIE)
+
+configure-%/autogen.sh:
+	@echo " ==> Running autogen.sh in $*"
+	@cd $* && $(CONFIGURE_ENV) ./autogen.sh $(CONFIGURE_ARGS)
+	@$(MAKECOOKIE)
+
+# configure a package that uses imake
+# FIXME: untested and likely not the right way to handle the
+# arguments
+configure-%/Imakefile: 
+	@echo " ==> Running xmkmf in $*"
+	@cd $* && $(CONFIGURE_ENV) xmkmf $(CONFIGURE_ARGS)
+	@$(MAKECOOKIE)
+
+configure-%/setup.rb:
+	@echo " ==> Running setup.rb config in $*"
+	@( cd $* ; $(CONFIGURE_ENV) ruby ./setup.rb config $(CONFIGURE_ARGS) )
+	@$(MAKECOOKIE)
+
+#################### BUILD RULES ####################
+
+# build from a standard gnu-style makefile's default rule.
+build-%/Makefile:
+	@echo " ==> Running make in $*"
+	@$(BUILD_ENV) $(MAKE) $(PARALLELMFLAGS) $(foreach TTT,$(BUILD_OVERRIDE_DIRS),$(TTT)="$($(TTT))") -C $* $(BUILD_ARGS)
+	@$(MAKECOOKIE)
+
+build-%/makefile:
+	@echo " ==> Running make in $*"
+	@$(BUILD_ENV) $(MAKE) $(PARALLELMFLAGS) $(foreach TTT,$(BUILD_OVERRIDE_DIRS),$(TTT)="$($(TTT))") -C $* $(BUILD_ARGS)
+	@$(MAKECOOKIE)
+
+build-%/GNUmakefile:
+	@echo " ==> Running make in $*"
+	@$(BUILD_ENV) $(MAKE) $(PARALLELMFLAGS) $(foreach TTT,$(BUILD_OVERRIDE_DIRS),$(TTT)="$($(TTT))") -C $* $(BUILD_ARGS)
+	@$(MAKECOOKIE)
+
+build-%/Jamfile:
+	@echo " ==> Running bjam in $*"
+	@( cd $* ; $(BUILD_ENV) bjam $(JAMFLAGS) $(BUILD_ARGS) )
+	@$(MAKECOOKIE)
+
+# Ruby makefiles
+build-%/Rakefile:
+	@echo " ==> Running rake in $*"
+	@( cd $* ; $(BUILD_ENV) rake $(RAKEFLAGS) $(BUILD_ARGS) )
+	@$(MAKECOOKIE)
+
+build-%/rakefile:
+	@echo " ==> Running rake in $*"
+	@( cd $* ; $(BUILD_ENV) rake $(RAKEFLAGS) $(BUILD_ARGS) )
+	@$(MAKECOOKIE)
+
+build-%/setup.rb:
+	@echo " ==> Running setup.rb setup in $*"
+	@( cd $* ; $(BUILD_ENV) ruby ./setup.rb setup $(BUILD_ARGS) )
+	@$(MAKECOOKIE)
+
+# This can be: build, build_py, build_ext, build_clib, build_scripts
+# See setup.py --help-commands for details
+PYBUILD_CMD ?= build
+build-%/setup.py:
+	@echo " ==> Running setup.py $(PYBUILD_TYPE) in $*"
+	@( cd $* ; $(BUILD_ENV) python ./setup.py $(PYBUILD_CMD) $(BUILD_ARGS) )
+	@$(MAKECOOKIE)
+
+#################### TEST RULES ####################
+
+TEST_TARGET ?= test
+
+# Run tests on pre-built sources
+test-%/Makefile:
+	@echo " ==> Running make $(TEST_TARGET) in $*"
+	@$(TEST_ENV) $(MAKE) $(foreach TTT,$(TEST_OVERRIDE_DIRS),$(TTT)="$($(TTT))") -C $* $(TEST_ARGS) $(TEST_TARGET)
+	@$(MAKECOOKIE)
+
+test-%/makefile:
+	@echo " ==> Running make $(TEST_TARGET) in $*"
+	@$(TEST_ENV) $(MAKE) $(foreach TTT,$(TEST_OVERRIDE_DIRS),$(TTT)="$($(TTT))") -C $* $(TEST_ARGS) $(TEST_TARGET)
+	@$(MAKECOOKIE)
+
+test-%/GNUmakefile:
+	@echo " ==> Running make $(TEST_TARGET) in $*"
+	@$(TEST_ENV) $(MAKE) $(foreach TTT,$(TEST_OVERRIDE_DIRS),$(TTT)="$($(TTT))") -C $* $(TEST_ARGS) $(TEST_TARGET)
+	@$(MAKECOOKIE)
+
+# Ruby makefiles
+test-%/Rakefile:
+	@echo " ==> Running rake $(TEST_TARGET) in $*"
+	@( cd $* ; $(TEST_ENV) rake $(RAKEFLAGS) $(TEST_ARGS) $(TEST_TARGET) )
+	@$(MAKECOOKIE)
+
+test-%/rakefile:
+	@echo " ==> Running rake $(TEST_TARGET) in $*"
+	@( cd $* ; $(TEST_ENV) rake $(RAKEFLAGS) $(TEST_ARGS) $(TEST_TARGET) )
+	@$(MAKECOOKIE)
+
+test-%/setup.py:
+	@echo " ==> Running setup.py test in $*"
+	@( cd $* ; $(TEST_ENV) python ./setup.py test $(TEST_ARGS) )
+	@$(MAKECOOKIE)
+
+################# INSTALL RULES ####################
+
+# just run make install and hope for the best.
+install-%/Makefile:
+	@echo " ==> Running make install in $*"
+	@$(INSTALL_ENV) $(MAKE) DESTDIR=$(DESTDIR) $(foreach TTT,$(INSTALL_OVERRIDE_VARS),$(TTT)="$(INSTALL_OVERRIDE_VAR_$(TTT))") $(foreach TTT,$(INSTALL_OVERRIDE_DIRS),$(TTT)="$(DESTDIR)$($(TTT))") -C $* $(INSTALL_ARGS) install
+	@$(MAKECOOKIE)
+
+install-%/makefile:
+	@echo " ==> Running make install in $*"
+	@$(INSTALL_ENV) $(MAKE) DESTDIR=$(DESTDIR) $(foreach TTT,$(INSTALL_OVERRIDE_VARS),$(TTT)="$(INSTALL_OVERRIDE_VAR_$(TTT))") $(foreach TTT,$(INSTALL_OVERRIDE_DIRS),$(TTT)="$(DESTDIR)$($(TTT))") -C $* $(INSTALL_ARGS) install
+	@$(MAKECOOKIE)
+
+install-%/GNUmakefile:
+	@echo " ==> Running make install in $*"
+	@$(INSTALL_ENV) $(MAKE) DESTDIR=$(DESTDIR) $(foreach TTT,$(INSTALL_OVERRIDE_VARS),$(TTT)="$(INSTALL_OVERRIDE_VAR_$(TTT))") $(foreach TTT,$(INSTALL_OVERRIDE_DIRS),$(TTT)="$(DESTDIR)$($(TTT))") -C $* $(INSTALL_ARGS) install
+	@$(MAKECOOKIE)
+
+# Ruby makefiles
+install-%/Rakefile:
+	@echo " ==> Running rake install in $*"
+	@( cd $* ; $(INSTALL_ENV) rake $(RAKEFLAGS) $(INSTALL_ARGS) )
+	@$(MAKECOOKIE)
+
+install-%/rakefile:
+	@echo " ==> Running rake install in $*"
+	@( cd $* ; $(INSTALL_ENV) rake $(RAKEFLAGS) $(INSTALL_ARGS) )
+	@$(MAKECOOKIE)
+
+install-%/setup.rb:
+	@echo " ==> Running setup.rb install in $*"
+	@( cd $* ; $(INSTALL_ENV) ruby ./setup.rb install --prefix=$(DESTDIR) )
+	@$(MAKECOOKIE)
+
+# This can be: install, install_lib, install_headers, install_scripts,
+# or install_data.  See setup.py --help-commands for details.
+PYINSTALL_CMD ?= install
+install-%/setup.py:
+	@echo " ==> Running setup.py $(PYINSTALL_CMD) in $*"
+	@( cd $* ; $(INSTALL_ENV) python ./setup.py $(PYINSTALL_CMD) $(INSTALL_ARGS) )
+	@$(MAKECOOKIE)
+
+# pkg-config scripts
+install-%-config:
+	mkdir -p $(STAGINGDIR)/$(GARNAME)
+	cp -f $(DESTDIR)$(bindir)/$*-config $(STAGINGDIR)/$(GARNAME)/
+	$(MAKECOOKIE)
+
+######################################
+# Use a manifest file of the format:
+# src:dest[:mode[:owner[:group]]]
+#   as in...
+# ${WORKSRC}/nwall:${bindir}/nwall:2755:root:tty
+# ${WORKSRC}/src/foo:${sharedstatedir}/foo
+# ${WORKSRC}/yoink:${sysconfdir}/yoink:0600
+
+# Okay, so for the benefit of future generations, this is how it
+# works:
+#
+# First of all, we have this file with colon-separated lines.
+# The $(shell cat foo) routine turns it into a space-separated
+# list of words.  The foreach iterates over this list, putting a
+# colon-separated record in $(ZORCH) on each pass through.
+#
+# Next, we have the macro $(MANIFEST_LINE), which splits a record
+# into a space-separated list, and $(MANIFEST_SIZE), which
+# determines how many elements are in such a list.  These are
+# purely for convenience, and could be inserted inline if need
+# be.
+MANIFEST_LINE = $(subst :, ,$(ZORCH)) 
+MANIFEST_SIZE = $(words $(MANIFEST_LINE))
+
+# So the install command takes a variable number of parameters,
+# and our records have from two to five elements.  Gmake can't do
+# any sort of arithmetic, so we can't do any really intelligent
+# indexing into the list of parameters.
+# 
+# Since the last three elements of the $(MANIFEST_LINE) are what
+# we're interested in, we make a parallel list with the parameter
+# switch text (note the dummy elements at the beginning):
+MANIFEST_FLAGS = notused notused --mode= --owner= --group=
+
+# The following environment variables are set before the
+# installation boogaloo begins.  This ensures that WORKSRC is
+# available to the manifest and that all of the location
+# variables are suitable for *installation* (that is, using
+# DESTDIR)
+
+MANIFEST_ENV += WORKSRC=$(WORKSRC)
+# This was part of the "implicit DESTDIR" regime.  However:
+# http://gar.lnx-bbc.org/wiki/ImplicitDestdirConsideredHarmful
+#MANIFEST_ENV += $(foreach TTT,prefix exec_prefix bindir sbindir libexecdir datadir sysconfdir sharedstatedir localstatedir libdir infodir lispdir includedir mandir,$(TTT)=$(DESTDIR)$($(TTT)))
+
+# ...and then we join a slice of it with the corresponding slice
+# of the $(MANIFEST_LINE), starting at 3 and going to
+# $(MANIFEST_SIZE).  That's where all the real magic happens,
+# right there!
+#
+# following that, we just splat elements one and two of
+# $(MANIFEST_LINE) on the end, since they're the ones that are
+# always there.  Slap a semicolon on the end, and you've got a
+# completed iteration through the foreach!  Beaujolais!
+
+# FIXME: using -D may not be the right thing to do!
+install-$(MANIFEST_FILE):
+	@echo " ==> Installing from $(MANIFEST_FILE)"
+	$(MANIFEST_ENV) ; $(foreach ZORCH,$(shell cat $(MANIFEST_FILE)), ginstall -Dc $(join $(wordlist 3,$(MANIFEST_SIZE),$(MANIFEST_FLAGS)),$(wordlist 3,$(MANIFEST_SIZE),$(MANIFEST_LINE))) $(word 1,$(MANIFEST_LINE)) $(word 2,$(MANIFEST_LINE)) ;)
+	@$(MAKECOOKIE)
+
+#################### DEPENDENCY RULES ####################
+
+# These two lines are here to grandfather in all the packages that use
+# BUILDDEPS
+IMGDEPS += build
+build_DEPENDS = $(BUILDDEPS)
+
+# Standard deps install into the standard install dir.  For the
+# BBC, we set the includedir to the build tree and the libdir to
+# the install tree.  Most dependencies work this way.
+
+$(GARDIR)/%/$(COOKIEDIR)/install:
+	@echo ' ==> Building $* as a dependency'
+	@$(MAKE) -C $(GARDIR)/$* install DESTIMG=$(DESTIMG)
+
+# builddeps need to have everything put in the build DESTIMG
+#$(GARDIR)/%/$(COOKIEROOTDIR)/build.d/install:
+#	@echo ' ==> Building $* as a build dependency'
+#	@$(MAKE) -C $(GARDIR)/$* install	DESTIMG=build
+
+# Source Deps grab the source code for another package
+# XXX: nobody uses this, but it should really be more like
+# $(GARDIR)/%/cookies/patch:
+srcdep-$(GARDIR)/%:
+	@echo ' ==> Grabbing source for $* as a dependency'
+	@$(MAKE) -C $(GARDIR)/$* patch-p extract-p > /dev/null 2>&1 || \
+	 $(MAKE) -C $(GARDIR)/$* patch
+
+# Image deps create dependencies on package installations in
+# images other than the current package's DESTIMG.
+IMGDEP_TARGETS = $(foreach TTT,$($*_DEPENDS),$(subst xyzzy,$(TTT),$(GARDIR)/xyzzy/$(COOKIEROOTDIR)/$*.d/install))
+imgdep-%:
+	@test -z "$(strip $(IMGDEP_TARGETS))" || $(MAKE) DESTIMG="$*" $(IMGDEP_TARGETS)
+
+# Igor's info and man gzipper rule
+gzip-info-man: gzip-info gzip-man
+
+gzip-info:
+	gfind $(DESTDIR) -type f -iname *.info* -not -iname *.gz | \
+		gxargs -r gzip --force
+
+gzip-man:
+	gfind $(DESTDIR) -type f -iname *.[1-8] -size +2 -print | \
+		gxargs -r gzip --force
+
+compile-elisp:
+	@(for d in $(ELISP_DIRS); do \
+		echo " ===> Compiling .el files in $$d"; \
+		cd $(PKGROOT)/$$d; \
+		for f in `find . -name "*el"`; do \
+			bf=`basename $$f`; \
+			bd=`dirname $$f`; \
+			cd $$bd; \
+			emacs -L $(PKGROOT)/$$d -L $(PKGROOT)/$$d/$$bd $(EXTRA_EMACS_ARGS) -batch -f batch-byte-compile "$$bf"; \
+			cd $(PKGROOT)/$$d; \
+		done; \
+	done)
+
+include $(addprefix $(GARDIR)/,$(EXTRA_LIBS))
+
+# Mmm, yesssss.  cookies my preciousssss!  Mmm, yes downloads it
+# is!  We mustn't have nasty little gmakeses deleting our
+# precious cookieses now must we?
+.PRECIOUS: $(DOWNLOADDIR)/% $(COOKIEDIR)/% $(FILEDIR)/%

Deleted: csw/mgar/gar/v2-collapsed-modulations/gar.mk
===================================================================
--- csw/mgar/gar/v2/gar.mk	2009-10-09 11:59:38 UTC (rev 6808)
+++ csw/mgar/gar/v2-collapsed-modulations/gar.mk	2009-10-18 17:29:04 UTC (rev 6894)
@@ -1,828 +0,0 @@
-
-#
-# $Id$
-#
-# Copyright (C) 2001 Nick Moffitt
-# 
-# Redistribution and/or use, with or without modification, is
-# permitted.  This software is without warranty of any kind.  The
-# author(s) shall not be liable in the event that use of the
-# software causes damage.
-
-# Comment this out to make much verbosity
-#.SILENT:
-
-#ifeq ($(origin GARDIR), undefined)
-#GARDIR := $(CURDIR)/../..
-#endif
-
-#GARDIR ?= ../..
-#ifeq ($(origin GARDIR), undefined)
-#GARDIR := $(CURDIR)/../..
-#endif
-
-ifneq ($(abspath /),/)
-$(error Your version of 'make' is too old: $(MAKE_VERSION). Please make sure you are using at least 3.81)
-endif
-
-GARDIR ?= gar
-GARBIN  = $(GARDIR)/bin
-
-DIRSTODOTS = $(subst . /,./,$(patsubst %,/..,$(subst /, ,/$(1))))
-ROOTFROMDEST = $(call DIRSTODOTS,$(DESTDIR))
-MAKEPATH = $(shell echo $(1) | perl -lne 'print join(":", split)')
-TOLOWER = $(shell echo $(1) | tr '[A-Z]' '[a-z]')
-
-# If you call this the value is only evaluated the first time
-# Usage: $(call SETONCE,A,MyComplexVariableEvaluatedOnlyOnce)
-SETONCE = $(eval $(1) ?= $(2))
-
-#meant to take a git url and return just the $proj.git part
-GITPROJ = $(lastword $(subst /, ,$(1)))
-
-PARALLELMFLAGS ?= $(MFLAGS)
-export PARALLELMFLAGS
-
-DISTNAME ?= $(GARNAME)-$(GARVERSION)
-
-DYNSCRIPTS = $(foreach PKG,$(SPKG_SPECS),$(foreach SCR,$(ADMSCRIPTS),$(if $(value $(PKG)_$(SCR)), $(PKG).$(SCR))))
-_NOCHECKSUM += $(DYNSCRIPTS) $(foreach R,$(GIT_REPOS),$(call GITPROJ,$(R)))
-
-# Allow overriding of only specific components of ALLFILES by clearing e. g. 'ALLFILES_DYNSCRIPTS = '
-ALLFILES_DISTFILES ?= $(DISTFILES)
-ALLFILES_PATCHFILES ?= $(PATCHFILES) $(foreach M,$(MODULATIONS),$(PATCHFILES_$M))
-ALLFILES_DYNSCRIPTS ?= $(DYNSCRIPTS)
-ALLFILES_GIT_REPOS ?= $(foreach R,$(GIT_REPOS),$(call GITPROJ,$(R)))
-ALLFILES ?= $(sort $(ALLFILES_DISTFILES) $(ALLFILES_PATCHFILES) $(ALLFILES_DYNSCRIPTS) $(ALLFILES_GIT_REPOS) $(EXTRA_ALLFILES))
-
-ifeq ($(MAKE_INSTALL_DIRS),1)
-INSTALL_DIRS = $(addprefix $(DESTDIR),$(prefix) $(exec_prefix) $(bindir) $(sbindir) $(libexecdir) $(datadir) $(sysconfdir) $(sharedstatedir) $(localstatedir) $(libdir) $(infodir) $(lispdir) $(includedir) $(mandir) $(foreach NUM,1 2 3 4 5 6 7 8, $(mandir)/man$(NUM)) $(sourcedir))
-else
-INSTALL_DIRS =
-endif
-
-# For rules that do nothing, display what dependencies they
-# successfully completed
-#DONADA = @echo "	[$@] complete.  Finished rules: $+"
-#DONADA = @touch $(COOKIEDIR)/$@; echo "	[$@] complete for $(GARNAME)."
-COOKIEFILE = $(COOKIEDIR)/$(patsubst $(COOKIEDIR)/%,%,$1)
-DONADA = @touch $(call COOKIEFILE,$@); echo "	[$@] complete for $(GARNAME)."
-
-
-# TODO: write a stub rule to print out the name of a rule when it
-# *does* do something, and handle indentation intelligently.
-
-# Default sequence for "all" is:  fetch checksum extract patch configure build
-all: build
-
-# include the configuration file to override any of these variables
-include $(GARDIR)/gar.conf.mk
-include $(GARDIR)/gar.lib.mk
-
-# ========================= MODULATIONS ======================== 
-
-# The default is to modulate over ISAs
-MODULATORS ?= ISA $(EXTRA_MODULATORS) $(EXTRA_MODULATORS_$(GARCH))
-MODULATIONS_ISA = $(NEEDED_ISAS)
-
-tolower = $(shell echo $(1) | tr '[A-Z]' '[a-z]')
-expand_modulator_1 = $(addprefix $(call tolower,$(1))-,$(MODULATIONS_$(1)))
-# This expands to the list of all modulators with their respective modulations
-modulations = $(if $(word 2,$(1)),\
-	$(foreach P,$(call expand_modulator_1,$(firstword $(1))),\
-		$(addprefix $(P)-,$(call modulations,$(wordlist 2,$(words $(1)),$(1))))\
-	),\
-	$(call expand_modulator_1,$(1)))
-
-MODULATIONS ?= $(filter-out $(SKIP_MODULATIONS),$(strip $(call modulations,$(strip $(MODULATORS)))))
-
-# _modulate(ISA STATIC,,,)
-# -> _modulate2(STATIC,isa-i386,ISA,ISA=i386)
-#    -> _modulate2(,,isa-i386-static-yes,ISA STATIC,ISA=i386 STATIC=yes)
-#       -> xxx-isa-i386-static-yes: @gmake xxx ISA=i386 STATIC=yes
-#    -> _modulate2(,,isa-i386-static-no,ISA STATIC,ISA=i386 STATIC=no)
-#       -> xxx-isa-i386-static-no: @gmake xxx ISA=i386 STATIC=no
-# -> _modulate2(STATIC,isa-amd64,ISA,ISA=amd64)
-#    -> _modulate2(,,isa-amd64-static-yes,ISA STATIC,ISA=amd64 STATIC=yes)
-#       -> xxx-isa-amd64-static-yes: @gmake xxx ISA=amd64 STATIC=yes
-#    -> _modulate2(,,isa-amd64-static-no,ISA STATIC,ISA=amd64 STATIC=no)
-#       -> xxx-isa-amd64-static-no: @gmake xxx ISA=amd64 STATIC=no
-
-define _modulate_target
-$(1)-$(2):
-	@$(MAKE) MODULATION=$(2) $(3) $(1)-modulated
-	@# This is MAKECOOKIE expanded to use the name of the rule explicitly as the rule has
-	@# not been evaluated yet. XXX: Use function _MAKECOOKIE for both
-	@mkdir -p $(COOKIEDIR)/$(dir $(1)-$(2)) && date >> $(COOKIEDIR)/$(1)-$(2)
-	@# The next line has intentionally been left blank to explicitly terminate this make rule
-
-endef
-
-define _modulate_target_nocookie
-$(1)-$(2):
-	@$(MAKE) -s MODULATION=$(2) $(3) $(1)-modulated
-	@# The next line has intentionally been left blank to explicitly terminate this make rule
-
-endef
-
-define _modulate_merge
-$(foreach ASSIGNMENT,$(3),
-merge-$(2): $(ASSIGNMENT)
-)
-merge-$(2): BUILDHOST=$$(call modulation2host)
-merge-$(2):
-	@echo "[===== Building modulation '$(2)' on host '$$(BUILDHOST)' =====]"
-	$$(if $$(and $$(BUILDHOST),$$(filter-out $$(THISHOST),$$(BUILDHOST))),\
-		$(SSH) $$(BUILDHOST) "PATH=$$(PATH) $(MAKE) -C $$(CURDIR) $(if $(PLATFORM),PLATFORM=$(PLATFORM)) MODULATION=$(2) $(3) merge-modulated",\
-		$(MAKE) $(if $(PLATFORM),PLATFORM=$(PLATFORM)) MODULATION=$(2) $(3) merge-modulated\
-	)
-	@# The next line has intentionally been left blank to explicitly terminate this make rule
-
-endef
-
-define _modulate_do
-$(call _modulate_target,extract,$(2),$(4))
-$(call _modulate_target,patch,$(2),$(4))
-$(call _modulate_target,configure,$(2),$(4))
-$(call _modulate_target_nocookie,reset-configure,$(2),$(4))
-$(call _modulate_target,build,$(2),$(4))
-$(call _modulate_target_nocookie,reset-build,$(2),$(4))
-$(call _modulate_target,test,$(2),$(4))
-$(call _modulate_target,install,$(2),$(4))
-$(call _modulate_target_nocookie,reset-install,$(2),$(4))
-#$(call _modulate_target,merge,$(2),$(4))
-$(call _modulate_merge,,$(2),$(4))
-$(call _modulate_target_nocookie,reset-merge,$(2),$(4))
-$(call _modulate_target_nocookie,clean,$(2),$(4))
-$(call _modulate_target_nocookie,_modenv,$(2),$(4))
-endef
-
-# This evaluates to the make rules for all modulations passed as first argument
-# Usage: _modulate( <MODULATORS> )
-define _modulate
-$(foreach M,$(MODULATIONS_$(firstword $(1))),\
-	$(call _modulate2,\
-		$(wordlist 2,$(words $(1)),$(1)),\
-		$(call tolower,$(firstword $(1)))-$(M),\
-		$(firstword $(1)),\
-		$(firstword $(1))=$(M)\
-	)\
-)
-endef
-
-# This is a helper for the recursive _modulate
-define _modulate2
-$(if $(strip $(1)),\
-	$(foreach M,$(MODULATIONS_$(firstword $(1))),\
-		$(call _modulate2,\
-			$(wordlist 2,$(words $(1)),$(1)),\
-			$(addprefix $(2)-,$(call tolower,$(firstword $(1)))-$(M)),\
-			$(3) $(firstword $(1)),\
-			$(4) $(firstword $(1))=$(M)\
-		)\
-	),\
-	$(call _modulate_do,,$(strip $(2)),$(3),$(4))\
-)
-endef
-
-define _pmod
-	@echo "[ $1 for modulation $(MODULATION): $(foreach M,$(MODULATORS),$M=$($M)) ]"
-endef
-
-$(eval $(call _modulate,$(MODULATORS)))
-
-#################### DIRECTORY MAKERS ####################
-
-# This is to make dirs as needed by the base rules
-$(sort $(DOWNLOADDIR) $(PARTIALDIR) $(COOKIEDIR) $(WORKSRC) $(addprefix $(WORKROOTDIR)/build-,global $(MODULATIONS)) $(EXTRACTDIR) $(FILEDIR) $(SCRATCHDIR) $(PKGROOT) $(INSTALL_DIRS) $(INSTALLISADIR) $(GARCHIVEDIR) $(GARPKGDIR) $(STAGINGDIR)) $(COOKIEDIR)/%:
-	@if test -d $@; then : ; else \
-		ginstall -d $@; \
-		echo "ginstall -d $@"; \
-	fi
-
-# These stubs are wildcarded, so that the port maintainer can
-# define something like "pre-configure" and it won't conflict,
-# while the configure target can call "pre-configure" safely even
-# if the port maintainer hasn't defined it.
-# 
-# in addition to the pre-<target> rules, the maintainer may wish
-# to set a "pre-everything" rule, which runs before the first
-# actual target.
-pre-%:
-	@true
-
-post-%:
-	@true
-
-# Call any arbitrary rule recursively for all dependencies
-deep-%: %
-	@for target in "" $(DEPEND_LIST) ; do \
-		test -z "$$target" && continue ; \
-		$(MAKE) -C ../../$$target DESTIMG=$(DESTIMG) $@ ; \
-	done
-	@$(foreach IMG,$(filter-out $(DESTIMG),$(IMGDEPS)),for dep in "" $($(IMG)_DEPENDS); do test -z "$$dep" && continue ; $(MAKE) -C ../../$$dep DESTIMG=$(IMG) $@; done; )
-
-
-# ========================= MAIN RULES ========================= 
-# The main rules are the ones that the user can specify as a
-# target on the "make" command-line.  Currently, they are:
-#	prereq fetch-list fetch checksum makesum extract checkpatch patch
-#	build install reinstall uninstall package
-# (some may not be complete yet).
-#
-# Each of these rules has dependencies that run in the following
-# order:
-# 	- run the previous main rule in the chain (e.g., install
-# 	  depends on build)
-#	- run the pre- rule for the target (e.g., configure would
-#	  then run pre-configure)
-#	- generate a set of files to depend on.  These are typically
-#	  cookie files in $(COOKIEDIR), but in the case of fetch are
-#	  actual downloaded files in $(DOWNLOADDIR)
-# 	- run the post- rule for the target
-# 
-# The main rules also run the $(DONADA) code, which prints out
-# what just happened when all the dependencies are finished.
-
-announce:
-	@echo "[===== NOW BUILDING: $(DISTNAME) =====]"
-
-announce-modulation:
-	@echo "[===== NOW BUILDING: $(DISTNAME) MODULATION $(MODULATION): $(foreach M,$(MODULATORS),$M=$($M)) =====]"
-
-# prerequisite	- Make sure that the system is in a sane state for building the package
-PREREQUISITE_TARGETS = $(addprefix prerequisitepkg-,$(PREREQUISITE_BASE_PKGS) $(PREREQUISITE_PKGS)) $(addprefix prerequisite-,$(PREREQUISITE_SCRIPTS))
-
-# Force to be called in global modulation
-prerequisite: $(if $(filter global,$(MODULATION)),announce pre-everything $(COOKIEDIR) $(DOWNLOADDIR) $(PARTIALDIR) $(addprefix dep-$(GARDIR)/,$(FETCHDEPS)) pre-prerequisite $(PREREQUISITE_TARGETS) post-prerequisite)
-	$(if $(filter-out global,$(MODULATION)),$(MAKE) -s MODULATION=global prerequisite)
-	$(DONADA)
-
-prerequisitepkg-%:
-	@echo " ==> Verifying for installed package $*: \c"
-	@(pkginfo -q $*; if [ $$? -eq 0 ]; then echo "installed"; else echo "MISSING"; exit 1; fi)
-	@$(MAKECOOKIE)
-
-# fetch-list	- Show list of files that would be retrieved by fetch.
-# NOTE: DOES NOT RUN pre-everything!
-fetch-list:
-	@echo "Distribution files: "
-	@$(foreach F,$(DISTFILES),echo "	$F";)
-	@echo "Patch files: "
-	@$(foreach P,$(PATCHFILES),echo "	$P";)
-	@$(foreach M,$(MODULATIONS),$(if $(PATCHFILES_$M),echo "  Modulation $M only: $(PATCHFILES_$M)";))
-	@echo "Dynamically generated scripts: "
-	@$(foreach D,$(DYNSCRIPTS),echo "	$D";)
-	@echo "Git Repos tracked: "
-	@$(foreach R,$(GIT_REPOS),echo "       $R";)
-
-# fetch			- Retrieves $(DISTFILES) (and $(PATCHFILES) if defined)
-#				  into $(DOWNLOADDIR) as necessary.
-FETCH_TARGETS =  $(addprefix $(DOWNLOADDIR)/,$(ALLFILES))
-
-fetch: prerequisite pre-fetch $(FETCH_TARGETS) post-fetch
-	@$(DONADA)
-
-# returns true if fetch has completed successfully, false
-# otherwise
-fetch-p:
-	@$(foreach COOKIEFILE,$(FETCH_TARGETS), test -e $(COOKIEDIR)/$(COOKIEFILE) ;)
-
-# checksum		- Use $(CHECKSUMFILE) to ensure that your
-# 				  distfiles are valid.
-CHECKSUM_TARGETS = $(addprefix checksum-,$(filter-out $(_NOCHECKSUM) $(NOCHECKSUM),$(ALLFILES)))
-
-checksum: fetch $(COOKIEDIR) pre-checksum $(CHECKSUM_TARGETS) post-checksum
-	@$(DONADA)
-
-checksum-global: $(if $(filter global,$(MODULATION)),checksum)
-	$(if $(filter-out global,$(MODULATION)),$(MAKE) -s MODULATION=global checksum)
-	@$(DONADA)
-
-# The next rule handles the dependency from the modulated context to
-# the contextless checksumming. The rule is called when the cookie
-# to the global checksum is requested. If the global checksum has not run,
-# then run it. Otherwise it is silently accepted.
-checksum-modulated: checksum-global
-	@$(DONADA)
-
-# returns true if checksum has completed successfully, false
-# otherwise
-checksum-p:
-	@$(foreach COOKIEFILE,$(CHECKSUM_TARGETS), test -e $(COOKIEDIR)/$(COOKIEFILE) ;)
-
-# makesum		- Generate distinfo (only do this for your own ports!).
-MAKESUM_TARGETS =  $(filter-out $(_NOCHECKSUM) $(NOCHECKSUM),$(ALLFILES))
-
-makesum: fetch $(addprefix $(DOWNLOADDIR)/,$(MAKESUM_TARGETS))
-	@if test "x$(MAKESUM_TARGETS)" != "x "; then \
-		(cd $(DOWNLOADDIR) && gmd5sum $(MAKESUM_TARGETS)) > $(CHECKSUM_FILE) ; \
-		echo "Checksums made for $(MAKESUM_TARGETS)" ; \
-		cat $(CHECKSUM_FILE) ; \
-	fi
-
-# I am always typing this by mistake
-makesums: makesum
-
-GARCHIVE_TARGETS =  $(addprefix $(GARCHIVEDIR)/,$(ALLFILES))
-
-garchive: checksum $(GARCHIVE_TARGETS) ;
-
-# extract		- Unpacks $(DISTFILES) into $(EXTRACTDIR) (patches are "zcatted" into the patch program)
-EXTRACT_TARGETS-global ?= $(foreach SPEC,$(SPKG_SPECS),$(filter $(SPEC).%,$(DISTFILES) $(DYNSCRIPTS) $(foreach R,$(GIT_REPOS),$(call GITPROJ,$(R)))))
-EXTRACT_TARGETS = $(addprefix extract-archive-,$(filter-out $(NOEXTRACT),$(if $(EXTRACT_TARGETS-$(MODULATION)),$(EXTRACT_TARGETS-$(MODULATION)),$(DISTFILES) $(DYNSCRIPTS) $(foreach R,$(GIT_REPOS),$(call GITPROJ,$(R))))))
-
-# We call an additional extract-modulated without resetting any variables so
-# a complete unpacked set goes to the global dir for packaging (like gspec)
-extract: checksum $(COOKIEDIR) pre-extract extract-modulated $(addprefix extract-,$(MODULATIONS)) post-extract
-	@$(DONADA)
-
-extract-global: $(if $(filter global,$(MODULATION)),extract-modulated)
-	$(if $(filter-out global,$(MODULATION)),$(MAKE) -s MODULATION=global extract)
-	@$(MAKECOOKIE)
-
-extract-modulated: checksum-modulated $(EXTRACTDIR) $(COOKIEDIR) \
-		$(addprefix dep-$(GARDIR)/,$(EXTRACTDEPS)) \
-		announce-modulation \
-		pre-extract-modulated pre-extract-$(MODULATION) $(EXTRACT_TARGETS) post-extract-$(MODULATION) post-extract-modulated
-	@$(DONADA)
-
-# returns true if extract has completed successfully, false
-# otherwise
-extract-p:
-	@$(foreach COOKIEFILE,$(EXTRACT_TARGETS), test -e $(COOKIEDIR)/$(COOKIEFILE) ;)
-
-# checkpatch	- Do a "patch -C" instead of a "patch".  Note
-# 				  that it may give incorrect results if multiple
-# 				  patches deal with the same file.
-# TODO: actually write it!
-checkpatch: extract
-	@echo "$@ NOT IMPLEMENTED YET"
-
-# patch			- Apply any provided patches to the source.
-PATCH_TARGETS = $(addprefix patch-extract-,$(PATCHFILES) $(PATCHFILES_$(MODULATION)))
-
-patch: pre-patch $(addprefix patch-,$(MODULATIONS)) post-patch
-	@$(DONADA)
-
-patch-modulated: extract-modulated $(WORKSRC) pre-patch-modulated pre-patch-$(MODULATION) $(PATCH_TARGETS) post-patch-$(MODULATION) post-patch-modulated
-	@$(DONADA)
-
-# returns true if patch has completed successfully, false
-# otherwise
-patch-p:
-	@$(foreach COOKIEFILE,$(PATCH_TARGETS), test -e $(COOKIEDIR)/$(COOKIEFILE) ;)
-
-# makepatch		- Grab the upstream source and diff against $(WORKSRC).  Since
-# 				  diff returns 1 if there are differences, we remove the patch
-# 				  file on "success".  Goofy diff.
-makepatch: $(SCRATCHDIR) $(FILEDIR) $(FILEDIR)/gar-base.diff
-	$(DONADA)
-
-# XXX: Allow patching of pristine sources separate from ISA directories
-# XXX: Use makepatch on global/
-
-# this takes the changes you've made to a working directory,
-# distills them to a patch, updates the checksum file, and tries
-# out the build (assuming you've listed the gar-base.diff in your
-# PATCHFILES).  This is way undocumented.  -NickM
-beaujolais: makepatch makesum clean build
-	$(DONADA)
-
-update: makesum garchive clean
-
-# configure		- Runs either GNU configure, one or more local
-# 				  configure scripts or nothing, depending on
-# 				  what's available.
-CONFIGURE_TARGETS = $(addprefix configure-,$(CONFIGURE_SCRIPTS))
-
-# Limit dependencies to all but one category or to exclude one category
-ALL_CATEGORIES = apps cpan devel gnome lang lib net server utils extra
-ifneq ($(BUILD_CATEGORY),)
-NOBUILD_CATEGORY = $(filter-out $(BUILD_CATEGORY),$(ALL_CATEGORIES))
-endif
-
-DEPEND_LIST = $(filter-out $(addsuffix /%,$(NOBUILD_CATEGORY)),$(DEPENDS) $(LIBDEPS) $(BUILDDEPS))
-
-ifneq ($(SKIPDEPEND),1)
-CONFIGURE_DEPS = $(addprefix $(GARDIR)/,$(addsuffix /$(COOKIEDIR)/install,$(DEPEND_LIST)))
-CONFIGURE_IMGDEPS = $(addprefix imgdep-,$(filter-out $(DESTIMG),$(IMGDEPS)))
-#CONFIGURE_BUILDDEPS = $(addprefix $(GARDIR)/,$(addsuffix /$(COOKIEROOTDIR)/build.d/install,$(BUILDDEPS)))
-endif
-
-configure: pre-configure $(addprefix configure-,$(MODULATIONS)) post-configure
-	@$(DONADA)
-
-configure-modulated: verify-isa patch-modulated $(CONFIGURE_IMGDEPS) $(CONFIGURE_BUILDDEPS) $(CONFIGURE_DEPS) \
-		$(addprefix srcdep-$(GARDIR)/,$(SOURCEDEPS)) \
-		pre-configure-modulated pre-configure-$(MODULATION) $(CONFIGURE_TARGETS) post-configure-$(MODULATION) post-configure-modulated $(if $(STRIP_LIBTOOL),strip-libtool)
-	@$(DONADA)
-
-strip-libtool:
-	@echo '[===== Stripping Libtool =====]'
-	fixlibtool $(WORKSRC)
-	@$(MAKECOOKIE)
-
-.PHONY: reset-configure reset-configure-modulated
-reconfigure: reset-configure configure
-
-reset-configure: $(addprefix reset-configure-,$(MODULATIONS))
-	rm -f $(COOKIEDIR)/configure
-
-reset-configure-modulated:
-	rm -f $(addprefix $(COOKIEDIR)/,$(CONFIGURE_TARGETS))
-
-# returns true if configure has completed successfully, false
-# otherwise
-configure-p:
-	@$(foreach COOKIEFILE,$(CONFIGURE_TARGETS), test -e $(COOKIEDIR)/$(COOKIEFILE) ;)
-
-# build			- Actually compile the sources.
-BUILD_TARGETS = $(addprefix build-,$(BUILD_CHECK_SCRIPTS)) $(addprefix build-,$(BUILD_SCRIPTS))
-
-build: pre-build $(addprefix build-,$(MODULATIONS)) post-build
-	$(DONADA)
-
-# Build for a specific architecture
-build-modulated-check:
-	$(if $(filter ERROR,$(ARCHFLAGS_$(GARCOMPILER)_$*)),                                            \
-		$(error Code for the architecture $* can not be produced with the compiler $(GARCOMPILER))      \
-	)
-
-build-modulated: verify-isa configure-modulated pre-build-modulated pre-build-$(MODULATION) $(BUILD_TARGETS) post-build-$(MODULATION) post-build-modulated
-	@$(MAKECOOKIE)
-
-# returns true if build has completed successfully, false
-# otherwise
-build-p:
-	@$(foreach COOKIEFILE,$(BUILD_TARGETS), test -e $(COOKIEDIR)/$(COOKIEFILE) ;)
-
-TEST_TARGETS = $(addprefix test-,$(TEST_SCRIPTS))
-
-test: pre-test $(addprefix test-,$(MODULATIONS)) post-test
-	$(DONADA)
-
-test-modulated: build-modulated pre-test-modulated pre-test-$(MODULATION) $(TEST_TARGETS) post-test-$(MODULATION) post-test-modulated
-	$(DONADA)
-
-# strip - Strip executables
-ifneq ($(GARFLAVOR),DBG)
-POST_INSTALL_TARGETS := strip $(POST_INSTALL_TARGETS)
-endif
-
-strip:
-	@for target in $(STRIP_DIRS) $(DESTDIR)$(bindir) $(DESTDIR)$(sbindir) ; \
-	do \
-		stripbin $$target ; \
-	done
-	$(DONADA)
-
-# fixconfig - Remove build-time paths config files
-POST_INSTALL_TARGETS := fixconfig $(POST_INSTALL_TARGETS)
-FIXCONFIG_DIRS    ?= $(DESTDIR)$(libdir) $(DESTDIR)$(bindir)
-FIXCONFIG_RMPATHS ?= $(DESTDIR) $(CURDIR)/$(WORKSRC)
-fixconfig:
-	@if test "x$(FIXCONFIG_DIRS)" != "x" ; then \
-		for path in $(FIXCONFIG_DIRS) ; do \
-			if test -d $$path ; then \
-				echo "  ==> fixconfig: $$path" ; \
-				replacer $$path $(FIXCONFIG_RMPATHS) ; \
-			fi ; \
-		done ; \
-	fi
-
-# install		- Test and install the results of a build.
-INSTALL_TARGETS = $(addprefix install-,$(INSTALL_SCRIPTS))
-
-install: pre-install $(addprefix install-,$(MODULATIONS)) post-install
-	$(DONADA)
-
-install-modulated: build-modulated $(addprefix dep-$(GARDIR)/,$(INSTALLDEPS)) test-modulated $(INSTALL_DIRS) $(PRE_INSTALL_TARGETS) pre-install-modulated pre-install-$(MODULATION) $(INSTALL_TARGETS) post-install-$(MODULATION) post-install-modulated $(POST_INSTALL_TARGETS) 
-	@$(MAKECOOKIE)
-
-# returns true if install has completed successfully, false
-# otherwise
-install-p:
-	@$(foreach COOKIEFILE,$(INSTALL_TARGETS), test -e $(COOKIEDIR)/$(COOKIEFILE) ;)
-
-
-
-# reinstall		- Install the results of a build, ignoring
-# 				  "already installed" flag.
-.PHONY: reinstall reset-install reset-install-modulated
-reinstall: reset-install install
-
-reset-install: reset-merge $(addprefix reset-install-,$(MODULATIONS))
-	@rm -f $(foreach M,$(MODULATIONS),$(COOKIEDIR)/install-$M) $(COOKIEDIR)/install $(COOKIEDIR)/post-install
-	@rm -f $(COOKIEDIR)/strip
-
-reset-install-modulated:
-	@$(call _pmod,Reset install state)
-	@rm -rf $(INSTALLISADIR) $(COOKIEDIR)/install-work
-	@rm -f $(foreach C,pre-install-modulated install-modulated post-install-modulated,$(COOKIEDIR)/$C)
-	@rm -f $(COOKIEDIR)/pre-install-$(MODULATION) $(COOKIEDIR)/post-install-$(MODULATION)
-	@rm -f $(COOKIEDIR)/strip
-	@rm -f $(foreach S,$(INSTALL_TARGETS),$(COOKIEDIR)/$S)
-	@rm -f $(COOKIEROOTDIR)/global/install-$(MODULATION)
-
-# merge in all isas to the package directory after installation
-
-# Merging in general allows the selection of parts from different ISA builds into the package
-# Per default merging is done differently depending on
-# (a) if the sources are build for more than one ISA
-# (b) if the executables should be replaced by isaexec or not
-# 
-# - If there is only one ISA to build for everything is copied verbatim to PKGROOT.
-# - If there are builds for more than one ISA the destination differs depending on if
-#   the binaries should be executed by isaexec. This is usually bin, sbin and libexec.
-#
-# default:        relocate to ISA subdirs if more than one ISA, use isaexec-wrapper for bin/, etc.
-# NO_ISAEXEC = 1: ISA_DEFAULT gets installed in bin/..., all others in bin/$ISA/
-#
-# Automatic merging is only possible if you have the default modulation "ISA"
-# Otherwise you *must* specify merge scripts for all modulations.
-
-ifeq ($(DEBUG_MERGING),)
-_DBG_MERGE=@
-else
-_DBG_MERGE=
-endif
-
-ifeq ($(NEEDED_ISAS),$(ISA_DEFAULT))
-MERGE_SCRIPTS_isa-$(ISA_DEFAULT) ?= copy-all $(EXTRA_MERGE_SCRIPTS_$(ISA_DEFAULT)) $(EXTRA_MERGE_SCRIPTS)
-else
-ISAEXEC_DIRS ?= $(if $(NO_ISAEXEC),,$(bindir) $(sbindir) $(libexecdir))
-MERGE_DIRS_isa-$(ISA_DEFAULT) ?= $(EXTRA_MERGE_DIRS) $(EXTRA_MERGE_DIRS_isa-$(ISA_DEFAULT))
-MERGE_DIRS_isa-$(ISA) ?= $(bindir) $(sbindir) $(libexecdir) $(libdir) $(EXTRA_MERGE_DIRS) $(EXTRA_MERGE_DIRS_isa-$(ISA))
-MERGE_SCRIPTS_isa-$(ISA_DEFAULT) ?= copy-relocate $(EXTRA_MERGE_SCRIPTS_isa-$(ISA_DEFAULT)) $(EXTRA_MERGE_SCRIPTS)
-MERGE_SCRIPTS_isa-$(ISA) ?= copy-relocated-only $(EXTRA_MERGE_SCRIPTS_isa-$(ISA)) $(EXTRA_MERGE_SCRIPTS)
-endif
-
-# These directories get relocated into their ISA subdirectories
-MERGE_DIRS ?= $(MERGE_DIRS_$(MODULATION))
-
-# The files in ISAEXEC get relocated and will be replaced by the isaexec-wrapper
-_ISAEXEC_EXCLUDE_FILES = $(bindir)/%-config $(bindir)/%/%-config
-_ISAEXEC_FILES = $(filter-out $(foreach F,$(_ISAEXEC_EXCLUDE_FILES) $(ISAEXEC_EXCLUDE_FILES),$(PKGROOT)$(F)), \
-			$(wildcard $(foreach D,$(ISAEXEC_DIRS),$(PKGROOT)$(D)/* )) \
-		)
-ISAEXEC_FILES ?= $(if $(_ISAEXEC_FILES),$(patsubst $(PKGROOT)%,%,		\
-	$(shell for F in $(_ISAEXEC_FILES); do		\
-		if test -f "$$F" -a \! -h "$$F"; then echo $$F; fi;	\
-	done)),)
-
-ifneq ($(ISAEXEC_FILES),)
-_EXTRA_GAR_PKGS += CSWisaexec
-endif
-
-# These merge-rules are actually processed for the current modulation
-MERGE_TARGETS ?= $(addprefix merge-,$(MERGE_SCRIPTS_$(MODULATION))) $(EXTRA_MERGE_TARGETS)
-
-# Include only these files
-ifeq ($(origin MERGE_INCLUDE_FILES_$(MODULATION)), undefined)
-_MERGE_INCLUDE_FILES = $(MERGE_INCLUDE_FILES)
-else
-_MERGE_INCLUDE_FILES = $(MERGE_INCLUDE_FILES_$(MODULATION))
-endif
-_MERGE_INCLUDE_FILES += $(EXTRA_MERGE_INCLUDE_FILES) $(EXTRA_MERGE_INCLUDE_FILES_$(MODULATION))
-
-# This can be defined in category.mk
-MERGE_EXCLUDE_CATEGORY ?= $(_MERGE_EXCLUDE_CATEGORY)
-
-# Support for cswpycompile, skip pre-compiled python files (.pyc, .pyo)
-# during the merge phase.
-_PYCOMPILE_FILES = /opt/csw/lib/python/site-packages/.*\.py
-MERGE_EXCLUDE_PYCOMPILE ?= $(if $(PYCOMPILE), $(addsuffix c,$(_PYCOMPILE_FILES)) $(addsuffix o,$(_PYCOMPILE_FILES)))
-
-MERGE_EXCLUDE_INFODIR ?= $(sharedstatedir)/info/dir
-MERGE_EXCLUDE_LIBTOOL ?= $(libdir)/.*\.la
-MERGE_EXCLUDE_BACKUPFILES ?= .*\~
-MERGE_EXCLUDE_STATICLIBS ?= $(libdir)/.*\.a
-# Exclude all other .pc-files apart from the default 32- and 64 bit versions
-MERGE_EXCLUDE_EXTRA_ISA_PKGCONFIG ?= $(if $(filter-out $(ISA_DEFAULT) $(ISA_DEFAULT64),$(ISA)),$(libdir)/.*\.pc)
-MERGE_EXCLUDE_DEFAULT ?= $(MERGE_EXCLUDE_CATEGORY) $(MERGE_EXCLUDE_INFODIR) $(MERGE_EXCLUDE_LIBTOOL) $(MERGE_EXCLUDE_BACKUPFILES) $(MERGE_EXCLUDE_STATICLIBS) $(MERGE_EXCLUDE_EXTRA_ISA_PKGCONFIG) $(MERGE_EXCLUDE_PYCOMPILE)
-
-# Exclude these files
-ifeq ($(origin MERGE_EXCLUDE_FILES_$(MODULATION)), undefined)
-_MERGE_EXCLUDE_FILES = $(MERGE_EXCLUDE_FILES)
-else
-_MERGE_EXCLUDE_FILES = $(MERGE_EXCLUDE_FILES_$(MODULATION))
-endif
-_MERGE_EXCLUDE_FILES += $(EXTRA_MERGE_EXCLUDE_FILES) $(EXTRA_MERGE_EXCLUDE_FILES_$(MODULATION)) $(MERGE_EXCLUDE_DEFAULT)
-
-# This variable contains parameter for pax to honor global file inclusion/exclusion
-# Exclude by replacing files with the empty string
-_INC_EXT_RULE = $(foreach F,$(_MERGE_EXCLUDE_FILES),-s ',^\.$F$$,,')
-# Replace files by itself terminating on first match
-_INC_EXT_RULE += $(foreach F,$(_MERGE_INCLUDE_FILES),-s ",^\(\.$F\)$$,\1,")
-
-# These are used during merge phase to determine the base installation directory
-MERGEBASE_$(bindir)     ?= $(bindir_install)
-MERGEBASE_$(sbindir)    ?= $(sbindir_install)
-MERGEBASE_$(libexecdir) ?= $(libexecdir_install)
-MERGEBASE_$(libdir)     ?= $(libdir_install)
-
-define mergebase
-$(if $(MERGEBASE_$(1)),$(MERGEBASE_$(1)),$(1))
-endef
-
-# A package is compiled for the paths defined in $(bindir), $(libdir), etc.
-# These may not be the standard paths, because specific ISA compilation
-# could have appended e. g. /64 for .pc-paths to be correct. Anyway these
-# paths may need to be rewritten e. g. from lib/64 to lib/amd64. Here,
-# $(libdir) has the memorymodel-directory appended, whereas $(libdir_install)
-# has not, so we use this one for appending.
-
-
-_PAX_ARGS = $(_INC_EXT_RULE) $(_EXTRA_PAX_ARGS) $(EXTRA_PAX_ARGS_$(MODULATION)) $(EXTRA_PAX_ARGS)
-
-define killprocandparent
-cpids() { \
-  P=$1 \
-  PPIDS=$P \
-  PP=`ps -eo pid,ppid | awk "BEGIN { ORS=\" \" } \\$2 == $P { print \\$1 }\"` \
-  while [ -n "$PP" ]; do \
-    PQ=$PP \
-    PP= \
-    for q in $PQ; do \
-      PPIDS="$PPIDS $q" \
-      PP=$PP\ `ps -eo pid,ppid | awk "BEGIN { ORS=\" \" } \\$2 == $q { print \\$1 }\"` \
-    done \
-  done \
- \
-  echo $PPIDS \
-}
-endef
-
-
-# The basic merge merges the compiles for all ISAs on the current architecture
-merge: checksum pre-merge merge-do merge-license $(if $(COMPILE_ELISP),compile-elisp) $(if $(NOSOURCEPACKAGE),,merge-src) post-merge
-	@$(DONADA)
-
-merge-do: $(if $(PARALLELMODULATIONS),merge-parallel,merge-sequential)
-
-merge-sequential: $(addprefix merge-,$(MODULATIONS))
-
-merge-parallel: _PIDFILE=$(WORKROOTDIR)/build-global/multitail.pid
-merge-parallel: merge-watch
-	$(_DBG_MERGE)trap "kill -9 `cat $(_PIDFILE) $(foreach M,$(MODULATIONS),$(WORKROOTDIR)/build-$M/build.pid) 2>/dev/null`;stty sane" INT;\
-		$(foreach M,$(MODULATIONS),($(MAKE) merge-$M >$(WORKROOTDIR)/build-$M/build.log 2>&1; echo $$? >$(WORKROOTDIR)/build-$M/build.ret) & echo $$! >$(WORKROOTDIR)/build-$M/build.pid; ) wait
-	$(_DBG_MERGE)if [ -f $(_PIDFILE) ]; then kill `cat $(_PIDFILE)`; stty sane; fi
-	$(_DBG_MERGE)$(foreach M,$(MODULATIONS),if [ "`cat $(WORKROOTDIR)/build-$M/build.ret`" -ne 0 ]; then \
-		echo "Build error in modulation $M. Please see"; \
-		echo "  $(WORKROOTDIR)/build-$M/build.log"; \
-		echo "for details:"; \
-		echo; \
-		tail -100 $(WORKROOTDIR)/build-$M/build.log; \
-		exit "Return code: `cat $(WORKROOTDIR)/build-$M/build.ret`"; \
-	fi;)
-
-merge-watch: _USEMULTITAIL=$(shell test -x $(MULTITAIL) && test -x $(TTY) && $(TTY) >/dev/null 2>&1; if [ $$? -eq 0 ]; then echo yes; fi)
-merge-watch: $(addprefix $(WORKROOTDIR)/build-,global $(MODULATIONS))
-	$(_DBG_MERGE)$(if $(_USEMULTITAIL),\
-		$(MULTITAIL) --retry-all $(foreach M,$(MODULATIONS),$(WORKROOTDIR)/build-$M/build.log) -j & echo $$! > $(WORKROOTDIR)/build-global/multitail.pid,\
-		echo "Building all ISAs in parallel. Please see the individual logfiles for details:";$(foreach M,$(MODULATIONS),echo "- $(WORKROOTDIR)/build-$M/build.log";)\
-	)
-
-
-# This merges the install results of the current modulation into the package root
-merge-modulated: install-modulated pre-merge-modulated pre-merge-$(MODULATION) $(MERGE_TARGETS) post-merge-$(MODULATION) post-merge-modulated
-	@$(MAKECOOKIE)
-
-# Copy the whole tree verbatim
-merge-copy-all: $(PKGROOT) $(INSTALLISADIR)
-	$(_DBG_MERGE)(cd $(INSTALLISADIR); pax -r -w -v $(_PAX_ARGS) \
-		$(foreach DIR,$(MERGE_DIRS),-s ",^\(\.$(DIR)/\),.$(call mergebase,$(DIR))/,p") \
-		. $(PKGROOT))
-	@$(MAKECOOKIE)
-
-# Copy only the merge directories
-merge-copy-only: $(PKGROOT)
-	$(_DBG_MERGE)(cd $(INSTALLISADIR); pax -r -w -v $(_PAX_ARGS) \
-		$(foreach DIR,$(MERGE_DIRS),-s ",^\(\.$(DIR)/\),.$(call mergebase,$(DIR))/,p") -s ",.*,," \
-		. $(PKGROOT) \
-	)
-	@$(MAKECOOKIE)
-
-# Copy the whole tree and relocate the directories in $(MERGE_DIRS)
-merge-copy-relocate: $(PKGROOT) $(INSTALLISADIR)
-	$(_DBG_MERGE)(cd $(INSTALLISADIR); pax -r -w -v $(_PAX_ARGS) \
-		$(foreach DIR,$(MERGE_DIRS),-s ",^\(\.$(DIR)/\),.$(call mergebase,$(DIR))/$(ISA)/,p") \
-		. $(PKGROOT) \
-	)
-	@$(MAKECOOKIE)
-
-# Copy only the relocated directories
-merge-copy-relocated-only: $(PKGROOT) $(INSTALLISADIR)
-	$(_DBG_MERGE)(cd $(INSTALLISADIR); pax -r -w -v $(_PAX_ARGS) \
-		$(foreach DIR,$(MERGE_DIRS),-s ",^\(\.$(DIR)/\),.$(call mergebase,$(DIR))/$(ISA)/,p") -s ",.*,," \
-		 . $(PKGROOT) \
-	)
-	@$(MAKECOOKIE)
-
-# Copy only the *-config scripts from $(bindir), excluding everything else
-merge-copy-config-only:
-	$(_DBG_MERGE)(cd $(INSTALLISADIR); pax -r -w -v $(_PAX_ARGS) \
-		-s ",^\(\.$(bindir)/.*-config\)\$$,\1,p" \
-		-s ",.*,," \
-		. $(PKGROOT) \
-	)
-	@$(MAKECOOKIE)
-
-.PHONY: remerge reset-merge reset-merge-modulated
-remerge: reset-merge merge
-
-reset-merge: reset-package $(addprefix reset-merge-,$(MODULATIONS)) reset-merge-license reset-merge-src
-	@rm -f $(COOKIEDIR)/pre-merge $(foreach M,$(MODULATIONS),$(COOKIEDIR)/merge-$M) $(COOKIEDIR)/merge $(COOKIEDIR)/post-merge
-	@rm -rf $(PKGROOT)
-	@$(DONADA)
-
-reset-merge-modulated:
-	@$(call _pmod,Reset merge state)
-	@rm -f $(COOKIEDIR)/merge-*
-
-# The clean rule.  It must be run if you want to re-download a
-# file after a successful checksum (or just remove the checksum
-# cookie, but that would be lame and unportable).
-
-clean: $(addprefix clean-,$(MODULATIONS))
-	@rm -rf $(WORKROOTDIR) $(COOKIEROOTDIR) $(DOWNLOADDIR)
-
-clean-modulated:
-	$(call _pmod,Cleaning )
-	@rm -rf $(WORKSRC) $(EXTRACTDIR) \
-		   $(SCRATCHDIR) $(SCRATCHDIR)-$(COOKIEDIR) \
-		   $(SCRATCHDIR)-build $(SCRATCHDIR)-$(COOKIEROOTDIR) \
-		   $(LOGDIR) *~
-	@rm -rf $(COOKIEDIR)
-
-
-SRC_CLEAN_TARGET ?= clean
-clean-source:
-	@if test -d $(WORKSRC) ; then \
-		( $(MAKE) -C $(WORKSRC) $(SRC_CLEAN_TARGET) || true ) ; \
-	fi
-
-# Remove specified files/directories
-clean-dirs:
-	@for target in "" $(REMOVE_DIRS) ; do \
-		test -z "$$target" && continue ; \
-		rm -rf $$target ; \
-	done ; \
-
-# Clean an image
-imageclean:
-	@echo " ==> Removing $(COOKIEDIR)"
-	@-rm -rf $(COOKIEDIR)
-	@echo " ==> Removing $(WORKDIR)"
-	@-rm -rf $(WORKDIR)
-
-spotless: imageclean
-	@echo " ==> Removing $(DESTDIR)"
-	@-rm -rf work
-
-# Print package dependencies
-PKGDEP_LIST = $(filter-out $(BUILDDEPS),$(DEPEND_LIST))
-printdepends:
-	@for depend in "" $(PKGDEP_LIST) ; do \
-		test -z "$$depend" && continue ; \
-		echo "  $$depend" ; \
-		if test -n "$(DEPFILE)" ; then \
-			check_pkgdb -o $(DEPFILE) $$depend ; \
-		else \
-			check_pkgdb $$depend ; \
-		fi ; \
-	done
-
-# Update inter-package depends
-makedepend:
-	@for gspec in `gfind $(CURDIR) -type f -name '*.gspec' | ggrep files`; do \
-		pkgname=`basename $$gspec .gspec` ; \
-		pkgfiles=`dirname $$gspec` ; \
-		pkgdir=`dirname $$pkgfiles` ; \
-		pkgbuild=`basename $$pkgdir` ; \
-		pkgdep="$$pkgname.depend" ; \
-		echo " ==> $$pkgbuild ($$pkgname)" ; \
-		( cd $$pkgdir ; \
-		  rm -f /tmp/$$pkgdep ; \
-		  if test -f $$pkgfiles/$$pkgdep ; then \
-		  	cat $$pkgfiles/$$pkgdep > /tmp/$$pkgdep ; \
-		  fi ; \
-		  DEPFILE=/tmp/$$pkgdep $(MAKE) printdepends ; \
-		  if test -f /tmp/$$pkgdep ; then \
-		  	sort /tmp/$$pkgdep | uniq > $$pkgfiles/$$pkgname.depend ; \
-		  fi ) ; \
-	done
-
-buildstatus:
-
-love:
-	@echo "not war!"
-
-# these targets do not have actual corresponding files
-.PHONY: all fetch-list beaujolais fetch-p checksum-p extract-p patch-p configure-p build-p install-p package-p love
-
-# apparently this makes all previous rules non-parallelizable,
-# but the actual builds of the packages will be, according to
-# jdub.
-.NOTPARALLEL:

Copied: csw/mgar/gar/v2-collapsed-modulations/gar.mk (from rev 6854, csw/mgar/gar/v2/gar.mk)
===================================================================
--- csw/mgar/gar/v2-collapsed-modulations/gar.mk	                        (rev 0)
+++ csw/mgar/gar/v2-collapsed-modulations/gar.mk	2009-10-18 17:29:04 UTC (rev 6894)
@@ -0,0 +1,828 @@
+
+#
+# $Id$
+#
+# Copyright (C) 2001 Nick Moffitt
+# 
+# Redistribution and/or use, with or without modification, is
+# permitted.  This software is without warranty of any kind.  The
+# author(s) shall not be liable in the event that use of the
+# software causes damage.
+
+# Comment this out to make much verbosity
+#.SILENT:
+
+#ifeq ($(origin GARDIR), undefined)
+#GARDIR := $(CURDIR)/../..
+#endif
+
+#GARDIR ?= ../..
+#ifeq ($(origin GARDIR), undefined)
+#GARDIR := $(CURDIR)/../..
+#endif
+
+ifneq ($(abspath /),/)
+$(error Your version of 'make' is too old: $(MAKE_VERSION). Please make sure you are using at least 3.81)
+endif
+
+GARDIR ?= gar
+GARBIN  = $(GARDIR)/bin
+
+DIRSTODOTS = $(subst . /,./,$(patsubst %,/..,$(subst /, ,/$(1))))
+ROOTFROMDEST = $(call DIRSTODOTS,$(DESTDIR))
+MAKEPATH = $(shell echo $(1) | perl -lne 'print join(":", split)')
+TOLOWER = $(shell echo $(1) | tr '[A-Z]' '[a-z]')
+
+# If you call this the value is only evaluated the first time
+# Usage: $(call SETONCE,A,MyComplexVariableEvaluatedOnlyOnce)
+SETONCE = $(eval $(1) ?= $(2))
+
+#meant to take a git url and return just the $proj.git part
+GITPROJ = $(lastword $(subst /, ,$(1)))
+
+PARALLELMFLAGS ?= $(MFLAGS)
+export PARALLELMFLAGS
+
+DISTNAME ?= $(GARNAME)-$(GARVERSION)
+
+DYNSCRIPTS = $(foreach PKG,$(SPKG_SPECS),$(foreach SCR,$(ADMSCRIPTS),$(if $(value $(PKG)_$(SCR)), $(PKG).$(SCR))))
+_NOCHECKSUM += $(DYNSCRIPTS) $(foreach R,$(GIT_REPOS),$(call GITPROJ,$(R)))
+
+# Allow overriding of only specific components of ALLFILES by clearing e. g. 'ALLFILES_DYNSCRIPTS = '
+ALLFILES_DISTFILES ?= $(DISTFILES)
+ALLFILES_PATCHFILES ?= $(PATCHFILES) $(foreach M,$(MODULATIONS),$(PATCHFILES_$M))
+ALLFILES_DYNSCRIPTS ?= $(DYNSCRIPTS)
+ALLFILES_GIT_REPOS ?= $(foreach R,$(GIT_REPOS),$(call GITPROJ,$(R)))
+ALLFILES ?= $(sort $(ALLFILES_DISTFILES) $(ALLFILES_PATCHFILES) $(ALLFILES_DYNSCRIPTS) $(ALLFILES_GIT_REPOS) $(EXTRA_ALLFILES))
+
+ifeq ($(MAKE_INSTALL_DIRS),1)
+INSTALL_DIRS = $(addprefix $(DESTDIR),$(prefix) $(exec_prefix) $(bindir) $(sbindir) $(libexecdir) $(datadir) $(sysconfdir) $(sharedstatedir) $(localstatedir) $(libdir) $(infodir) $(lispdir) $(includedir) $(mandir) $(foreach NUM,1 2 3 4 5 6 7 8, $(mandir)/man$(NUM)) $(sourcedir))
+else
+INSTALL_DIRS =
+endif
+
+# For rules that do nothing, display what dependencies they
+# successfully completed
+#DONADA = @echo "	[$@] complete.  Finished rules: $+"
+#DONADA = @touch $(COOKIEDIR)/$@; echo "	[$@] complete for $(GARNAME)."
+COOKIEFILE = $(COOKIEDIR)/$(patsubst $(COOKIEDIR)/%,%,$1)
+DONADA = @touch $(call COOKIEFILE,$@); echo "	[$@] complete for $(GARNAME)."
+
+
+# TODO: write a stub rule to print out the name of a rule when it
+# *does* do something, and handle indentation intelligently.
+
+# Default sequence for "all" is:  fetch checksum extract patch configure build
+all: build
+
+# include the configuration file to override any of these variables
+include $(GARDIR)/gar.conf.mk
+include $(GARDIR)/gar.lib.mk
+
+# ========================= MODULATIONS ======================== 
+
+# The default is to modulate over ISAs
+MODULATORS ?= ISA $(EXTRA_MODULATORS) $(EXTRA_MODULATORS_$(GARCH))
+MODULATIONS_ISA = $(NEEDED_ISAS)
+
+tolower = $(shell echo $(1) | tr '[A-Z]' '[a-z]')
+expand_modulator_1 = $(addprefix $(call tolower,$(1))-,$(MODULATIONS_$(1)))
+# This expands to the list of all modulators with their respective modulations
+modulations = $(if $(word 2,$(1)),\
+	$(foreach P,$(call expand_modulator_1,$(firstword $(1))),\
+		$(addprefix $(P)-,$(call modulations,$(wordlist 2,$(words $(1)),$(1))))\
+	),\
+	$(call expand_modulator_1,$(1)))
+
+MODULATIONS ?= $(filter-out $(SKIP_MODULATIONS),$(strip $(call modulations,$(strip $(MODULATORS)))))
+
+# _modulate(ISA STATIC,,,)
+# -> _modulate2(STATIC,isa-i386,ISA,ISA=i386)
+#    -> _modulate2(,,isa-i386-static-yes,ISA STATIC,ISA=i386 STATIC=yes)
+#       -> xxx-isa-i386-static-yes: @gmake xxx ISA=i386 STATIC=yes
+#    -> _modulate2(,,isa-i386-static-no,ISA STATIC,ISA=i386 STATIC=no)
+#       -> xxx-isa-i386-static-no: @gmake xxx ISA=i386 STATIC=no
+# -> _modulate2(STATIC,isa-amd64,ISA,ISA=amd64)
+#    -> _modulate2(,,isa-amd64-static-yes,ISA STATIC,ISA=amd64 STATIC=yes)
+#       -> xxx-isa-amd64-static-yes: @gmake xxx ISA=amd64 STATIC=yes
+#    -> _modulate2(,,isa-amd64-static-no,ISA STATIC,ISA=amd64 STATIC=no)
+#       -> xxx-isa-amd64-static-no: @gmake xxx ISA=amd64 STATIC=no
+
+define _modulate_target
+$(1)-$(2):
+	@$(MAKE) MODULATION=$(2) $(3) $(1)-modulated
+	@# This is MAKECOOKIE expanded to use the name of the rule explicitly as the rule has
+	@# not been evaluated yet. XXX: Use function _MAKECOOKIE for both
+	@mkdir -p $(COOKIEDIR)/$(dir $(1)-$(2)) && date >> $(COOKIEDIR)/$(1)-$(2)
+	@# The next line has intentionally been left blank to explicitly terminate this make rule
+
+endef
+
+define _modulate_target_nocookie
+$(1)-$(2):
+	@$(MAKE) -s MODULATION=$(2) $(3) $(1)-modulated
+	@# The next line has intentionally been left blank to explicitly terminate this make rule
+
+endef
+
+define _modulate_merge
+$(foreach ASSIGNMENT,$(3),
+merge-$(2): $(ASSIGNMENT)
+)
+merge-$(2): BUILDHOST=$$(call modulation2host)
+merge-$(2):
+	@echo "[===== Building modulation '$(2)' on host '$$(BUILDHOST)' =====]"
+	$$(if $$(and $$(BUILDHOST),$$(filter-out $$(THISHOST),$$(BUILDHOST))),\
+		$(SSH) $$(BUILDHOST) "PATH=$$(PATH) $(MAKE) -C $$(CURDIR) $(if $(PLATFORM),PLATFORM=$(PLATFORM)) MODULATION=$(2) $(3) merge-modulated",\
+		$(MAKE) $(if $(PLATFORM),PLATFORM=$(PLATFORM)) MODULATION=$(2) $(3) merge-modulated\
+	)
+	@# The next line has intentionally been left blank to explicitly terminate this make rule
+
+endef
+
+define _modulate_do
+$(call _modulate_target,extract,$(2),$(4))
+$(call _modulate_target,patch,$(2),$(4))
+$(call _modulate_target,configure,$(2),$(4))
+$(call _modulate_target_nocookie,reset-configure,$(2),$(4))
+$(call _modulate_target,build,$(2),$(4))
+$(call _modulate_target_nocookie,reset-build,$(2),$(4))
+$(call _modulate_target,test,$(2),$(4))
+$(call _modulate_target,install,$(2),$(4))
+$(call _modulate_target_nocookie,reset-install,$(2),$(4))
+#$(call _modulate_target,merge,$(2),$(4))
+$(call _modulate_merge,,$(2),$(4))
+$(call _modulate_target_nocookie,reset-merge,$(2),$(4))
+$(call _modulate_target_nocookie,clean,$(2),$(4))
+$(call _modulate_target_nocookie,_modenv,$(2),$(4))
+endef
+
+# This evaluates to the make rules for all modulations passed as first argument
+# Usage: _modulate( <MODULATORS> )
+define _modulate
+$(foreach M,$(MODULATIONS_$(firstword $(1))),\
+	$(call _modulate2,\
+		$(wordlist 2,$(words $(1)),$(1)),\
+		$(call tolower,$(firstword $(1)))-$(M),\
+		$(firstword $(1)),\
+		$(firstword $(1))=$(M)\
+	)\
+)
+endef
+
+# This is a helper for the recursive _modulate
+define _modulate2
+$(if $(strip $(1)),\
+	$(foreach M,$(MODULATIONS_$(firstword $(1))),\
+		$(call _modulate2,\
+			$(wordlist 2,$(words $(1)),$(1)),\
+			$(addprefix $(2)-,$(call tolower,$(firstword $(1)))-$(M)),\
+			$(3) $(firstword $(1)),\
+			$(4) $(firstword $(1))=$(M)\
+		)\
+	),\
+	$(call _modulate_do,,$(strip $(2)),$(3),$(4))\
+)
+endef
+
+define _pmod
+	@echo "[ $1 for modulation $(MODULATION): $(foreach M,$(MODULATORS),$M=$($M)) ]"
+endef
+
+$(eval $(call _modulate,$(MODULATORS)))
+
+#################### DIRECTORY MAKERS ####################
+
+# This is to make dirs as needed by the base rules
+$(sort $(DOWNLOADDIR) $(PARTIALDIR) $(COOKIEDIR) $(WORKSRC) $(addprefix $(WORKROOTDIR)/build-,global $(MODULATIONS)) $(EXTRACTDIR) $(FILEDIR) $(SCRATCHDIR) $(PKGROOT) $(INSTALL_DIRS) $(INSTALLISADIR) $(GARCHIVEDIR) $(GARPKGDIR) $(STAGINGDIR)) $(COOKIEDIR)/%:
+	@if test -d $@; then : ; else \
+		ginstall -d $@; \
+		echo "ginstall -d $@"; \
+	fi
+
+# These stubs are wildcarded, so that the port maintainer can
+# define something like "pre-configure" and it won't conflict,
+# while the configure target can call "pre-configure" safely even
+# if the port maintainer hasn't defined it.
+# 
+# in addition to the pre-<target> rules, the maintainer may wish
+# to set a "pre-everything" rule, which runs before the first
+# actual target.
+pre-%:
+	@true
+
+post-%:
+	@true
+
+# Call any arbitrary rule recursively for all dependencies
+deep-%: %
+	@for target in "" $(DEPEND_LIST) ; do \
+		test -z "$$target" && continue ; \
+		$(MAKE) -C ../../$$target DESTIMG=$(DESTIMG) $@ ; \
+	done
+	@$(foreach IMG,$(filter-out $(DESTIMG),$(IMGDEPS)),for dep in "" $($(IMG)_DEPENDS); do test -z "$$dep" && continue ; $(MAKE) -C ../../$$dep DESTIMG=$(IMG) $@; done; )
+
+
+# ========================= MAIN RULES ========================= 
+# The main rules are the ones that the user can specify as a
+# target on the "make" command-line.  Currently, they are:
+#	prereq fetch-list fetch checksum makesum extract checkpatch patch
+#	build install reinstall uninstall package
+# (some may not be complete yet).
+#
+# Each of these rules has dependencies that run in the following
+# order:
+# 	- run the previous main rule in the chain (e.g., install
+# 	  depends on build)
+#	- run the pre- rule for the target (e.g., configure would
+#	  then run pre-configure)
+#	- generate a set of files to depend on.  These are typically
+#	  cookie files in $(COOKIEDIR), but in the case of fetch are
+#	  actual downloaded files in $(DOWNLOADDIR)
+# 	- run the post- rule for the target
+# 
+# The main rules also run the $(DONADA) code, which prints out
+# what just happened when all the dependencies are finished.
+
+announce:
+	@echo "[===== NOW BUILDING: $(DISTNAME) =====]"
+
+announce-modulation:
+	@echo "[===== NOW BUILDING: $(DISTNAME) MODULATION $(MODULATION): $(foreach M,$(MODULATORS),$M=$($M)) =====]"
+
+# prerequisite	- Make sure that the system is in a sane state for building the package
+PREREQUISITE_TARGETS = $(addprefix prerequisitepkg-,$(PREREQUISITE_BASE_PKGS) $(PREREQUISITE_PKGS)) $(addprefix prerequisite-,$(PREREQUISITE_SCRIPTS))
+
+# Force to be called in global modulation
+prerequisite: $(if $(filter global,$(MODULATION)),announce pre-everything $(COOKIEDIR) $(DOWNLOADDIR) $(PARTIALDIR) $(addprefix dep-$(GARDIR)/,$(FETCHDEPS)) pre-prerequisite $(PREREQUISITE_TARGETS) post-prerequisite)
+	$(if $(filter-out global,$(MODULATION)),$(MAKE) -s MODULATION=global prerequisite)
+	$(DONADA)
+
+prerequisitepkg-%:
+	@echo " ==> Verifying for installed package $*: \c"
+	@(pkginfo -q $*; if [ $$? -eq 0 ]; then echo "installed"; else echo "MISSING"; exit 1; fi)
+	@$(MAKECOOKIE)
+
+# fetch-list	- Show list of files that would be retrieved by fetch.
+# NOTE: DOES NOT RUN pre-everything!
+fetch-list:
+	@echo "Distribution files: "
+	@$(foreach F,$(DISTFILES),echo "	$F";)
+	@echo "Patch files: "
+	@$(foreach P,$(PATCHFILES),echo "	$P";)
+	@$(foreach M,$(MODULATIONS),$(if $(PATCHFILES_$M),echo "  Modulation $M only: $(PATCHFILES_$M)";))
+	@echo "Dynamically generated scripts: "
+	@$(foreach D,$(DYNSCRIPTS),echo "	$D";)
+	@echo "Git Repos tracked: "
+	@$(foreach R,$(GIT_REPOS),echo "       $R";)
+
+# fetch			- Retrieves $(DISTFILES) (and $(PATCHFILES) if defined)
+#				  into $(DOWNLOADDIR) as necessary.
+FETCH_TARGETS =  $(addprefix $(DOWNLOADDIR)/,$(ALLFILES))
+
+fetch: prerequisite pre-fetch $(FETCH_TARGETS) post-fetch
+	@$(DONADA)
+
+# returns true if fetch has completed successfully, false
+# otherwise
+fetch-p:
+	@$(foreach COOKIEFILE,$(FETCH_TARGETS), test -e $(COOKIEDIR)/$(COOKIEFILE) ;)
+
+# checksum		- Use $(CHECKSUMFILE) to ensure that your
+# 				  distfiles are valid.
+CHECKSUM_TARGETS = $(addprefix checksum-,$(filter-out $(_NOCHECKSUM) $(NOCHECKSUM),$(ALLFILES)))
+
+checksum: fetch $(COOKIEDIR) pre-checksum $(CHECKSUM_TARGETS) post-checksum
+	@$(DONADA)
+
+checksum-global: $(if $(filter global,$(MODULATION)),checksum)
+	$(if $(filter-out global,$(MODULATION)),$(MAKE) -s MODULATION=global checksum)
+	@$(DONADA)
+
+# The next rule handles the dependency from the modulated context to
+# the contextless checksumming. The rule is called when the cookie
+# to the global checksum is requested. If the global checksum has not run,
+# then run it. Otherwise it is silently accepted.
+checksum-modulated: checksum-global
+	@$(DONADA)
+
+# returns true if checksum has completed successfully, false
+# otherwise
+checksum-p:
+	@$(foreach COOKIEFILE,$(CHECKSUM_TARGETS), test -e $(COOKIEDIR)/$(COOKIEFILE) ;)
+
+# makesum		- Generate distinfo (only do this for your own ports!).
+MAKESUM_TARGETS =  $(filter-out $(_NOCHECKSUM) $(NOCHECKSUM),$(ALLFILES))
+
+makesum: fetch $(addprefix $(DOWNLOADDIR)/,$(MAKESUM_TARGETS))
+	@if test "x$(MAKESUM_TARGETS)" != "x "; then \
+		(cd $(DOWNLOADDIR) && gmd5sum $(MAKESUM_TARGETS)) > $(CHECKSUM_FILE) ; \
+		echo "Checksums made for $(MAKESUM_TARGETS)" ; \
+		cat $(CHECKSUM_FILE) ; \
+	fi
+
+# I am always typing this by mistake
+makesums: makesum
+
+GARCHIVE_TARGETS =  $(addprefix $(GARCHIVEDIR)/,$(ALLFILES))
+
+garchive: checksum $(GARCHIVE_TARGETS) ;
+
+# extract		- Unpacks $(DISTFILES) into $(EXTRACTDIR) (patches are "zcatted" into the patch program)
+EXTRACT_TARGETS-global ?= $(foreach SPEC,$(SPKG_SPECS),$(filter $(SPEC).%,$(DISTFILES) $(DYNSCRIPTS) $(foreach R,$(GIT_REPOS),$(call GITPROJ,$(R)))))
+EXTRACT_TARGETS = $(addprefix extract-archive-,$(filter-out $(NOEXTRACT),$(if $(EXTRACT_TARGETS-$(MODULATION)),$(EXTRACT_TARGETS-$(MODULATION)),$(DISTFILES) $(DYNSCRIPTS) $(foreach R,$(GIT_REPOS),$(call GITPROJ,$(R))))))
+
+# We call an additional extract-modulated without resetting any variables so
+# a complete unpacked set goes to the global dir for packaging (like gspec)
+extract: checksum $(COOKIEDIR) pre-extract extract-modulated $(addprefix extract-,$(MODULATIONS)) post-extract
+	@$(DONADA)
+
+extract-global: $(if $(filter global,$(MODULATION)),extract-modulated)
+	$(if $(filter-out global,$(MODULATION)),$(MAKE) -s MODULATION=global extract)
+	@$(MAKECOOKIE)
+
+extract-modulated: checksum-modulated $(EXTRACTDIR) $(COOKIEDIR) \
+		$(addprefix dep-$(GARDIR)/,$(EXTRACTDEPS)) \
+		announce-modulation \
+		pre-extract-modulated pre-extract-$(MODULATION) $(EXTRACT_TARGETS) post-extract-$(MODULATION) post-extract-modulated
+	@$(DONADA)
+
+# returns true if extract has completed successfully, false
+# otherwise
+extract-p:
+	@$(foreach COOKIEFILE,$(EXTRACT_TARGETS), test -e $(COOKIEDIR)/$(COOKIEFILE) ;)
+
+# checkpatch	- Do a "patch -C" instead of a "patch".  Note
+# 				  that it may give incorrect results if multiple
+# 				  patches deal with the same file.
+# TODO: actually write it!
+checkpatch: extract
+	@echo "$@ NOT IMPLEMENTED YET"
+
+# patch			- Apply any provided patches to the source.
+PATCH_TARGETS = $(addprefix patch-extract-,$(PATCHFILES) $(PATCHFILES_$(MODULATION)))
+
+patch: pre-patch $(addprefix patch-,$(MODULATIONS)) post-patch
+	@$(DONADA)
+
+patch-modulated: extract-modulated $(WORKSRC) pre-patch-modulated pre-patch-$(MODULATION) $(PATCH_TARGETS) post-patch-$(MODULATION) post-patch-modulated
+	@$(DONADA)
+
+# returns true if patch has completed successfully, false
+# otherwise
+patch-p:
+	@$(foreach COOKIEFILE,$(PATCH_TARGETS), test -e $(COOKIEDIR)/$(COOKIEFILE) ;)
+
+# makepatch		- Grab the upstream source and diff against $(WORKSRC).  Since
+# 				  diff returns 1 if there are differences, we remove the patch
+# 				  file on "success".  Goofy diff.
+makepatch: $(SCRATCHDIR) $(FILEDIR) $(FILEDIR)/gar-base.diff
+	$(DONADA)
+
+# XXX: Allow patching of pristine sources separate from ISA directories
+# XXX: Use makepatch on global/
+
+# this takes the changes you've made to a working directory,
+# distills them to a patch, updates the checksum file, and tries
+# out the build (assuming you've listed the gar-base.diff in your
+# PATCHFILES).  This is way undocumented.  -NickM
+beaujolais: makepatch makesum clean build
+	$(DONADA)
+
+update: makesum garchive clean
+
+# configure		- Runs either GNU configure, one or more local
+# 				  configure scripts or nothing, depending on
+# 				  what's available.

@@ Diff output truncated at 100000 characters. @@

This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site.



More information about the devel mailing list