[csw-devel] SF.net SVN: gar:[9523] csw/mgar/gar/v2/gar.lib.mk
skayser at users.sourceforge.net
skayser at users.sourceforge.net
Sat Apr 3 11:13:12 CEST 2010
Revision: 9523
http://gar.svn.sourceforge.net/gar/?rev=9523&view=rev
Author: skayser
Date: 2010-04-03 09:13:11 +0000 (Sat, 03 Apr 2010)
Log Message:
-----------
gar: tweak file download process (display URLs with colon, filter make errors - GAR does error handling anyway)
Modified Paths:
--------------
csw/mgar/gar/v2/gar.lib.mk
Modified: csw/mgar/gar/v2/gar.lib.mk
===================================================================
--- csw/mgar/gar/v2/gar.lib.mk 2010-04-03 02:25:09 UTC (rev 9522)
+++ csw/mgar/gar/v2/gar.lib.mk 2010-04-03 09:13:11 UTC (rev 9523)
@@ -27,28 +27,32 @@
#################### FETCH RULES ####################
-URLS = $(call URLSTRIP,$(foreach SITE,$(FILE_SITES) $(MASTER_SITES),$(addprefix $(SITE),$(DISTFILES))) $(foreach SITE,$(FILE_SITES) $(PATCH_SITES) $(MASTER_SITES),$(addprefix $(SITE),$(ALLFILES_PATCHFILES))))
+URLS = $(foreach SITE,$(FILE_SITES) $(MASTER_SITES),$(addprefix $(SITE),$(DISTFILES))) $(foreach SITE,$(FILE_SITES) $(PATCH_SITES) $(MASTER_SITES),$(addprefix $(SITE),$(ALLFILES_PATCHFILES)))
# if the caller has defined _postinstall, etc targets for a package, add
# these 'dynamic script' targets to our fetch list
-URLS += $(foreach DYN,$(DYNSCRIPTS),dynscr//$(DYN))
+URLS += $(foreach DYN,$(DYNSCRIPTS),dynscr://$(DYN))
ifdef GIT_REPOS
-URLS += $(foreach R,$(GIT_REPOS),gitrepo//$(call GITPROJ,$(R)) $(subst http,git-http,$(call URLSTRIP,$(R))))
+URLS += $(foreach R,$(GIT_REPOS),gitrepo://$(call GITPROJ,$(R)) $(subst http,git-http,$(call URLSTRIP,$(R))))
endif
# Download the file if and only if it doesn't have a preexisting
# checksum file. Loop through available URLs and stop when you
# get one that doesn't return an error code.
+# Note that GAR targets are used to download the URLs, thus:
+# 1) we have to strip the colon from the URLs
+# 2) the download is very costly with bigger Makefiles as they will be
+# re-evaluated for every URL (nested gmake invocation, room for improvement)
$(DOWNLOADDIR)/%:
@if test -f $(COOKIEDIR)/checksum-$*; then : ; else \
echo " ==> Grabbing $@"; \
- for i in $(filter %/$*,$(URLS)); do \
+ ( for i in $(filter %/$*,$(URLS)); do \
echo " ==> Trying $$i"; \
- $(MAKE) -s $$i || continue; \
+ $(MAKE) -s `echo $$i | tr -d :` || continue; \
mv $(PARTIALDIR)/$* $@; \
break; \
- done; \
+ done; ) 2>&1 | grep -v '^$(MAKE)'; \
if test -r $@ ; then : ; else \
echo '(!!!) Failed to download $@!' 1>&2; \
false; \
This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site.
More information about the csw-devel
mailing list