[csw-devel] SF.net SVN: gar:[14199] csw/mgar/gar
skayser at users.sourceforge.net
Fri Apr 15 01:08:15 CEST 2011
Revision: 14199
http://gar.svn.sourceforge.net/gar/?rev=14199&view=rev
Author: skayser
Date: 2011-04-14 23:08:15 +0000 (Thu, 14 Apr 2011)
Log Message:
-----------
gar/v2: branch to refactor the fetch phase and avoid a sub-GAR invocation for every download attempt
Modified Paths:
--------------
csw/mgar/gar/v2-speedup-fetch/gar.conf.mk
csw/mgar/gar/v2-speedup-fetch/gar.lib.mk
Added Paths:
-----------
csw/mgar/gar/v2-speedup-fetch/
csw/mgar/gar/v2-speedup-fetch/bin/fetch_url
Added: csw/mgar/gar/v2-speedup-fetch/bin/fetch_url
===================================================================
--- csw/mgar/gar/v2-speedup-fetch/bin/fetch_url (rev 0)
+++ csw/mgar/gar/v2-speedup-fetch/bin/fetch_url 2011-04-14 23:08:15 UTC (rev 14199)
@@ -0,0 +1,47 @@
+#!/bin/bash
+
+set -u
+set -e
+
+URL=$1
+TARGET_DIR=$2
+
+function fetch-rsync() {
+ local url=$1
+ rsync -azvLP $url $TARGET_DIR/
+}
+
+function fetch-scp() {
+ local url=$1
+ scp -C $url $TARGET_DIR/
+}
+
+function fetch-wget() {
+ local url=$1
+ wget ${WGET_OPTS:-} -T 30 -c -P $TARGET_DIR $url
+}
+
+function fetch-file() {
+ local file=$1
+ [ ! -f $file ] && return 1
+ gln -sf /$file $TARGET_DIR/`basename $file`
+}
+
+function fetch-svn() {
+ local url=$1
+ svn co ${SVNHTTP_CO_ARGS:-} $url $TARGET_DIR/`basename $url`
+}
+
+case $URL in
+ ftp://*) fetch-wget $URL;;
+ http://*) fetch-wget $URL;;
+ https://*) fetch-wget $URL;;
+ file://*) fetch-file `echo $URL | sed -e 's,file://,,'`;;
+ rsync://*) fetch-rsync $URL;;
+ svn-http://*) fetch-svn `echo $URL | sed -e 's,svn-,,'`;;
+ svn-https://*) fetch-svn `echo $URL | sed -e 's,svn-,,'`;;
+ scp://*) fetch-scp `echo $URL | sed -e 's,scp://,,'`;;
+ *) echo "ERROR: unsupported scheme" >&2; exit 1;;
+esac
+
+exit $?
Property changes on: csw/mgar/gar/v2-speedup-fetch/bin/fetch_url
___________________________________________________________________
Added: svn:executable
+ *
Modified: csw/mgar/gar/v2-speedup-fetch/gar.conf.mk
===================================================================
--- csw/mgar/gar/v2/gar.conf.mk 2011-04-13 11:11:44 UTC (rev 14158)
+++ csw/mgar/gar/v2-speedup-fetch/gar.conf.mk 2011-04-14 23:08:15 UTC (rev 14199)
@@ -704,11 +704,14 @@
COMMON_EXPORTS += LD_OPTIONS
endif
+_FETCH_EXPORTS = WGET_OPTS SVNHTTP_CO_ARGS
+FETCH_EXPORTS ?= $(COMMON_EXPORTS) $(EXTRA_FETCH_EXPORTS) $(_FETCH_EXPORTS)
CONFIGURE_EXPORTS ?= $(COMMON_EXPORTS) $(EXTRA_CONFIGURE_EXPORTS) PKG_CONFIG_PATH DESTDIR
BUILD_EXPORTS ?= $(COMMON_EXPORTS) $(EXTRA_BUILD_EXPORTS)
TEST_EXPORTS ?= $(COMMON_EXPORTS) $(EXTRA_TEST_EXPORTS)
INSTALL_EXPORTS ?= $(COMMON_EXPORTS) $(EXTRA_INSTALL_EXPORTS) DESTDIR
+FETCH_ENV ?= $(foreach TTT,$(FETCH_EXPORTS),$(TTT)="$($(TTT))")
CONFIGURE_ENV ?= $(foreach TTT,$(CONFIGURE_EXPORTS),$(TTT)="$($(TTT))")
BUILD_ENV ?= $(foreach TTT,$(BUILD_EXPORTS),$(TTT)="$($(TTT))")
TEST_ENV ?= $(foreach TTT,$(TEST_EXPORTS),$(TTT)="$($(TTT))")
Modified: csw/mgar/gar/v2-speedup-fetch/gar.lib.mk
===================================================================
--- csw/mgar/gar/v2/gar.lib.mk 2011-04-13 11:11:44 UTC (rev 14158)
+++ csw/mgar/gar/v2-speedup-fetch/gar.lib.mk 2011-04-14 23:08:15 UTC (rev 14199)
@@ -48,15 +48,16 @@
# 1) we have to strip the colon from the URLs
# 2) the download is very costly with bigger Makefiles as they will be
# re-evaluated for every URL (nested gmake invocation, room for improvement)
+$(DOWNLOADDIR)/%: FILE_URLS = $(filter %/$*, $(URLS))
$(DOWNLOADDIR)/%:
@if test -f $(COOKIEDIR)/checksum-$*; then : ; else \
echo " ==> Grabbing $@"; \
- ( for i in $(filter %/$*,$(URLS)); do \
- echo " ==> Trying $$i"; \
- $(MAKE) -s `echo $$i | tr -d :` || continue; \
+ for i in $(FILE_URLS); do \
+ echo " ==> Trying $$i"; \
+ $(FETCH_ENV) $(GARBIN)/fetch_url $$i $(PARTIALDIR) || continue; \
mv $(PARTIALDIR)/$* $@; \
break; \
- done; ) 2>&1 | grep -v '^$(MAKE)'; \
+ done; \
if test -r $@ ; then : ; else \
echo '(!!!) Failed to download $@!' 1>&2; \
false; \
@@ -109,53 +110,7 @@
dynscr//%:
$(MAKE) --no-print-directory -n _$@ > $(PARTIALDIR)/$*
-# download an http URL (colons omitted)
-http//%:
- @wget $(WGET_OPTS) -T 30 -c -P $(PARTIALDIR) http://$*
-https//%:
- @wget $(WGET_OPTS) -T 30 -c -P $(PARTIALDIR) https://$*
-
-# download an ftp URL (colons omitted)
-#ftp//%:
-# @wget -T 30 -c --passive-ftp -P $(PARTIALDIR) ftp://$*
-ftp//%:
- @wget $(WGET_OPTS) -T 30 -c -P $(PARTIALDIR) ftp://$*
-
-# link to a local copy of the file
-# (absolute path)
-file///%:
- @if test -f /$*; then \
- gln -sf /$* $(PARTIALDIR)/$(notdir $*); \
- else \
- false; \
- fi
-
-# link to a local copy of the file
-# (relative path)
-file//%:
- @if test -f $*; then \
- gln -sf "$(CURDIR)/$*" $(PARTIALDIR)/$(notdir $*); \
- else \
- false; \
- fi
-
-# Using Jeff Waugh's rsync rule.
-# DOES NOT PRESERVE SYMLINKS!
-rsync//%:
- @rsync -azvLP rsync://$* $(PARTIALDIR)/
-
-# Using Jeff Waugh's scp rule
-scp//%:
- @scp -C $* $(PARTIALDIR)/
-
-# Fetch a SVN repo via http
-svn-http//%:
- @svn co $(SVNHTTP_CO_ARGS) http://$* $(PARTIALDIR)/$(notdir $*)
-
-svn-https//%:
- @svn co $(SVNHTTP_CO_ARGS) https://$* $(PARTIALDIR)/$(notdir $*)
-
#################### CHECKSUM RULES ####################
# check a given file's checksum against $(CHECKSUM_FILE) and
This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site.
More information about the devel mailing list