gsrc-commit
[Top][All Lists]
Advanced

[Date Prev][Date Next][Thread Prev][Thread Next][Date Index][Thread Index]

[Gsrc-commit] trunk r7483: some fixes


From: Carl Hansen
Subject: [Gsrc-commit] trunk r7483: some fixes
Date: Sun, 11 Apr 2021 16:15:22 -0400 (EDT)
User-agent: Bazaar (2.7.0dev1)

------------------------------------------------------------
revno: 7483
revision-id: carlhansen@gnu.org-20210411201516-9e0mvxbwk0rkpozj
parent: carlhansen@gnu.org-20210411191211-dvb3xxvru37g1fxn
committer: Carl Hansen <carlhansen@gnu.org>
branch nick: trunk
timestamp: Sun 2021-04-11 13:15:16 -0700
message:
  some fixes
modified:
  gar/gar.lib.mk                 gar.lib.mk-20160617013741-ul4q527k85thcz4p-1
  gar/gar.master.mk              gar.master.mk-20160617013745-b969jpkm3t4wwhf2-1
  gar/gar.mk                     gar.mk-20160617013745-r4h3dj0kiuppihsx-1
=== modified file 'gar/gar.lib.mk'
--- a/gar/gar.lib.mk    2020-03-03 05:41:01 +0000
+++ b/gar/gar.lib.mk    2021-04-11 20:15:16 +0000
@@ -171,6 +171,41 @@
                (cd $(DISTNAME) && git checkout $(GIT_REVISION)) && \
                tar czvf ../$(notdir $*) $(DISTNAME)
 
+
+# Check out source from Git over https (as a .tar.gz etc.).
+# The main purpose is to save the download under DISTNAME rather than
+# GitHub's goofy generated name.
+# A rather annoying kludge.
+
+githttps//%:
+       @echo githttp $*
+       wget $(WGET_OPTS) -O $(DOWNLOADDIR)/$(notdir $*).partial https://$*
+       mv $(DOWNLOADDIR)/$(notdir $*).partial  $(DOWNLOADDIR)/$(notdir $*) 
+       cp $(DOWNLOADDIR)/$(notdir $*)          
$(DOWNLOADDIR)/$(DISTNAME).tar.gz
+       @echo before ----------------ALLFILES      $(ALLFILES)
+       $(MAKE) $(subst    $(notdir $*) , $(DISTNAME).tar.gz, $(ALLFILES) )
+       @echo after ----------------ALLFILES      $(ALLFILES)
+       $(MAKE) garchve-$(DOWNLOADDIR)/$(DISTNAME).tar.gz
+
+
+## 
+## # download an http URL (colons omitted)
+## http//%:
+##     wget $(WGET_OPTS) -O $(DOWNLOADDIR)/$(notdir $*).partial http://$*
+##     mv $(DOWNLOADDIR)/$(notdir $*).partial $(DOWNLOADDIR)/$(notdir $*)
+## 
+## # download an https URL (colons omitted)
+## https//%:
+##     wget $(WGET_OPTS) -O $(DOWNLOADDIR)/$(notdir $*).partial https://$*
+##     mv $(DOWNLOADDIR)/$(notdir $*).partial $(DOWNLOADDIR)/$(notdir $*)
+## 
+## ---
+##     mkdir -p $(DOWNLOADDIR)/git
+##     cd $(DOWNLOADDIR)/git && \
+##             git clone $(GIT_PATH) $(DISTNAME) && \
+##             (cd $(DISTNAME) && git checkout $(GIT_REVISION)) && \
+##             tar czvf ../$(notdir $*) $(DISTNAME)
+## ---
+
 # Check out source from Mercurial.
 HG_REVISION ?= $(strip $(GARVERSION))
 HG_CLONE_OPTS ?= -r "$(HG_REVISION)"

=== modified file 'gar/gar.master.mk'
--- a/gar/gar.master.mk 2021-03-05 04:13:34 +0000
+++ b/gar/gar.master.mk 2021-04-11 20:15:16 +0000
@@ -105,6 +105,8 @@
        pserver:anonymous@cvs.savannah.gnu.org:/sources/
 MASTER_SAVANNAH_GIT ?= \
        http://git.savannah.nongnu.org/cgit/
+MASTER_SAVANNAH_GNU_GIT ?= \
+       https://git.savannah.gnu.org/git/config.git
 MASTER_SOURCEFORGE ?= \
        http://sourceforge.net/projects/
 MASTER_SOURCEFORGE_JP ?= \

=== modified file 'gar/gar.mk'
--- a/gar/gar.mk        2020-11-17 20:41:29 +0000
+++ b/gar/gar.mk        2021-04-11 20:15:16 +0000
@@ -79,13 +79,14 @@
 
 # This is to make dirs as needed by the base rules
 $(sort $(DOWNLOADDIR) $(COOKIEDIR) $(LOGDIR) $(WORKSRC) $(WORKDIR) 
$(EXTRACTDIR) $(FILEDIR) $(SCRATCHDIR)):
-       if test -d $@; then : ; else \
-               mkdir -p $@; \
+       if test -d "$@"; then : ; else \
+               mkdir -p "$@"; \
        fi
 
 $(COOKIEDIR)/%:
-       if test -d $@; then : ; else \
-               mkdir -p $@; \
+       echo ------ $@
+       if test -d "$@"; then : ; else \
+               mkdir -p "$@"; \
        fi
 
 
@@ -189,7 +190,7 @@
 
 # fetch                        - Retrieves $(DISTFILES) (and $(PATCHFILES) if 
defined)
 #                                into $(DOWNLOADDIR) as necessary.
-fetch: pre-everything custom-pre-everything $(COOKIEDIR) $(DOWNLOADDIR) 
$(addprefix dep-$(GARDIR)/,$(FETCHDEPS)) pre-fetch custom-pre-fetch $(addprefix 
$(DOWNLOADDIR)/,$(ALLFILES)) post-fetch custom-post-fetch
+fetch: pre-everything custom-pre-everything $(COOKIEDIR) $(DOWNLOADDIR) 
$(WORKDIR) $(addprefix dep-$(GARDIR)/,$(FETCHDEPS)) pre-fetch custom-pre-fetch 
$(addprefix $(DOWNLOADDIR)/,$(ALLFILES)) post-fetch custom-post-fetch
        $(DONADA)
 
 # returns true if fetch has completed successfully, false
@@ -398,7 +399,10 @@
 # cookie, but that would be lame and unportable).
 clean:
        @printf "[$(OK)clean$(OFF)] $(MSG)Removing working files and 
directories$(OFF)\n"
-       rm -rf $(DOWNLOADDIR) $(COOKIEDIR) $(LOGDIR) $(WORKSRC) $(WORKDIR) 
$(EXTRACTDIR) $(SCRATCHDIR) $(SCRATCHDIR)-$(COOKIEDIR) *~
+       rm --preserve-root=all -rf $(DOWNLOADDIR) $(COOKIEDIR) $(LOGDIR) 
$(WORKSRC) $(WORKDIR) $(EXTRACTDIR) $(SCRATCHDIR) $(SCRATCHDIR)-$(COOKIEDIR) *~
+
+# NOTE: this rm -rf could be dangerous if any of these variables expands
+# empty; --preserve-root=all at least guards against removing / itself.
+
 
 # these targets do not have actual corresponding files
 .PHONY: all info fetch-list fetch checksum makesum garchive extract patch 
makepatch configure build test install clean beaujolais update help install-src 
uninstall-src


reply via email to

[Prev in Thread] Current Thread [Next in Thread]