source: trunk/base/src/port1.0/portfetch.tcl @ 50835

Last change on this file since 50835 was 50835, checked in by jmr@…, 11 years ago

indentation

  • Property svn:eol-style set to native
  • Property svn:keywords set to Id
# -*- coding: utf-8; mode: tcl; tab-width: 4; indent-tabs-mode: nil; c-basic-offset: 4 -*- vim:fenc=utf-8:ft=tcl:et:sw=4:ts=4:sts=4
# portfetch.tcl
# $Id: portfetch.tcl 50835 2009-05-10 20:55:43Z jmr@macports.org $
#
# Copyright (c) 2002 - 2003 Apple Computer, Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
# 1. Redistributions of source code must retain the above copyright
#    notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
#    notice, this list of conditions and the following disclaimer in the
#    documentation and/or other materials provided with the distribution.
# 3. Neither the name of Apple Computer, Inc. nor the names of its contributors
#    may be used to endorse or promote products derived from this software
#    without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
#

package provide portfetch 1.0
package require portutil 1.0
package require Pextlib 1.0

set org.macports.fetch [target_new org.macports.fetch portfetch::fetch_main]
target_init ${org.macports.fetch} portfetch::fetch_init
target_provides ${org.macports.fetch} fetch
target_requires ${org.macports.fetch} main
target_prerun ${org.macports.fetch} portfetch::fetch_start

namespace eval portfetch {
    namespace export suffix
    variable fetch_urls {}
}

# Namespace for internal site lists storage
namespace eval portfetch::mirror_sites {
    variable sites

    array set sites {}
}

# Define fetch-related options
options master_sites patch_sites extract.suffix distfiles patchfiles use_zip use_bzip2 use_lzma use_7z use_dmg dist_subdir \
    fetch.type fetch.user fetch.password fetch.use_epsv fetch.ignore_sslcert \
    master_sites.mirror_subdir patch_sites.mirror_subdir portname \
    cvs.module cvs.root cvs.password cvs.date cvs.tag cvs.method \
    svn.url svn.tag svn.revision svn.method \
    git.url git.branch \
    hg.url hg.tag
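
# Illustrative (not part of the original file): a hypothetical Portfile
# fragment exercising some of the options declared above. The option names
# are real; the project name, URL, and the assumption that a "sourceforge"
# mirror class exists in mirror_sites.tcl are made up for the example.
#
#     name            examplelib
#     version         1.0
#     master_sites    http://download.example.org/releases/ \
#                     sourceforge:examplelib
#     dist_subdir     ${name}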

# XXX we use the command framework to buy us some useful features,
# but this is not a user-modifiable command
commands cvs
commands svn

# Defaults
default extract.suffix .tar.gz
default fetch.type standard

default cvs.cmd {$portutil::autoconf::cvs_path}
default cvs.password ""
default cvs.dir {${workpath}}
default cvs.method {export}
default cvs.module {$distname}
default cvs.tag ""
default cvs.date ""
default cvs.env {CVS_PASSFILE=${workpath}/.cvspass}
default cvs.pre_args {"-z9 -f -d ${cvs.root}"}
default cvs.args ""
default cvs.post_args {"${cvs.module}"}

default svn.cmd {$portutil::autoconf::svn_path}
default svn.dir {${workpath}}
default svn.method {export}
default svn.tag ""
default svn.revision ""
default svn.env {}
default svn.pre_args {"--non-interactive"}
default svn.args ""
default svn.post_args {"${svn.url}"}

default git.dir {${workpath}}
default git.branch {}

default hg.dir {${workpath}}
default hg.tag {tip}

# Set distfiles
default distfiles {[portfetch::suffix $distname]}
default dist_subdir {${portname}}

# user name & password
default fetch.user ""
default fetch.password ""
# Use EPSV for FTP transfers
default fetch.use_epsv "yes"
# Ignore SSL certificate
default fetch.ignore_sslcert "no"
# Use remote timestamps
default fetch.remote_time "no"

default fallback_mirror_site "macports"
default global_mirror_site "macports_distfiles"
default mirror_sites.listfile {"mirror_sites.tcl"}
default mirror_sites.listpath {"port1.0/fetch"}

# Deprecation
option_deprecate svn.tag svn.revision

# Option-executed procedures
option_proc use_bzip2 portfetch::set_extract_type
option_proc use_lzma  portfetch::set_extract_type
option_proc use_zip   portfetch::set_extract_type
option_proc use_7z    portfetch::set_extract_type
option_proc use_dmg   portfetch::set_extract_type

option_proc fetch.type portfetch::set_fetch_type

# We should probably add something like a depends_fetch; see #15161
proc portfetch::set_extract_type {option action args} {
    global extract.suffix
    if {[string equal ${action} "set"] && [tbool args]} {
        switch $option {
            use_bzip2 {
                set extract.suffix .tar.bz2
            }
            use_lzma {
                set extract.suffix .tar.lzma
                depends_build-append bin:lzma:lzmautils
            }
            use_zip {
                set extract.suffix .zip
                depends_build-append bin:unzip:unzip
            }
            use_7z {
                set extract.suffix .7z
                depends_build-append bin:7za:p7zip
            }
            use_dmg {
                set extract.suffix .dmg
            }
        }
    }
}
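
# Illustrative (not from the original file): with the option_proc wiring above,
# a Portfile line such as
#
#     use_bzip2   yes
#
# runs portfetch::set_extract_type, which switches extract.suffix to .tar.bz2,
# so the default distfile becomes ${distname}.tar.bz2. The lzma/zip/7z cases
# additionally append the matching extract-tool dependency.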

proc portfetch::set_fetch_type {option action args} {
    if {[string equal ${action} "set"]} {
        switch $args {
            cvs {
                depends_build-append bin:cvs:cvs
            }
            svn {
                depends_build-append bin:svn:subversion
            }
            git {
                depends_build-append bin:git:git-core
            }
            hg {
                depends_build-append bin:hg:mercurial
            }
        }
    }
}
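
# Illustrative (not from the original file): a hypothetical Portfile that
# fetches from a Subversion repository instead of a distfile:
#
#     fetch.type      svn
#     svn.url         http://svn.example.org/repos/examplelib/trunk
#     svn.revision    1234
#
# Setting fetch.type runs portfetch::set_fetch_type, which appends the
# bin:svn:subversion build dependency shown above.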

set_ui_prefix


# Given a distname, return the full distfile name: the distname plus a suffix
# chosen from the use_zip / use_bzip2 / use_dmg / extract.suffix options
# (or nothing for the repository-based fetch types)
proc portfetch::suffix {distname} {
    global extract.suffix fetch.type
    switch -- "${fetch.type}" {
        cvs         -
        svn         -
        git         -
        hg          { return "" }
        standard    -
        default     { return "${distname}${extract.suffix}" }
    }
}
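# Illustrative (not from the original file): for a port with distname
# examplelib-1.0 and the default extract.suffix,
#
#     portfetch::suffix examplelib-1.0   ;# -> "examplelib-1.0.tar.gz"
#
# whereas with fetch.type set to cvs/svn/git/hg it returns "".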
# XXX import suffix into the global namespace as it is currently used from
# Portfiles, but it would be better off somewhere else
namespace import portfetch::suffix

# Given a site url and the name of the distfile, assemble the full url and
# return it.
proc portfetch::assemble_url {site distfile} {
    if {[string index $site end] != "/"} {
        return "${site}/${distfile}"
    } else {
        return "${site}${distfile}"
    }
}
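
# Illustrative (not from the original file):
#
#     portfetch::assemble_url http://mirror.example.org/pub foo-1.0.tar.gz
#         ;# -> "http://mirror.example.org/pub/foo-1.0.tar.gz"
#     portfetch::assemble_url http://mirror.example.org/pub/ foo-1.0.tar.gz
#         ;# -> "http://mirror.example.org/pub/foo-1.0.tar.gz"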

# For a given mirror site type, e.g. "gnu" or "x11", check to see if there's a
# pre-registered set of sites, and if so, return them.
proc portfetch::mirror_sites {mirrors tag subdir} {
    global UI_PREFIX portname porturl mirror_sites.listfile mirror_sites.listpath dist_subdir
    global global_mirror_site fallback_mirror_site

    set mirrorfile [getportresourcepath $porturl [file join ${mirror_sites.listpath} ${mirror_sites.listfile}]]
    if {[file exists $mirrorfile]} {
        source $mirrorfile
    }

    if {![info exists portfetch::mirror_sites::sites($mirrors)]} {
        if {$mirrors != $global_mirror_site && $mirrors != $fallback_mirror_site} {
            ui_warn "[format [msgcat::mc "No mirror sites on file for class %s"] $mirrors]"
        }
        return {}
    }

    set ret [list]
    foreach element $portfetch::mirror_sites::sites($mirrors) {

        # Here we have the chance to look at tags that may have been
        # assigned in mirror_sites.tcl
        set splitlist [split $element :]
        # every element is a URL, so we'll always have multiple elements; no need to check
        set element "[lindex $splitlist 0]:[lindex $splitlist 1]"
        set mirror_tag "[lindex $splitlist 2]"

        set name_re {\$(?:name\y|\{name\})}
        # if the URL has $name embedded, kill any mirror_tag that may have been added
        # since a mirror_tag and $name are incompatible
        if {[regexp $name_re $element]} {
            set mirror_tag ""
        }

        if {$mirror_tag == "mirror"} {
            set thesubdir ${dist_subdir}
        } elseif {$subdir == "" && $mirror_tag != "nosubdir"} {
            set thesubdir ${portname}
        } else {
            set thesubdir ${subdir}
        }

        # parse an embedded $name. if present, remove the subdir
        if {[regsub $name_re $element $thesubdir element] > 0} {
            set thesubdir ""
        }

        if {"$tag" != ""} {
            eval append element "${thesubdir}:${tag}"
        } else {
            eval append element "${thesubdir}"
        }
        eval lappend ret $element
    }

    return $ret
}
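
# Illustrative (not from the original file): entries read from the
# mirror_sites.tcl list sourced above have roughly this shape (the class name
# and URLs below are examples, not an excerpt of the real list):
#
#     array set portfetch::mirror_sites::sites {
#         examplemirror {
#             http://mirror1.example.org/pub/:nosubdir
#             http://mirror2.example.org/files/${name}/
#         }
#     }
#
# An optional trailing :tag such as "mirror" or "nosubdir" controls the
# subdirectory appended in the loop above; a URL may also embed ${name},
# which is substituted for the subdirectory.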

# Check sites.
# Site tags create variables in the portfetch:: namespace containing all sites
# within that tag. Distfiles are added in $site $distfile format, where $site is
# the name of a variable in the portfetch:: namespace containing a list of fetch
# sites.
proc portfetch::checksites {args} {
    global patch_sites master_sites master_sites.mirror_subdir \
        patch_sites.mirror_subdir fallback_mirror_site global_mirror_site env

    append master_sites " ${global_mirror_site} ${fallback_mirror_site}"
    if {[info exists env(MASTER_SITE_LOCAL)]} {
        set master_sites [concat $env(MASTER_SITE_LOCAL) $master_sites]
    }

    append patch_sites " ${global_mirror_site} ${fallback_mirror_site}"
    if {[info exists env(PATCH_SITE_LOCAL)]} {
        set patch_sites [concat $env(PATCH_SITE_LOCAL) $patch_sites]
    }

    foreach list {master_sites patch_sites} {
        upvar #0 $list uplist
        if {![info exists uplist]} {
            continue
        }

        set site_list [list]
        foreach site $uplist {
            if {[regexp {([a-zA-Z]+://.+)} $site match site]} {
                set site_list [concat $site_list $site]
            } else {
                set splitlist [split $site :]
                if {[llength $splitlist] > 3 || [llength $splitlist] < 1} {
                    ui_error [format [msgcat::mc "Unable to process mirror sites for: %s, ignoring."] $site]
                }
                set mirrors "[lindex $splitlist 0]"
                set subdir "[lindex $splitlist 1]"
                set tag "[lindex $splitlist 2]"
                if {[info exists $list.mirror_subdir]} {
                    append subdir "[set ${list}.mirror_subdir]"
                }
                set site_list [concat $site_list [mirror_sites $mirrors $tag $subdir]]
            }
        }

        # add in the global and fallback mirrors for each tag
        foreach site $site_list {
            if {[regexp {([a-zA-Z]+://.+/?):([0-9A-Za-z_-]+)$} $site match site tag]} {
                if {![info exists extras_added($tag)]} {
                    set site_list [concat $site_list [mirror_sites $global_mirror_site $tag ""] [mirror_sites $fallback_mirror_site $tag ""]]
                    if {[string equal $list master_sites] && [info exists env(MASTER_SITE_LOCAL)]} {
                        set site_list [concat [list $env(MASTER_SITE_LOCAL)] $site_list]
                    } elseif {[string equal $list patch_sites] && [info exists env(PATCH_SITE_LOCAL)]} {
                        set site_list [concat [list $env(PATCH_SITE_LOCAL)] $site_list]
                    }
                    set extras_added($tag) yes
                }
            }
        }

        foreach site $site_list {
            if {[regexp {([a-zA-Z]+://.+/?):([0-9A-Za-z_-]+)$} $site match site tag]} {
                lappend portfetch::$tag $site
            } else {
                lappend portfetch::$list $site
            }
        }
    }
}
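
# Illustrative (not from the original file): master_sites entries may be full
# URLs or mirror-class specifications of the form class[:subdir[:tag]], and a
# full URL may itself carry a trailing :tag. A hypothetical Portfile line:
#
#     master_sites    http://download.example.org/releases/:source \
#                     sourceforge:examplelib:source
#
# After checksites runs, the sites tagged "source" end up in the
# portfetch::source variable, and untagged sites in portfetch::master_sites.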

# Checks patch files and their tags to assemble url lists for later fetching
proc portfetch::checkpatchfiles {args} {
    global patchfiles all_dist_files patch_sites filespath
    variable fetch_urls

    if {[info exists patchfiles]} {
        foreach file $patchfiles {
            if {![file exists $filespath/$file]} {
                set distsite [getdisttag $file]
                set file [getdistname $file]
                lappend all_dist_files $file
                if {$distsite != ""} {
                    lappend fetch_urls $distsite $file
                } elseif {[info exists patch_sites]} {
                    lappend fetch_urls patch_sites $file
                } else {
                    lappend fetch_urls master_sites $file
                }
            }
        }
    }
}

# Checks dist files and their tags to assemble url lists for later fetching
proc portfetch::checkdistfiles {args} {
    global distfiles all_dist_files master_sites filespath
    variable fetch_urls

    if {[info exists distfiles]} {
        foreach file $distfiles {
            if {![file exists $filespath/$file]} {
                set distsite [getdisttag $file]
                set file [getdistname $file]
                lappend all_dist_files $file
                if {$distsite != ""} {
                    lappend fetch_urls $distsite $file
                } else {
                    lappend fetch_urls master_sites $file
                }
            }
        }
    }
}
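
# Illustrative (not from the original file): a distfile or patchfile entry may
# carry a :tag naming the site list it should be fetched from, e.g. a
# hypothetical
#
#     distfiles       examplelib-1.0.tar.gz:source examplelib-data-1.0.tar.gz
#
# leaves fetch_urls as the flat {url_var distfile ...} list
#
#     {source examplelib-1.0.tar.gz master_sites examplelib-data-1.0.tar.gz}
#
# with getdisttag/getdistname splitting the tag from the file name.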

# sorts fetch_urls in order of ping time
proc portfetch::sortsites {args} {
    global fallback_mirror_site
    variable fetch_urls

    set fallback_mirror_list [mirror_sites $fallback_mirror_site {} {}]

    foreach {url_var distfile} $fetch_urls {
        variable portfetch::$url_var
        if {![info exists $url_var]} {
            ui_error [format [msgcat::mc "No defined site for tag: %s, using master_sites"] $url_var]
            set url_var master_sites
            variable portfetch::$url_var
        }
        set urllist [set $url_var]
        set hosts {}
        set hostregex {[a-zA-Z]+://([a-zA-Z0-9\.\-_]+)}

        if {[llength $urllist] - [llength $fallback_mirror_list] <= 1} {
            # there is only one mirror, no need to ping or sort
            continue
        }

        foreach site $urllist {
            regexp $hostregex $site -> host

            if { [info exists seen($host)] } {
                continue
            }
            foreach fallback $fallback_mirror_list {
                if {[string match [append fallback *] $site]} {
                    # don't bother pinging fallback mirrors
                    set seen($host) yes
                    # and make them sort to the very end of the list
                    set pingtimes($host) 20000
                    break
                }
            }
            if { ![info exists seen($host)] } {
                if {[catch {set fds($host) [open "|ping -noq -c3 -t3 $host | grep round-trip | cut -d / -f 5"]}]} {
                    ui_debug "Spawning ping for $host failed"
                    # will end up after all hosts that were pinged OK but before those that didn't respond
                    set pingtimes($host) 5000
                } else {
                    ui_debug "Pinging $host..."
                    set seen($host) yes
                    lappend hosts $host
                }
            }
        }

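        # Illustrative note (not from the original file): each pipeline above
        # prints only the average round-trip time. On Mac OS X, ping's summary
        # line looks roughly like
        #     round-trip min/avg/max/stddev = 20.1/23.4/29.8/3.2 ms
        # and "cut -d / -f 5" picks the 5th /-separated field, i.e. the
        # average (23.4), which is read back below via gets.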
        foreach host $hosts {
            set len [gets $fds($host) pingtimes($host)]
            if { [catch { close $fds($host) }] || ![string is double -strict $pingtimes($host)] } {
                # ping failed, so put it last in the list (but before the fallback mirrors)
                set pingtimes($host) 10000
            }
            ui_debug "$host ping time is $pingtimes($host)"
        }

        set pinglist {}
        foreach site $urllist {
            regexp $hostregex $site -> host
            lappend pinglist [ list $site $pingtimes($host) ]
        }

        set pinglist [ lsort -real -index 1 $pinglist ]

        set $url_var {}
        foreach pair $pinglist {
            lappend $url_var [lindex $pair 0]
        }
    }
}

# Perform the full checksites/checkpatchfiles/checkdistfiles sequence.
# This procedure is used by the distcheck target.
proc portfetch::checkfiles {args} {
    variable fetch_urls

    checksites
    checkpatchfiles
    checkdistfiles
}


# Perform a CVS login and fetch, storing the CVS login
# information in a custom .cvspass file
proc portfetch::cvsfetch {args} {
    global workpath cvs.env cvs.cmd cvs.args cvs.post_args
    global cvs.root cvs.date cvs.tag cvs.method cvs.password
    global patch_sites patchfiles filespath env

    set cvs.args "${cvs.method} ${cvs.args}"
    if {${cvs.method} == "export" && ![string length ${cvs.tag}] && ![string length ${cvs.date}]} {
        set cvs.tag "HEAD"
    }
    if {[string length ${cvs.tag}]} {
        set cvs.args "${cvs.args} -r ${cvs.tag}"
    }

    if {[string length ${cvs.date}]} {
        set cvs.args "${cvs.args} -D ${cvs.date}"
    }

    if {[regexp ^:pserver: ${cvs.root}]} {
        set savecmd ${cvs.cmd}
        set saveargs ${cvs.args}
        set savepost_args ${cvs.post_args}
        set cvs.cmd "echo ${cvs.password} | $portutil::autoconf::cvs_path"
        set cvs.args login
        set cvs.post_args ""
        if {[catch {command_exec cvs -notty "" "2>&1"} result]} {
            return -code error [msgcat::mc "CVS login failed"]
        }
        set cvs.cmd ${savecmd}
        set cvs.args ${saveargs}
        set cvs.post_args ${savepost_args}
    } else {
        set env(CVS_RSH) ssh
    }

    if {[catch {command_exec cvs "" "2>&1"} result]} {
        return -code error [msgcat::mc "CVS check out failed"]
    }

    if {[info exists patchfiles]} {
        return [portfetch::fetchfiles]
    }
    return 0
}

# Perform an svn fetch
proc portfetch::svnfetch {args} {
    global workpath prefix_frozen patchfiles
    global svn.env svn.cmd svn.args svn.post_args svn.revision svn.url svn.method

    # Look for the svn command, either in the path or in the prefix
    set goodcmd 0
    foreach svncmd "${svn.cmd} ${prefix_frozen}/bin/svn svn" {
        if { [file executable ${svncmd}] } {
            set svn.cmd $svncmd
            set goodcmd 1
            break;
        }
    }
    if { !$goodcmd } {
        ui_error "The subversion tool (svn) is required to fetch ${svn.url}."
        ui_error "Please install the subversion port before proceeding."
        return -code error [msgcat::mc "Subversion check out failed"]
    }

    set svn.args "${svn.method} ${svn.args}"
    if {[string length ${svn.revision}]} {
        set svn.args "${svn.args} -r ${svn.revision}"
    }

    if {[catch {command_exec svn "" "2>&1"} result]} {
        return -code error [msgcat::mc "Subversion check out failed"]
    }

    if {[info exists patchfiles]} {
        return [portfetch::fetchfiles]
    }

    return 0
}

# Perform a git fetch
proc portfetch::gitfetch {args} {
    global worksrcpath prefix_frozen patchfiles
    global git.url git.branch git.sha1

    # Look for the git command
    set git.cmd {}
    foreach gitcmd "$portutil::autoconf::git_path $prefix_frozen/bin/git git" {
        if {[file executable $gitcmd]} {
            set git.cmd $gitcmd
            break
        }
    }
    if {${git.cmd} == {}} {
        ui_error "git is required to fetch ${git.url}"
        ui_error "Please install the git-core port before proceeding."
        return -code error [msgcat::mc "Git command not found"]
    }

    set options "-q"
    if {[string length ${git.branch}] == 0} {
        # if we're just using HEAD, we can make a shallow repo
        set options "$options --depth=1"
    }
    set cmdstring "${git.cmd} clone $options ${git.url} ${worksrcpath} 2>&1"
    ui_debug "Executing: $cmdstring"
    if {[catch {system $cmdstring} result]} {
        return -code error [msgcat::mc "Git clone failed"]
    }

    if {[string length ${git.branch}] > 0} {
        set env "GIT_DIR=${worksrcpath}/.git GIT_WORK_TREE=${worksrcpath}"
        set cmdstring "$env ${git.cmd} checkout -q ${git.branch} 2>&1"
        ui_debug "Executing $cmdstring"
        if {[catch {system $cmdstring} result]} {
            return -code error [msgcat::mc "Git checkout failed"]
        }
    }

    if {[info exists patchfiles]} {
        return [portfetch::fetchfiles]
    }

    return 0
}
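
# Illustrative (not from the original file): with a hypothetical Portfile
# setting git.url http://git.example.org/examplelib.git and git.branch v1.0,
# the commands built above amount to
#
#     git clone -q http://git.example.org/examplelib.git ${worksrcpath} 2>&1
#     GIT_DIR=${worksrcpath}/.git GIT_WORK_TREE=${worksrcpath} git checkout -q v1.0 2>&1
#
# With git.branch left empty, only a shallow (--depth=1) clone of HEAD is made.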

# Perform a mercurial fetch.
proc portfetch::hgfetch {args} {
    global worksrcpath prefix_frozen patchfiles
    global hg.url hg.tag

    # Look for the hg command.
    set hg.cmd {}
    foreach hgcmd "$prefix_frozen/bin/hg hg" {
        if {[file executable $hgcmd]} {
            set hg.cmd $hgcmd
            break
        }
    }
    if {${hg.cmd} == {}} {
        ui_error "hg is required to fetch ${hg.url}"
        ui_error "Please install the mercurial port before proceeding."
        return -code error [msgcat::mc "Mercurial command not found"]
    }

    set cmdstring "${hg.cmd} clone --rev ${hg.tag} ${hg.url} ${worksrcpath} 2>&1"
    ui_debug "Executing: $cmdstring"
    if {[catch {system $cmdstring} result]} {
        return -code error [msgcat::mc "Mercurial clone failed"]
    }

    if {[info exists patchfiles]} {
        return [portfetch::fetchfiles]
    }

    return 0
}

# Perform a standard fetch, assembling fetch urls from
# the listed url variable and associated distfile
proc portfetch::fetchfiles {args} {
    global distpath all_dist_files UI_PREFIX
    global fetch.user fetch.password fetch.use_epsv fetch.ignore_sslcert fetch.remote_time
    global distfile site
    global portverbose
    variable fetch_urls

    if {![file isdirectory $distpath]} {
        if {[catch {file mkdir $distpath} result]} {
            return -code error [format [msgcat::mc "Unable to create distribution files path: %s"] $result]
        }
    }

    set fetch_options {}
    if {[string length ${fetch.user}] || [string length ${fetch.password}]} {
        lappend fetch_options -u
        lappend fetch_options "${fetch.user}:${fetch.password}"
    }
    if {${fetch.use_epsv} != "yes"} {
        lappend fetch_options "--disable-epsv"
    }
    if {${fetch.ignore_sslcert} != "no"} {
        lappend fetch_options "--ignore-ssl-cert"
    }
    if {${fetch.remote_time} != "no"} {
        lappend fetch_options "--remote-time"
    }
    if {$portverbose == "yes"} {
        lappend fetch_options "-v"
    }
    set sorted no

    foreach {url_var distfile} $fetch_urls {
        if {![file isfile $distpath/$distfile]} {
            ui_info "$UI_PREFIX [format [msgcat::mc "%s doesn't seem to exist in %s"] $distfile $distpath]"
            if {![file writable $distpath]} {
                return -code error [format [msgcat::mc "%s must be writable"] $distpath]
            }
            if {!$sorted} {
                sortsites
                set sorted yes
            }
            variable portfetch::$url_var
            if {![info exists $url_var]} {
                ui_error [format [msgcat::mc "No defined site for tag: %s, using master_sites"] $url_var]
                set url_var master_sites
                variable portfetch::$url_var
            }
            unset -nocomplain fetched
            foreach site [set $url_var] {
                ui_msg "$UI_PREFIX [format [msgcat::mc "Attempting to fetch %s from %s"] $distfile $site]"
                set file_url [portfetch::assemble_url $site $distfile]
                set effectiveURL ""
                if {![catch {eval curl fetch --effective-url effectiveURL $fetch_options {$file_url} ${distpath}/${distfile}.TMP} result] &&
                    ![catch {file rename -force "${distpath}/${distfile}.TMP" "${distpath}/${distfile}"} result]} {

                    # Special hack to check for sourceforge mirrors, which don't return a proper error code on failure
                    if {![string equal $effectiveURL $file_url] &&
                        [string match "*sourceforge*" $file_url] &&
                        [string match "*failedmirror*" $effectiveURL]} {

                        # *SourceForge hackage in effect*
                        # The url seen by curl seems to have been a redirect to the sourceforge mirror page
                        ui_debug "[msgcat::mc "Fetching from sourceforge mirror failed"]"
                        file delete -force "${distpath}/${distfile}.TMP"

                        # Continue on to try the next mirror, if any
                    } else {

                        # Successful fetch
                        set fetched 1
                        break

                    }

                } else {
                    ui_debug "[msgcat::mc "Fetching failed:"] $result"
                    file delete -force "${distpath}/${distfile}.TMP"
                }
            }
            if {![info exists fetched]} {
                return -code error [msgcat::mc "fetch failed"]
            }
        }
    }
    return 0
}

# Utility function to delete fetched files.
proc portfetch::fetch_deletefiles {args} {
    global distpath
    variable fetch_urls
    foreach {url_var distfile} $fetch_urls {
        if {[file isfile $distpath/$distfile]} {
            file delete -force "${distpath}/${distfile}"
        }
    }
}

# Utility function to add files to a list of fetched files.
proc portfetch::fetch_addfilestomap {filemapname} {
    global distpath $filemapname
    variable fetch_urls
    foreach {url_var distfile} $fetch_urls {
        if {[file isfile $distpath/$distfile]} {
            filemap set $filemapname $distpath/$distfile 1
        }
    }
}

# Initialize fetch target and call checkfiles.
proc portfetch::fetch_init {args} {
    global distfiles distname distpath all_dist_files dist_subdir fetch.type fetch_init_done
    global altprefix usealtworkpath

    if {[info exists distpath] && [info exists dist_subdir] && ![info exists fetch_init_done]} {

        # start gsoc08-privileges
        if {[info exists usealtworkpath] && $usealtworkpath == "yes"} {
            # I have removed ![file writable $distpath] from the if condition as
            # the writable condition seems to get confused by effective uids.
            set distpath "$altprefix/[ string range $distpath 1 end ]"
            ui_debug "Going to use $distpath for fetch."
        }
        # end gsoc08-privileges
        set distpath ${distpath}/${dist_subdir}
        set fetch_init_done yes
    }
    portfetch::checkfiles
}

proc portfetch::fetch_start {args} {
    global UI_PREFIX portname

    ui_msg "$UI_PREFIX [format [msgcat::mc "Fetching %s"] $portname]"
}

# Main fetch routine
# If all_dist_files is not populated and $fetch.type == standard, then
# there are no files to download. Otherwise, either check out from the
# configured cvs/svn/git/hg repository or call the standard fetchfiles procedure
proc portfetch::fetch_main {args} {
    global distname distpath all_dist_files fetch.type

    # Check for files, download if necessary
    if {![info exists all_dist_files] && "${fetch.type}" == "standard"} {
        return 0
    }

    # Fetch the files
    switch -- "${fetch.type}" {
        cvs     { return [cvsfetch] }
        svn     { return [svnfetch] }
        git     { return [gitfetch] }
        hg      { return [hgfetch] }
        standard -
        default { return [portfetch::fetchfiles] }
    }
}