source: branches/gsoc09-logging/base/src/port1.0/portfetch.tcl @ 52218

Last change on this file since 52218 was 52218, checked in by enl@…, 11 years ago

Merge from trunk

  • Property svn:eol-style set to native
  • Property svn:keywords set to Id
File size: 25.2 KB
Line 
1# -*- coding: utf-8; mode: tcl; tab-width: 4; indent-tabs-mode: nil; c-basic-offset: 4 -*- vim:fenc=utf-8:ft=tcl:et:sw=4:ts=4:sts=4
2# portfetch.tcl
3# $Id: portfetch.tcl 52218 2009-06-12 08:57:53Z enl@macports.org $
4#
5# Copyright (c) 2002 - 2003 Apple Computer, Inc.
6# All rights reserved.
7#
8# Redistribution and use in source and binary forms, with or without
9# modification, are permitted provided that the following conditions
10# are met:
11# 1. Redistributions of source code must retain the above copyright
12#    notice, this list of conditions and the following disclaimer.
13# 2. Redistributions in binary form must reproduce the above copyright
14#    notice, this list of conditions and the following disclaimer in the
15#    documentation and/or other materials provided with the distribution.
16# 3. Neither the name of Apple Computer, Inc. nor the names of its contributors
17#    may be used to endorse or promote products derived from this software
18#    without specific prior written permission.
19#
20# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
21# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
22# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
23# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
24# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
25# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
26# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
27# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
28# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
29# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
30# POSSIBILITY OF SUCH DAMAGE.
31#
32
package provide portfetch 1.0
package require portutil 1.0
package require Pextlib 1.0

# Register the fetch target with the target framework: it runs after
# "main", provides "fetch", is initialized by fetch_init (which adjusts
# distpath and assembles the url lists) and announces itself via the
# fetch_start pre-run hook.
set org.macports.fetch [target_new org.macports.fetch portfetch::fetch_main]
target_init ${org.macports.fetch} portfetch::fetch_init
target_provides ${org.macports.fetch} fetch
target_requires ${org.macports.fetch} main
target_prerun ${org.macports.fetch} portfetch::fetch_start
42
# Public namespace for the fetch phase. "suffix" is exported because
# Portfiles call it directly (see the namespace import further below).
namespace eval portfetch {
    namespace export suffix
    # Flat list of {url_var distfile} pairs accumulated by
    # checkpatchfiles/checkdistfiles and consumed by fetchfiles.
    variable fetch_urls {}
}

# Name space for internal site lists storage; the sites array is
# populated by sourcing mirror_sites.tcl (see portfetch::mirror_sites).
namespace eval portfetch::mirror_sites {
    variable sites

    array set sites {}
}
54
# define options: distname master_sites
options master_sites patch_sites extract.suffix distfiles patchfiles use_zip use_bzip2 use_lzma use_7z use_dmg dist_subdir \
    fetch.type fetch.user fetch.password fetch.use_epsv fetch.ignore_sslcert \
    master_sites.mirror_subdir patch_sites.mirror_subdir \
    cvs.module cvs.root cvs.password cvs.date cvs.tag cvs.method \
    svn.url svn.revision svn.method \
    git.cmd git.url git.branch \
    hg.cmd hg.url hg.tag

# XXX we use the command framework to buy us some useful features,
# but this is not a user-modifiable command
commands cvs
commands svn

# Defaults
default extract.suffix .tar.gz
default fetch.type standard

# cvs defaults: "export" into ${workpath}, with a per-port .cvspass file
# so logins don't touch the user's real ~/.cvspass.
default cvs.cmd {[findBinary cvs $portutil::autoconf::cvs_path]}
default cvs.password ""
default cvs.dir {${workpath}}
default cvs.method {export}
default cvs.module {$distname}
default cvs.tag ""
default cvs.date ""
default cvs.env {CVS_PASSFILE=${workpath}/.cvspass}
default cvs.pre_args {"-z9 -f -d ${cvs.root}"}
default cvs.args ""
default cvs.post_args {"${cvs.module}"}

# svn defaults: non-interactive "export" into ${workpath}.
default svn.cmd {[findBinary svn $portutil::autoconf::svn_path]}
default svn.dir {${workpath}}
default svn.method {export}
default svn.revision ""
default svn.env {}
default svn.pre_args {"--non-interactive"}
default svn.args ""
default svn.post_args {"${svn.url}"}

# git defaults: an empty branch means clone HEAD (see gitfetch).
default git.cmd {[findBinary git $portutil::autoconf::git_path]}
default git.dir {${workpath}}
default git.branch {}

# mercurial defaults: clone the "tip" revision unless told otherwise.
default hg.cmd {[findBinary hg $portutil::autoconf::hg_path]}
default hg.dir {${workpath}}
default hg.tag {tip}

# Set distfiles
default distfiles {[portfetch::suffix $distname]}
default dist_subdir {${name}}

# user name & password
default fetch.user ""
default fetch.password ""
# Use EPSV for FTP transfers
default fetch.use_epsv "yes"
# Ignore SSL certificate
default fetch.ignore_sslcert "no"
# Use remote timestamps
default fetch.remote_time "no"

# Mirror site classes implicitly appended for every port, and the
# location of the mirror site list within the ports tree resources.
default fallback_mirror_site "macports"
default global_mirror_site "macports_distfiles"
default mirror_sites.listfile {"mirror_sites.tcl"}
default mirror_sites.listpath {"port1.0/fetch"}

# Deprecation
option_deprecate svn.tag svn.revision

# Option-executed procedures
option_proc use_bzip2 portfetch::set_extract_type
option_proc use_lzma  portfetch::set_extract_type
option_proc use_zip   portfetch::set_extract_type
option_proc use_7z    portfetch::set_extract_type
option_proc use_dmg   portfetch::set_extract_type

option_proc fetch.type portfetch::set_fetch_type
132
# Option procedure for the use_* archive-type options: when one of them
# is set to a true value, pick the matching extract.suffix and pull in
# any extract-time dependency needed to unpack that archive format.
proc portfetch::set_extract_type {option action args} {
    global extract.suffix
    # Only react to "set" actions with a true value; everything else
    # (read, delete, false values) leaves the suffix untouched.
    if {![string equal ${action} "set"] || ![tbool args]} {
        return
    }
    switch $option {
        use_bzip2 {
            set extract.suffix .tar.bz2
        }
        use_lzma {
            set extract.suffix .tar.lzma
            depends_extract-append bin:lzma:lzmautils
        }
        use_zip {
            set extract.suffix .zip
            depends_extract-append bin:unzip:unzip
        }
        use_7z {
            set extract.suffix .7z
            depends_extract-append bin:7za:p7zip
        }
        use_dmg {
            set extract.suffix .dmg
        }
    }
}
158
# Option procedure for fetch.type: when a VCS-based fetch type is
# selected, add a fetch-time dependency on the port supplying the
# corresponding client binary. Unknown types (e.g. "standard") add none.
proc portfetch::set_fetch_type {option action args} {
    # Map each VCS fetch type onto its client-binary dependency spec.
    array set vcs_deps {
        cvs bin:cvs:cvs
        svn bin:svn:subversion
        git bin:git:git-core
        hg  bin:hg:mercurial
    }
    if {[string equal ${action} "set"] && [info exists vcs_deps($args)]} {
        depends_fetch-append $vcs_deps($args)
    }
}
177
set_ui_prefix


# Given a distname, return a suffix based on the use_zip / use_bzip2 / use_dmg / extract.suffix options
proc portfetch::suffix {distname} {
    global extract.suffix fetch.type
    # VCS-based fetch types have no distfile, hence no suffix.
    if {[lsearch -exact {cvs svn git hg} ${fetch.type}] != -1} {
        return ""
    }
    # "standard" and any unrecognized type use the configured suffix.
    return "${distname}${extract.suffix}"
}
# XXX import suffix into the global namespace as it is currently used from
# Portfiles, but should better go somewhere else
namespace import portfetch::suffix
196
# Given a site url and the name of the distfile, assemble url and
# return it, inserting a "/" separator only when the site does not
# already end with one.
proc portfetch::assemble_url {site distfile} {
    if {[string index $site end] == "/"} {
        return "${site}${distfile}"
    }
    return "${site}/${distfile}"
}
206
# For a given mirror site type, e.g. "gnu" or "x11", check to see if there's a
# pre-registered set of sites, and if so, return them.
#
# mirrors - mirror class name to look up (key into mirror_sites::sites)
# tag     - tag to append to each resulting url (":tag" suffix), may be ""
# subdir  - subdirectory to append to each url; "" selects a default
# Returns a list of urls, or {} when the class is unknown.
proc portfetch::mirror_sites {mirrors tag subdir} {
    global UI_PREFIX name porturl mirror_sites.listfile mirror_sites.listpath dist_subdir
    global global_mirror_site fallback_mirror_site

    # Load the mirror list from the ports tree resources; sourcing it
    # populates portfetch::mirror_sites::sites.
    set mirrorfile [getportresourcepath $porturl [file join ${mirror_sites.listpath} ${mirror_sites.listfile}]]
    if {[file exists $mirrorfile]} {
        source $mirrorfile
    }

    if {![info exists portfetch::mirror_sites::sites($mirrors)]} {
        # Only warn about classes the Portfile asked for explicitly; the
        # global/fallback classes are appended automatically and may be
        # legitimately absent.
        if {$mirrors != $global_mirror_site && $mirrors != $fallback_mirror_site} {
            ui_warn "[format [msgcat::mc "No mirror sites on file for class %s"] $mirrors]"
        }
        return {}
    }

    set ret [list]
    foreach element $portfetch::mirror_sites::sites($mirrors) {

        # here we have the chance to take a look at tags, that possibly
        # have been assigned in mirror_sites.tcl
        set splitlist [split $element :]
        # every element is a URL, so we'll always have multiple elements. no need to check
        set element "[lindex $splitlist 0]:[lindex $splitlist 1]"
        set mirror_tag "[lindex $splitlist 2]"

        # Matches a literal "$name" or "${name}" placeholder in the url.
        set name_re {\$(?:name\y|\{name\})}
        # if the URL has $name embedded, kill any mirror_tag that may have been added
        # since a mirror_tag and $name are incompatible
        if {[regexp $name_re $element]} {
            set mirror_tag ""
        }

        # Choose the subdirectory: "mirror"-tagged sites use dist_subdir,
        # untagged sites with no explicit subdir default to ${name},
        # "nosubdir"-tagged sites (and explicit subdirs) use $subdir as-is.
        if {$mirror_tag == "mirror"} {
            set thesubdir ${dist_subdir}
        } elseif {$subdir == "" && $mirror_tag != "nosubdir"} {
            set thesubdir ${name}
        } else {
            set thesubdir ${subdir}
        }

        # parse an embedded $name. if present, remove the subdir
        if {[regsub $name_re $element $thesubdir element] > 0} {
            set thesubdir ""
        }

        if {"$tag" != ""} {
            eval append element "${thesubdir}:${tag}"
        } else {
            eval append element "${thesubdir}"
        }
        eval lappend ret $element
    }

    return $ret
}
265
# Checks sites.
# sites tags create variables in the portfetch:: namespace containing all sites
# within that tag distfiles are added in $site $distfile format, where $site is
# the name of a variable in the portfetch:: namespace containing a list of fetch
# sites
proc portfetch::checksites {args} {
    global patch_sites master_sites master_sites.mirror_subdir \
        patch_sites.mirror_subdir fallback_mirror_site global_mirror_site env

    # Every port implicitly gets the global and fallback mirror classes;
    # MASTER_SITE_LOCAL / PATCH_SITE_LOCAL from the environment are
    # prepended so they are tried first.
    append master_sites " ${global_mirror_site} ${fallback_mirror_site}"
    if {[info exists env(MASTER_SITE_LOCAL)]} {
        set master_sites [concat $env(MASTER_SITE_LOCAL) $master_sites]
    }

    append patch_sites " ${global_mirror_site} ${fallback_mirror_site}"
    if {[info exists env(PATCH_SITE_LOCAL)]} {
        set patch_sites [concat $env(PATCH_SITE_LOCAL) $patch_sites]
    }

    foreach list {master_sites patch_sites} {
        upvar #0 $list uplist
        if {![info exists uplist]} {
            continue
        }

        set site_list [list]
        foreach site $uplist {
            # A full url (scheme://...) is used as-is; anything else is a
            # mirror class spec of the form class[:subdir][:tag].
            if {[regexp {([a-zA-Z]+://.+)} $site match site]} {
                set site_list [concat $site_list $site]
            } else {
                set splitlist [split $site :]
                if {[llength $splitlist] > 3 || [llength $splitlist] <1} {
                    ui_error [format [msgcat::mc "Unable to process mirror sites for: %s, ignoring."] $site]
                }
                set mirrors "[lindex $splitlist 0]"
                set subdir "[lindex $splitlist 1]"
                set tag "[lindex $splitlist 2]"
                if {[info exists $list.mirror_subdir]} {
                    append subdir "[set ${list}.mirror_subdir]"
                }
                set site_list [concat $site_list [mirror_sites $mirrors $tag $subdir]]
            }
        }

        # add in the global and fallback mirrors for each tag
        foreach site $site_list {
            if {[regexp {([a-zA-Z]+://.+/?):([0-9A-Za-z_-]+)$} $site match site tag]} {
                if {![info exists extras_added($tag)]} {
                    set site_list [concat $site_list [mirror_sites $global_mirror_site $tag ""] [mirror_sites $fallback_mirror_site $tag ""]]
                    if {[string equal $list master_sites] && [info exists env(MASTER_SITE_LOCAL)]} {
                        set site_list [concat [list $env(MASTER_SITE_LOCAL)] $site_list]
                    } elseif {[string equal $list patch_sites] && [info exists env(PATCH_SITE_LOCAL)]} {
                        set site_list [concat [list $env(PATCH_SITE_LOCAL)] $site_list]
                    }
                    set extras_added($tag) yes
                }
            }
        }

        # Distribute the assembled urls: a trailing ":tag" routes the site
        # into portfetch::$tag, untagged sites go into portfetch::$list.
        foreach site $site_list {
            if {[regexp {([a-zA-Z]+://.+/?):([0-9A-Za-z_-]+)$} $site match site tag]} {
                lappend portfetch::$tag $site
            } else {
                lappend portfetch::$list $site
            }
        }
    }
}
334
# Checks patch files and their tags to assemble url lists for later fetching
proc portfetch::checkpatchfiles {args} {
    global patchfiles all_dist_files patch_sites filespath
    variable fetch_urls

    if {![info exists patchfiles]} {
        return
    }
    foreach patchfile $patchfiles {
        # A patch shipped in the port's files/ directory needs no download.
        if {[file exists $filespath/$patchfile]} {
            continue
        }
        set distsite [getdisttag $patchfile]
        set patchfile [getdistname $patchfile]
        lappend all_dist_files $patchfile
        # Prefer an explicit per-file tag, then patch_sites, then
        # master_sites as the last resort.
        if {$distsite != ""} {
            lappend fetch_urls $distsite $patchfile
        } elseif {[info exists patch_sites]} {
            lappend fetch_urls patch_sites $patchfile
        } else {
            lappend fetch_urls master_sites $patchfile
        }
    }
}
357
# Checks dist files and their tags to assemble url lists for later fetching
proc portfetch::checkdistfiles {args} {
    global distfiles all_dist_files master_sites filespath
    variable fetch_urls

    if {![info exists distfiles]} {
        return
    }
    foreach distfile $distfiles {
        # A distfile present in the port's files/ directory needs no download.
        if {[file exists $filespath/$distfile]} {
            continue
        }
        set distsite [getdisttag $distfile]
        set distfile [getdistname $distfile]
        lappend all_dist_files $distfile
        # Use the per-file tag when present, otherwise master_sites.
        if {$distsite != ""} {
            lappend fetch_urls $distsite $distfile
        } else {
            lappend fetch_urls master_sites $distfile
        }
    }
}
378
# sorts fetch_urls in order of ping time
proc portfetch::sortsites {args} {
    global fallback_mirror_site
    variable fetch_urls

    set fallback_mirror_list [mirror_sites $fallback_mirror_site {} {}]

    foreach {url_var distfile} $fetch_urls {
        # Bind the site list named by the tag; fall back to master_sites
        # when no such list was assembled.
        variable portfetch::$url_var
        if {![info exists $url_var]} {
            ui_error [format [msgcat::mc "No defined site for tag: %s, using master_sites"] $url_var]
            set url_var master_sites
            variable portfetch::$url_var
        }
        set urllist [set $url_var]
        set hosts {}
        set hostregex {[a-zA-Z]+://([a-zA-Z0-9\.\-_]+)}

        if {[llength $urllist] - [llength $fallback_mirror_list] <= 1} {
            # there is only one mirror, no need to ping or sort
            continue
        }

        # Spawn one background ping pipeline per unique host; fallback
        # mirrors are never pinged and get a sentinel time instead.
        foreach site $urllist {
            regexp $hostregex $site -> host

            if { [info exists seen($host)] } {
                continue
            }
            foreach fallback $fallback_mirror_list {
                if {[string match [append fallback *] $site]} {
                    # don't bother pinging fallback mirrors
                    set seen($host) yes
                    # and make them sort to the very end of the list
                    set pingtimes($host) 20000
                    break
                }
            }
            if { ![info exists seen($host)] } {
                if {[catch {set fds($host) [open "|ping -noq -c3 -t3 $host | grep round-trip | cut -d / -f 5"]}]} {
                    ui_debug "Spawning ping for $host failed"
                    # will end up after all hosts that were pinged OK but before those that didn't respond
                    set pingtimes($host) 5000
                } else {
                    ui_debug "Pinging $host..."
                    set seen($host) yes
                    lappend hosts $host
                }
            }
        }

        # Reap each pipeline and record its average round-trip time.
        foreach host $hosts {
            set len [gets $fds($host) pingtimes($host)]
            if { [catch { close $fds($host) }] || ![string is double -strict $pingtimes($host)] } {
                # ping failed, so put it last in the list (but before the fallback mirrors)
                set pingtimes($host) 10000
            }
            ui_debug "$host ping time is $pingtimes($host)"
        }

        # Pair every url with its host's ping time and sort ascending.
        set pinglist {}
        foreach site $urllist {
            regexp $hostregex $site -> host
            lappend pinglist [ list $site $pingtimes($host) ]
        }

        set pinglist [ lsort -real -index 1 $pinglist ]

        # Rewrite the tag's site list in fastest-first order.
        set $url_var {}
        foreach pair $pinglist {
            lappend $url_var [lindex $pair 0]
        }
    }
}
453
# Perform the full checksites/checkpatchfiles/checkdistfiles sequence.
# This method is used by distcheck target.
proc portfetch::checkfiles {args} {
    variable fetch_urls

    # Run the three assembly steps in their required order.
    foreach step {checksites checkpatchfiles checkdistfiles} {
        $step
    }
}
463
464
# Perform a CVS login and fetch, storing the CVS login
# information in a custom .cvspass file.
# Returns 0 on success; raises an error on login or checkout failure.
proc portfetch::cvsfetch {args} {
    global workpath cvs.env cvs.cmd cvs.args cvs.post_args
    global cvs.root cvs.date cvs.tag cvs.method cvs.password
    global patch_sites patchfiles filespath
    # BUGFIX: env must be declared global so that setting CVS_RSH below
    # modifies the real process environment; previously it created a
    # proc-local "env" array and had no effect on the spawned cvs.
    global env

    # Assemble the cvs argument list: method plus optional tag/date.
    set cvs.args "${cvs.method} ${cvs.args}"
    if {${cvs.method} == "export" && ![string length ${cvs.tag}] && ![string length ${cvs.date}]} {
        set cvs.tag "HEAD"
    }
    if {[string length ${cvs.tag}]} {
        set cvs.args "${cvs.args} -r ${cvs.tag}"
    }

    if {[string length ${cvs.date}]} {
        set cvs.args "${cvs.args} -D ${cvs.date}"
    }

    if {[regexp ^:pserver: ${cvs.root}]} {
        # pserver access: run "cvs login" first, piping the password on
        # stdin, while temporarily swapping out cmd/args/post_args.
        set savecmd ${cvs.cmd}
        set saveargs ${cvs.args}
        set savepost_args ${cvs.post_args}
        set cvs.cmd "echo ${cvs.password} | ${cvs.cmd}"
        set cvs.args login
        set cvs.post_args ""
        if {[catch {command_exec cvs -notty "" "2>&1"} result]} {
            return -code error [msgcat::mc "CVS login failed"]
        }
        set cvs.cmd ${savecmd}
        set cvs.args ${saveargs}
        set cvs.post_args ${savepost_args}
    } else {
        # Non-pserver roots are accessed over ssh.
        set env(CVS_RSH) ssh
    }

    if {[catch {command_exec cvs "" "2>&1"} result]} {
        return -code error [msgcat::mc "CVS check out failed"]
    }

    # Patch files, if any, still come via the standard download path.
    if {[info exists patchfiles]} {
        return [portfetch::fetchfiles]
    }
    return 0
}
510
# Perform an svn fetch.
# Returns 0 on success; raises an error when the checkout fails.
proc portfetch::svnfetch {args} {
    # BUGFIX: patchfiles must be declared global; previously the
    # [info exists patchfiles] test below always inspected an unset
    # local variable, so patch files were never downloaded for
    # svn-type ports.
    global patchfiles svn.args svn.revision svn.method

    # Assemble the svn argument list: method plus optional -r revision.
    set svn.args "${svn.method} ${svn.args}"
    if {[string length ${svn.revision}]} {
        set svn.args "${svn.args} -r ${svn.revision}"
    }

    if {[catch {command_exec svn "" "2>&1"} result]} {
        return -code error [msgcat::mc "Subversion check out failed"]
    }

    # Patch files, if any, still come via the standard download path.
    if {[info exists patchfiles]} {
        return [portfetch::fetchfiles]
    }

    return 0
}
530
# Perform a git fetch: clone git.url into worksrcpath and optionally
# check out git.branch. Returns 0 on success; raises an error when the
# clone or checkout fails.
proc portfetch::gitfetch {args} {
    global worksrcpath
    global git.url git.branch git.sha1 git.cmd
    # BUGFIX: patchfiles must be declared global; previously the
    # [info exists patchfiles] test below always inspected an unset
    # local variable, so patch files were never downloaded for
    # git-type ports.
    global patchfiles

    set options "-q"
    if {[string length ${git.branch}] == 0} {
        # if we're just using HEAD, we can make a shallow repo
        set options "$options --depth=1"
    }
    set cmdstring "${git.cmd} clone $options ${git.url} ${worksrcpath} 2>&1"
    ui_debug "Executing: $cmdstring"
    if {[catch {system $cmdstring} result]} {
        return -code error [msgcat::mc "Git clone failed"]
    }

    # A non-empty branch requires an explicit checkout after the clone.
    if {[string length ${git.branch}] > 0} {
        set env "GIT_DIR=${worksrcpath}/.git GIT_WORK_TREE=${worksrcpath}"
        set cmdstring "$env ${git.cmd} checkout -q ${git.branch} 2>&1"
        ui_debug "Executing $cmdstring"
        if {[catch {system $cmdstring} result]} {
            return -code error [msgcat::mc "Git checkout failed"]
        }
    }

    # Patch files, if any, still come via the standard download path.
    if {[info exists patchfiles]} {
        return [portfetch::fetchfiles]
    }

    return 0
}
562
# Perform a mercurial fetch: clone hg.url at revision hg.tag into
# worksrcpath. Returns 0 on success; raises an error when the clone fails.
proc portfetch::hgfetch {args} {
    global worksrcpath prefix_frozen
    global hg.url hg.tag hg.cmd
    # BUGFIX: patchfiles must be declared global; previously the
    # [info exists patchfiles] test below always inspected an unset
    # local variable, so patch files were never downloaded for
    # hg-type ports.
    global patchfiles

    set cmdstring "${hg.cmd} clone --rev ${hg.tag} ${hg.url} ${worksrcpath} 2>&1"
    ui_debug "Executing: $cmdstring"
    if {[catch {system $cmdstring} result]} {
        return -code error [msgcat::mc "Mercurial clone failed"]
    }

    # Patch files, if any, still come via the standard download path.
    if {[info exists patchfiles]} {
        return [portfetch::fetchfiles]
    }

    return 0
}
580
# Perform a standard fetch, assembling fetch urls from
# the listed url variable and associated distfile.
# Iterates over fetch_urls, trying each site for a distfile until one
# succeeds; raises an error when every site fails for some distfile.
proc portfetch::fetchfiles {args} {
    global distpath all_dist_files UI_PREFIX
    global fetch.user fetch.password fetch.use_epsv fetch.ignore_sslcert fetch.remote_time
    global distfile site
    global portverbose
    variable fetch_urls

    # Create distpath, elevating to root only if the first attempt fails,
    # and dropping privileges again immediately afterwards.
    if {![file isdirectory $distpath]} {
        if {[catch {file mkdir $distpath} result]} {
            elevateToRoot "fetch"
            set elevated yes
            if {[catch {file mkdir $distpath} result]} {
                return -code error [format [msgcat::mc "Unable to create distribution files path: %s"] $result]
            }
        }
    }
    chownAsRoot $distpath
    if {[info exists elevated] && $elevated == yes} {
        dropPrivileges
    }

    # Translate the fetch.* options into curl arguments.
    set fetch_options {}
    if {[string length ${fetch.user}] || [string length ${fetch.password}]} {
        lappend fetch_options -u
        lappend fetch_options "${fetch.user}:${fetch.password}"
    }
    if {${fetch.use_epsv} != "yes"} {
        lappend fetch_options "--disable-epsv"
    }
    if {${fetch.ignore_sslcert} != "no"} {
        lappend fetch_options "--ignore-ssl-cert"
    }
    if {${fetch.remote_time} != "no"} {
        lappend fetch_options "--remote-time"
    }
    if {$portverbose == "yes"} {
        lappend fetch_options "-v"
    }
    # Sites are ping-sorted lazily, only once a download is actually needed.
    set sorted no

    foreach {url_var distfile} $fetch_urls {
        if {![file isfile $distpath/$distfile]} {
            ui_info "$UI_PREFIX [format [msgcat::mc "%s doesn't seem to exist in %s"] $distfile $distpath]"
            if {![file writable $distpath]} {
                return -code error [format [msgcat::mc "%s must be writable"] $distpath]
            }
            if {!$sorted} {
                sortsites
                set sorted yes
            }
            # Bind the site list for this tag; fall back to master_sites
            # when the tag has no site list.
            variable portfetch::$url_var
            if {![info exists $url_var]} {
                ui_error [format [msgcat::mc "No defined site for tag: %s, using master_sites"] $url_var]
                set url_var master_sites
                variable portfetch::$url_var
            }
            unset -nocomplain fetched
            foreach site [set $url_var] {
                ui_msg "$UI_PREFIX [format [msgcat::mc "Attempting to fetch %s from %s"] $distfile $site]"
                set file_url [portfetch::assemble_url $site $distfile]
                set effectiveURL ""
                # Download to a .TMP file and rename into place only on
                # success, so an interrupted transfer never leaves a
                # truncated distfile behind.
                if {![catch {eval curl fetch --effective-url effectiveURL $fetch_options {$file_url} ${distpath}/${distfile}.TMP} result] &&
                    ![catch {file rename -force "${distpath}/${distfile}.TMP" "${distpath}/${distfile}"} result]} {

                    # Special hack to check for sourceforge mirrors, which don't return a proper error code on failure
                    if {![string equal $effectiveURL $file_url] &&
                        [string match "*sourceforge*" $file_url] &&
                        [string match "*failedmirror*" $effectiveURL]} {

                        # *SourceForge hackage in effect*
                        # The url seen by curl seems to have been a redirect to the sourceforge mirror page
                        ui_debug "[msgcat::mc "Fetching from sourceforge mirror failed"]"
                        file delete -force "${distpath}/${distfile}.TMP"

                        # Continue on to try the next mirror, if any
                    } else {

                        # Successful fetch
                        set fetched 1
                        break

                    }

                } else {
                    ui_debug "[msgcat::mc "Fetching failed:"]: $result"
                    file delete -force "${distpath}/${distfile}.TMP"
                }
            }
            if {![info exists fetched]} {
                return -code error [msgcat::mc "fetch failed"]
            }
        }
    }
    return 0
}
678
# Utility function to delete fetched files.
proc portfetch::fetch_deletefiles {args} {
    global distpath
    variable fetch_urls
    # Remove every distfile from fetch_urls that is present in distpath.
    foreach {url_var distfile} $fetch_urls {
        set path "${distpath}/${distfile}"
        if {[file isfile $path]} {
            file delete -force $path
        }
    }
}
689
# Utility function to add files to a list of fetched files.
proc portfetch::fetch_addfilestomap {filemapname} {
    global distpath $filemapname
    variable fetch_urls
    # Record every distfile from fetch_urls that exists on disk.
    foreach {url_var distfile} $fetch_urls {
        set path $distpath/$distfile
        if {[file isfile $path]} {
            filemap set $filemapname $path 1
        }
    }
}
700
# Initialize fetch target and call checkfiles.
# Adjusts the global distpath (alternate work path and dist_subdir)
# exactly once per run, guarded by fetch_init_done.
proc portfetch::fetch_init {args} {
    global distfiles distname distpath all_dist_files dist_subdir fetch.type fetch_init_done
    global altprefix usealtworkpath

    if {[info exists distpath] && [info exists dist_subdir] && ![info exists fetch_init_done]} {

        # start gsoc08-privileges
        if {[info exists usealtworkpath] && $usealtworkpath == "yes"} {
            # I have removed ![file writable $distpath] from the if condition as
            # the writable condition seems to get confused by effective uids.
            set distpath "$altprefix/[ string range $distpath 1 end ]"
            ui_debug "Going to use $distpath for fetch."
        }
        # end gsoc08-privileges
        set distpath ${distpath}/${dist_subdir}
        set fetch_init_done yes
    }
    portfetch::checkfiles
}
721
# Pre-run hook for the fetch target: announce the fetch phase to the user.
proc portfetch::fetch_start {args} {
    global UI_PREFIX name

    ui_msg "$UI_PREFIX [format [msgcat::mc "Fetching %s"] $name]"
}
727
# Main fetch routine
# If all_dist_files is not populated and $fetch.type == standard, then
# there are no files to download. Otherwise, either do a cvs checkout
# or call the standard fetchfiles procedure
proc portfetch::fetch_main {args} {
    global distname distpath all_dist_files fetch.type

    # Nothing to do when a standard fetch has no distfiles at all.
    if {![info exists all_dist_files] && "${fetch.type}" == "standard"} {
        return 0
    }

    # Dispatch to the VCS-specific fetcher; everything else (standard or
    # unrecognized) uses the plain distfile downloader.
    if {"${fetch.type}" == "cvs"} {
        return [cvsfetch]
    } elseif {"${fetch.type}" == "svn"} {
        return [svnfetch]
    } elseif {"${fetch.type}" == "git"} {
        return [gitfetch]
    } elseif {"${fetch.type}" == "hg"} {
        return [hgfetch]
    }
    return [portfetch::fetchfiles]
}
Note: See TracBrowser for help on using the repository browser.