source: trunk/base/src/port1.0/portfetch.tcl

Last change on this file was 146644, checked in by raimue@…, 4 years ago

Clarify info message for non-existing files

  • Property svn:eol-style set to native
  • Property svn:keywords set to Id
File size: 21.9 KB
# -*- coding: utf-8; mode: tcl; tab-width: 4; indent-tabs-mode: nil; c-basic-offset: 4 -*- vim:fenc=utf-8:ft=tcl:et:sw=4:ts=4:sts=4
# $Id: portfetch.tcl 146644 2016-03-14 19:10:35Z raimue@macports.org $
#
# Copyright (c) 2004 - 2014 The MacPorts Project
# Copyright (c) 2002 - 2003 Apple Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
# 1. Redistributions of source code must retain the above copyright
#    notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
#    notice, this list of conditions and the following disclaimer in the
#    documentation and/or other materials provided with the distribution.
# 3. Neither the name of Apple Inc. nor the names of its contributors
#    may be used to endorse or promote products derived from this software
#    without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
#

package provide portfetch 1.0
package require fetch_common 1.0
package require portutil 1.0
package require Pextlib 1.0

set org.macports.fetch [target_new org.macports.fetch portfetch::fetch_main]
target_init ${org.macports.fetch} portfetch::fetch_init
target_provides ${org.macports.fetch} fetch
target_requires ${org.macports.fetch} main
target_prerun ${org.macports.fetch} portfetch::fetch_start

namespace eval portfetch {
    namespace export suffix
    variable fetch_urls {}
}

# Define fetch-related options
options master_sites patch_sites extract.suffix distfiles patchfiles use_bzip2 use_lzma use_xz use_zip use_7z use_lzip use_dmg dist_subdir \
    fetch.type fetch.user fetch.password fetch.use_epsv fetch.ignore_sslcert \
    master_sites.mirror_subdir patch_sites.mirror_subdir \
    bzr.url bzr.revision \
    cvs.module cvs.root cvs.password cvs.date cvs.tag cvs.method \
    svn.url svn.revision svn.method \
    git.cmd git.url git.branch \
    hg.cmd hg.url hg.tag

# XXX we use the command framework to buy us some useful features,
# but this is not a user-modifiable command
commands bzr
commands cvs
commands svn

# Defaults
default extract.suffix .tar.gz
default fetch.type standard

default bzr.cmd {[findBinary bzr $portutil::autoconf::bzr_path]}
default bzr.dir {${workpath}}
default bzr.revision {-1}
default bzr.pre_args {"--builtin --no-aliases checkout --lightweight"}
default bzr.args ""
default bzr.post_args {"-r ${bzr.revision} ${bzr.url} ${worksrcdir}"}

default cvs.cmd {[findBinary cvs $portutil::autoconf::cvs_path]}
default cvs.password ""
default cvs.dir {${workpath}}
default cvs.method {export}
default cvs.module {$distname}
default cvs.tag ""
default cvs.date ""
default cvs.env {CVS_PASSFILE=${workpath}/.cvspass}
default cvs.pre_args {"-z9 -f -d ${cvs.root}"}
default cvs.args ""
default cvs.post_args {"${cvs.module}"}

default svn.cmd {[portfetch::find_svn_path]}
default svn.dir {${workpath}}
default svn.method {export}
default svn.revision ""
default svn.env {}
default svn.pre_args {"--non-interactive --trust-server-cert"}
default svn.args ""
default svn.post_args ""

default git.cmd {[findBinary git $portutil::autoconf::git_path]}
default git.dir {${workpath}}
default git.branch {}

default hg.cmd {[findBinary hg $portutil::autoconf::hg_path]}
default hg.dir {${workpath}}
default hg.tag {tip}

# Set distfiles
default distfiles {[list [portfetch::suffix $distname]]}
default dist_subdir {${name}}
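
# Illustrative example (hypothetical values, not from a real port): for a port
# with name "foo", distname "foo-1.0" and the default extract.suffix of
# .tar.gz, the defaults above expand to
#     distfiles   => foo-1.0.tar.gz
#     dist_subdir => foo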

# user name & password
default fetch.user ""
default fetch.password ""
# Use EPSV for FTP transfers
default fetch.use_epsv "yes"
# Ignore SSL certificate
default fetch.ignore_sslcert "no"
# Use remote timestamps
default fetch.remote_time "no"

default fallback_mirror_site "macports"
default global_mirror_site "macports_distfiles"
default mirror_sites.listfile {"mirror_sites.tcl"}
default mirror_sites.listpath {"port1.0/fetch"}

# Option-executed procedures
option_proc use_bzip2 portfetch::set_extract_type
option_proc use_lzma  portfetch::set_extract_type
option_proc use_xz    portfetch::set_extract_type
option_proc use_zip   portfetch::set_extract_type
option_proc use_7z    portfetch::set_extract_type
option_proc use_lzip  portfetch::set_extract_type
option_proc use_dmg   portfetch::set_extract_type

option_proc fetch.type portfetch::set_fetch_type

proc portfetch::set_extract_type {option action args} {
    global extract.suffix
    if {[string equal ${action} "set"] && [tbool args]} {
        switch $option {
            use_bzip2 {
                set extract.suffix .tar.bz2
                if {![catch {findBinary lbzip2} result]} {
                    depends_extract-append bin:lbzip2:lbzip2
                }
            }
            use_lzma {
                set extract.suffix .tar.lzma
                depends_extract-append bin:lzma:xz
            }
            use_xz {
                set extract.suffix .tar.xz
                depends_extract-append bin:xz:xz
            }
            use_zip {
                set extract.suffix .zip
                depends_extract-append bin:unzip:unzip
            }
            use_7z {
                set extract.suffix .7z
                depends_extract-append bin:7za:p7zip
            }
            use_lzip {
                set extract.suffix .tar.lz
                depends_extract-append bin:lzip:lzip
            }
            use_dmg {
                set extract.suffix .dmg
            }
        }
    }
}
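
# Illustrative Portfile fragment (hypothetical): a port declaring
#     use_bzip2   yes
# runs set_extract_type through the option_proc registrations above, so
# extract.suffix becomes .tar.bz2 and, if an lbzip2 binary can be found,
# bin:lbzip2:lbzip2 is appended to depends_extract.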

proc portfetch::set_fetch_type {option action args} {
    global os.platform os.major
    if {[string equal ${action} "set"]} {
        if {$args ne "standard"} {
            distfiles
        }
        switch $args {
            bzr {
                depends_fetch-append bin:bzr:bzr
            }
            cvs {
                depends_fetch-append bin:cvs:cvs
            }
            svn {
                if {${os.major} >= 10 || ${os.platform} ne "darwin"} {
                    depends_fetch-append bin:svn:subversion
                } else {
                    depends_fetch-append port:subversion
                }
            }
            git {
                depends_fetch-append bin:git:git
            }
            hg {
                depends_fetch-append bin:hg:mercurial
            }
        }
    }
}
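
# Illustrative Portfile fragment (hypothetical): a port declaring
#     fetch.type  git
# runs set_fetch_type above, which clears the default distfiles (there is
# nothing to download from master_sites) and appends bin:git:git to
# depends_fetch.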

proc portfetch::find_svn_path {args} {
    global prefix os.platform os.major
    # Snow Leopard is the first Mac OS X version to include a recent enough svn (1.6.x) to support the --trust-server-cert option.
    if {${os.major} >= 10 || ${os.platform} ne "darwin"} {
        return [findBinary svn $portutil::autoconf::svn_path]
    } else {
        return ${prefix}/bin/svn
    }
}

set_ui_prefix


# Given a distname, return the distname with extract.suffix appended
proc portfetch::suffix {distname} {
    global extract.suffix
    return "${distname}${extract.suffix}"
}
# XXX import suffix into the global namespace as it is currently used from
# Portfiles, but it should really live somewhere else
namespace import portfetch::suffix
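
# Illustrative usage (hypothetical distname): with the default extract.suffix,
#     suffix foo-1.0
# returns "foo-1.0.tar.gz"; a Portfile that sets extract.suffix to .tar.xz
# would get "foo-1.0.tar.xz" instead.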

# Checks patch files and their tags to assemble url lists for later fetching
proc portfetch::checkpatchfiles {urls} {
    global patchfiles all_dist_files patch_sites filespath
    upvar $urls fetch_urls

    if {[info exists patchfiles]} {
        foreach file $patchfiles {
            if {![file exists "${filespath}/${file}"]} {
                set distsite [getdisttag $file]
                set file [getdistname $file]
                lappend all_dist_files $file
                if {$distsite ne ""} {
                    lappend fetch_urls $distsite $file
                } elseif {[info exists patch_sites]} {
                    lappend fetch_urls patch_sites $file
                } else {
                    lappend fetch_urls master_sites $file
                }
            }
        }
    }
}

# Checks dist files and their tags to assemble url lists for later fetching
proc portfetch::checkdistfiles {urls} {
    global distfiles all_dist_files filespath
    upvar $urls fetch_urls

    if {[info exists distfiles]} {
        foreach file $distfiles {
            if {![file exists "${filespath}/${file}"]} {
                set distsite [getdisttag $file]
                set file [getdistname $file]
                lappend all_dist_files $file
                if {$distsite ne ""} {
                    lappend fetch_urls $distsite $file
                } else {
                    lappend fetch_urls master_sites $file
                }
            }
        }
    }
}
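
# Illustrative example (hypothetical file names): getdisttag/getdistname split
# an optional site tag off an entry, so a Portfile line such as
#     distfiles   foo-1.0.tar.gz:sitetag
# adds the pair {sitetag foo-1.0.tar.gz} to fetch_urls, while an untagged
# entry is paired with master_sites (or patch_sites for patchfiles).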

# returns full path to mirror list file
proc portfetch::get_full_mirror_sites_path {} {
    global mirror_sites.listfile mirror_sites.listpath porturl
    return [getportresourcepath $porturl [file join ${mirror_sites.listpath} ${mirror_sites.listfile}]]
}

# Perform the full checksites/checkpatchfiles/checkdistfiles sequence.
# This procedure is used by the distcheck target.
proc portfetch::checkfiles {urls} {
    global global_mirror_site fallback_mirror_site
    upvar $urls fetch_urls

    checksites [list patch_sites [list $global_mirror_site $fallback_mirror_site PATCH_SITE_LOCAL] \
                master_sites [list $global_mirror_site $fallback_mirror_site MASTER_SITE_LOCAL]] \
               [get_full_mirror_sites_path]
    checkpatchfiles fetch_urls
    checkdistfiles fetch_urls
}
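
# Illustrative result (hypothetical port): after checkfiles runs, fetch_urls
# is a flat list of {url_var distfile} pairs, e.g.
#     patch_sites fix-build.patch master_sites foo-1.0.tar.gz
# which fetchfiles later walks two elements at a time.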

# Perform a bzr fetch
proc portfetch::bzrfetch {args} {
    global env patchfiles

    # Behind a proxy bzr will fail with the following error if proxies
    # listed in macports.conf appear in the environment in their
    # unmodified form:
    #   bzr: ERROR: Invalid url supplied to transport:
    #   "proxy.example.com:8080": No host component
    # Set the "http_proxy" and "HTTPS_PROXY" environment variables
    # to valid URLs by prepending "http://" and appending "/".
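    # For example, "proxy.example.com:8080" becomes "http://proxy.example.com:8080/".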
    if {   [info exists env(http_proxy)]
        && [string compare -length 7 {http://} $env(http_proxy)] != 0} {
        set orig_http_proxy $env(http_proxy)
        set env(http_proxy) http://${orig_http_proxy}/
    }

    if {   [info exists env(HTTPS_PROXY)]
        && [string compare -length 7 {http://} $env(HTTPS_PROXY)] != 0} {
        set orig_https_proxy $env(HTTPS_PROXY)
        set env(HTTPS_PROXY) http://${orig_https_proxy}/
    }

    try {
        if {[catch {command_exec bzr "" "2>&1"} result]} {
            return -code error [msgcat::mc "Bazaar checkout failed"]
        }
    } finally {
        if {[info exists orig_http_proxy]} {
            set env(http_proxy) ${orig_http_proxy}
        }
        if {[info exists orig_https_proxy]} {
            set env(HTTPS_PROXY) ${orig_https_proxy}
        }
    }

    if {[info exists patchfiles]} {
        return [portfetch::fetchfiles]
    }

    return 0
}

# Perform a CVS login and fetch, storing the CVS login
# information in a custom .cvspass file
proc portfetch::cvsfetch {args} {
    global env workpath cvs.env cvs.cmd cvs.args cvs.post_args \
           cvs.root cvs.date cvs.tag cvs.method cvs.password \
           patch_sites patchfiles filespath

    set cvs.args "${cvs.method} ${cvs.args}"
    if {${cvs.method} == "export" && ![string length ${cvs.tag}] && ![string length ${cvs.date}]} {
        set cvs.tag "HEAD"
    }
    if {[string length ${cvs.tag}]} {
        set cvs.args "${cvs.args} -r ${cvs.tag}"
    }

    if {[string length ${cvs.date}]} {
        set cvs.args "${cvs.args} -D ${cvs.date}"
    }

    if {[regexp ^:pserver: ${cvs.root}]} {
        set savecmd ${cvs.cmd}
        set saveargs ${cvs.args}
        set savepost_args ${cvs.post_args}
        set cvs.cmd "echo ${cvs.password} | ${cvs.cmd}"
        set cvs.args login
        set cvs.post_args ""
        if {[catch {command_exec cvs -notty "" "2>&1"} result]} {
            return -code error [msgcat::mc "CVS login failed"]
        }
        set cvs.cmd ${savecmd}
        set cvs.args ${saveargs}
        set cvs.post_args ${savepost_args}
    } else {
        set env(CVS_RSH) ssh
    }

    if {[catch {command_exec cvs "" "2>&1"} result]} {
        return -code error [msgcat::mc "CVS check out failed"]
    }

    if {[info exists patchfiles]} {
        return [portfetch::fetchfiles]
    }
    return 0
}

# Given a URL to a Subversion repository, if the URL is http:// or
# https:// and MacPorts has been configured with a proxy for that URL
# type, then return command line options that should be passed to the
# svn command line client to enable use of that proxy.  There are no
# proxies for Subversion's native protocol, identified by svn:// URLs.
proc portfetch::svn_proxy_args {url} {
    global env

    if {   [string compare -length 7 {http://} ${url}] == 0
        && [info exists env(http_proxy)]} {
        set proxy_str $env(http_proxy)
    } elseif {   [string compare -length 8 {https://} ${url}] == 0
              && [info exists env(HTTPS_PROXY)]} {
        set proxy_str $env(HTTPS_PROXY)
    } else {
        return ""
    }
    regexp {(.*://)?([[:alnum:].-]+)(:(\d+))?} $proxy_str - - proxy_host - proxy_port
    set ret "--config-option servers:global:http-proxy-host=${proxy_host}"
    if {$proxy_port ne ""} {
        append ret " --config-option servers:global:http-proxy-port=${proxy_port}"
    }
    return $ret
}
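
# Illustrative example (hypothetical proxy): with env(http_proxy) set to
# "http://proxy.example.com:8080" and an http:// svn.url, the proc above
# returns
#     --config-option servers:global:http-proxy-host=proxy.example.com
#     --config-option servers:global:http-proxy-port=8080
# joined into a single string.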

# Perform an svn fetch
proc portfetch::svnfetch {args} {
    global svn.args svn.method svn.revision svn.url patchfiles

    if {[regexp {\s} ${svn.url}]} {
        return -code error [msgcat::mc "Subversion URL cannot contain whitespace"]
    }

    if {[string length ${svn.revision}]} {
        append svn.url "@${svn.revision}"
    }

    set proxy_args [svn_proxy_args ${svn.url}]

    set svn.args "${svn.method} ${svn.args} ${proxy_args} ${svn.url}"

    if {[catch {command_exec svn "" "2>&1"} result]} {
        return -code error [msgcat::mc "Subversion check out failed"]
    }

    if {[info exists patchfiles]} {
        return [portfetch::fetchfiles]
    }

    return 0
}

# Perform a git fetch
proc portfetch::gitfetch {args} {
    global worksrcpath patchfiles \
           git.url git.branch git.sha1 git.cmd

    set options "-q"
    if {${git.branch} eq ""} {
        # if we're just using HEAD, we can make a shallow repo
        set options "$options --depth=1"
    }
    set cmdstring "${git.cmd} clone $options ${git.url} ${worksrcpath} 2>&1"
    ui_debug "Executing: $cmdstring"
    if {[catch {system $cmdstring} result]} {
        return -code error [msgcat::mc "Git clone failed"]
    }

    if {${git.branch} ne ""} {
        set env "GIT_DIR=${worksrcpath}/.git GIT_WORK_TREE=${worksrcpath}"
        set cmdstring "$env ${git.cmd} checkout -q ${git.branch} 2>&1"
        ui_debug "Executing $cmdstring"
        if {[catch {system $cmdstring} result]} {
            return -code error [msgcat::mc "Git checkout failed"]
        }
    }

    if {[info exists patchfiles]} {
        return [portfetch::fetchfiles]
    }

    return 0
}

# Perform a mercurial fetch.
proc portfetch::hgfetch {args} {
    global worksrcpath prefix_frozen patchfiles hg.url hg.tag hg.cmd \
           fetch.ignore_sslcert

    set insecureflag ""
    if {${fetch.ignore_sslcert}} {
        set insecureflag " --insecure"
    }

    set cmdstring "${hg.cmd} clone${insecureflag} --rev \"${hg.tag}\" ${hg.url} ${worksrcpath} 2>&1"
    ui_debug "Executing: $cmdstring"
    if {[catch {system $cmdstring} result]} {
        return -code error [msgcat::mc "Mercurial clone failed"]
    }

    if {[info exists patchfiles]} {
        return [portfetch::fetchfiles]
    }

    return 0
}

# Perform a standard fetch, assembling fetch urls from
# the listed url variable and associated distfile
proc portfetch::fetchfiles {args} {
    global distpath all_dist_files UI_PREFIX \
           fetch.user fetch.password fetch.use_epsv fetch.ignore_sslcert fetch.remote_time \
           fallback_mirror_site portverbose usealtworkpath altprefix
    variable fetch_urls
    variable urlmap

    set fetch_options {}
    if {[string length ${fetch.user}] || [string length ${fetch.password}]} {
        lappend fetch_options -u
        lappend fetch_options "${fetch.user}:${fetch.password}"
    }
    if {${fetch.use_epsv} != "yes"} {
        lappend fetch_options "--disable-epsv"
    }
    if {${fetch.ignore_sslcert} != "no"} {
        lappend fetch_options "--ignore-ssl-cert"
    }
    if {${fetch.remote_time} != "no"} {
        lappend fetch_options "--remote-time"
    }
    if {$portverbose eq "yes"} {
        lappend fetch_options "--progress"
        lappend fetch_options "builtin"
    } elseif {[llength [info commands ui_progress_download]] > 0} {
        lappend fetch_options "--progress"
        lappend fetch_options "ui_progress_download"
    }
    set sorted no

    foreach {url_var distfile} $fetch_urls {
        if {![file isfile "${distpath}/${distfile}"]} {
            ui_info "$UI_PREFIX [format [msgcat::mc "%s does not exist in %s"] $distfile $distpath]"
            if {![file writable $distpath]} {
                return -code error [format [msgcat::mc "%s must be writable"] $distpath]
            }
            if {!$usealtworkpath && [file isfile ${altprefix}${distpath}/${distfile}]} {
                if {[catch {file link -hard "${distpath}/${distfile}" "${altprefix}${distpath}/${distfile}"}]} {
                    ui_debug "failed to hardlink ${distfile} into distpath, copying instead"
                    file copy "${altprefix}${distpath}/${distfile}" "${distpath}/${distfile}"
                }
                ui_info "Found $distfile in ${altprefix}${distpath}"
                continue
            }
            if {!$sorted} {
                sortsites fetch_urls [mirror_sites $fallback_mirror_site {} {} [get_full_mirror_sites_path]] master_sites
                set sorted yes
            }
            if {![info exists urlmap($url_var)]} {
                ui_error [format [msgcat::mc "No defined site for tag: %s, using master_sites"] $url_var]
                set urlmap($url_var) $urlmap(master_sites)
            }
            unset -nocomplain fetched
            set lastError ""
            foreach site $urlmap($url_var) {
                ui_notice "$UI_PREFIX [format [msgcat::mc "Attempting to fetch %s from %s"] $distfile $site]"
                set file_url [portfetch::assemble_url $site $distfile]
                try -pass_signal {
                    curl fetch {*}$fetch_options $file_url "${distpath}/${distfile}.TMP"
                    file rename -force "${distpath}/${distfile}.TMP" "${distpath}/${distfile}"
                    set fetched 1
                    break
                } catch {{*} eCode eMessage} {
                    ui_debug [msgcat::mc "Fetching distfile failed: %s" $eMessage]
                    set lastError $eMessage
                } finally {
                    file delete -force "${distpath}/${distfile}.TMP"
                }
            }
            if {![info exists fetched]} {
                if {$lastError ne ""} {
                    error $lastError
                } else {
                    error [msgcat::mc "fetch failed"]
                }
            }
        }
    }
    return 0
}

# Utility function to delete fetched files.
proc portfetch::fetch_deletefiles {args} {
    global distpath
    variable fetch_urls
    foreach {url_var distfile} $fetch_urls {
        if {[file isfile $distpath/$distfile]} {
            file delete -force "${distpath}/${distfile}"
        }
    }
}

# Utility function to add files to a list of fetched files.
proc portfetch::fetch_addfilestomap {filemapname} {
    global distpath $filemapname
    variable fetch_urls
    foreach {url_var distfile} $fetch_urls {
        if {[file isfile $distpath/$distfile]} {
            filemap set $filemapname $distpath/$distfile 1
        }
    }
}

# Initialize fetch target and call checkfiles.
proc portfetch::fetch_init {args} {
    variable fetch_urls

    portfetch::checkfiles fetch_urls
}

proc portfetch::fetch_start {args} {
    global UI_PREFIX subport distpath

    ui_notice "$UI_PREFIX [format [msgcat::mc "Fetching distfiles for %s"] $subport]"

    # create and chown $distpath
    if {![file isdirectory $distpath]} {
        if {[catch {file mkdir $distpath} result]} {
            elevateToRoot "fetch"
            if {[catch {file mkdir $distpath} result]} {
                return -code error [format [msgcat::mc "Unable to create distribution files path: %s"] $result]
            }
            chownAsRoot $distpath
            dropPrivileges
        }
    }
    if {![file owned $distpath]} {
        if {[catch {chownAsRoot $distpath} result]} {
            if {[file writable $distpath]} {
                ui_warn "$UI_PREFIX [format [msgcat::mc "Couldn't change ownership of distribution files path to macports user: %s"] $result]"
            } else {
                return -code error [format [msgcat::mc "Distribution files path %s not writable and could not be chowned: %s"] $distpath $result]
            }
        }
    }

    portfetch::check_dns
}

# Main fetch routine
# If all_dist_files is not populated and fetch.type is standard, there are
# no files to download. Otherwise, either check out from the configured
# version control system or call the standard fetchfiles procedure.
proc portfetch::fetch_main {args} {
    global all_dist_files fetch.type

    # Check for files, download if necessary
    if {![info exists all_dist_files] && "${fetch.type}" == "standard"} {
        return 0
    }

    # Fetch the files
    switch -- "${fetch.type}" {
        bzr     { return [bzrfetch] }
        cvs     { return [cvsfetch] }
        svn     { return [svnfetch] }
        git     { return [gitfetch] }
        hg      { return [hgfetch] }
        standard -
        default { return [portfetch::fetchfiles] }
    }
}