source: trunk/base/src/package1.0/portarchivefetch.tcl

Last change on this file was r147456, checked in by raimue@…, on 2016-04-04

archivefetch: no fallback to default archive_sites

If a ports tree provides a modified version of a port, falling back to the
default ports tree might fetch a binary archive that was built from a different
Portfile. As this binary archive would not have the expected contents and might
be incompatible, only the archive sites defined for this ports tree should be
checked.

Using any globally defined archive site from archive_sites.conf is still fine, as
we expect the user to be aware that such a site has to match the ports trees
defined in sources.conf.
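
In practical terms, the mirror list for archives is now resolved only within the
ports tree that provided the Portfile. A minimal sketch of that lookup (compare
get_full_archive_sites_path below, whose trailing "no" argument is what suppresses
the fallback):

    # resolve archive_sites.tcl only in the ports tree the port came from;
    # the final "no" disables falling back to the default ports tree
    set mirrorfile [getportresourcepath $porturl \
        [file join port1.0/fetch archive_sites.tcl] no]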

  • Property svn:eol-style set to native
  • Property svn:keywords set to Id
File size: 14.6 KB
# -*- coding: utf-8; mode: tcl; tab-width: 4; indent-tabs-mode: nil; c-basic-offset: 4 -*- vim:fenc=utf-8:ft=tcl:et:sw=4:ts=4:sts=4
# $Id: portarchivefetch.tcl 147456 2016-04-04 14:24:27Z raimue@macports.org $
#
# Copyright (c) 2002 - 2003 Apple Inc.
# Copyright (c) 2004 - 2013 The MacPorts Project
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
# 1. Redistributions of source code must retain the above copyright
#    notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
#    notice, this list of conditions and the following disclaimer in the
#    documentation and/or other materials provided with the distribution.
# 3. Neither the name of Apple Inc. nor the names of its contributors
#    may be used to endorse or promote products derived from this software
#    without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
#

package provide portarchivefetch 1.0
package require fetch_common 1.0
package require portutil 1.0
package require Pextlib 1.0

set org.macports.archivefetch [target_new org.macports.archivefetch portarchivefetch::archivefetch_main]
#target_init ${org.macports.archivefetch} portarchivefetch::archivefetch_init
target_provides ${org.macports.archivefetch} archivefetch
target_requires ${org.macports.archivefetch} main
target_prerun ${org.macports.archivefetch} portarchivefetch::archivefetch_start

namespace eval portarchivefetch {
    variable archivefetch_urls {}
}

options archive_sites archivefetch.user archivefetch.password \
    archivefetch.use_epsv archivefetch.ignore_sslcert \
    archive_sites.mirror_subdir archivefetch.pubkeys \
    archive.subdir

# user name & password
default archivefetch.user ""
default archivefetch.password ""
# Use EPSV for FTP transfers
default archivefetch.use_epsv no
# Ignore SSL certificate
default archivefetch.ignore_sslcert no
default archivefetch.pubkeys {$archivefetch_pubkeys}

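# By default, archive_sites is computed by filter_sites below from the mirror
# definitions in the ports tree's archive_sites.tcl and from archive_sites.conf.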
default archive_sites {[portarchivefetch::filter_sites]}
default archive_sites.listfile {"archive_sites.tcl"}
default archive_sites.listpath {"port1.0/fetch"}
default archive.subdir {${subport}}

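# Assemble the list of usable archive sites: keep only mirror definitions whose
# archive_prefix, archive_frameworks_dir and archive_applications_dir match this
# installation and whose archive_type is supported; each entry is tagged with
# its archive type.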
proc portarchivefetch::filter_sites {} {
    global prefix frameworks_dir applications_dir porturl \
        portfetch::mirror_sites::sites portfetch::mirror_sites::archive_type \
        portfetch::mirror_sites::archive_prefix \
        portfetch::mirror_sites::archive_frameworks_dir \
        portfetch::mirror_sites::archive_applications_dir

    # get defaults from ports tree resources
    set mirrorfile [get_full_archive_sites_path]
    if {[file exists $mirrorfile]} {
        source $mirrorfile
    }
    # get archive_sites.conf values
    foreach {key val} [get_archive_sites_conf_values] {
        set $key $val
    }

    set ret {}
    foreach site [array names portfetch::mirror_sites::archive_prefix] {
        set missing 0
        foreach var {archive_frameworks_dir archive_applications_dir archive_type} {
            if {![info exists portfetch::mirror_sites::${var}($site)]} {
                ui_warn "no $var configured for site '$site'"
                set missing 1
            }
        }
        if {$missing} {
            continue
        }
        if {$portfetch::mirror_sites::sites($site) ne {} &&
            $portfetch::mirror_sites::archive_prefix($site) == $prefix &&
            $portfetch::mirror_sites::archive_frameworks_dir($site) == $frameworks_dir &&
            $portfetch::mirror_sites::archive_applications_dir($site) == $applications_dir &&
            ![catch {archiveTypeIsSupported $portfetch::mirror_sites::archive_type($site)}]} {
            # using the archive type as a tag
            lappend ret ${site}::$portfetch::mirror_sites::archive_type($site)
        }
    }

    # check if porturl itself points to an archive
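    # If it does, add the porturl's parent URL as a site, tagged with the file
    # extension as the archive type, and clear archive.subdir since the archive
    # sits directly at that URL.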
    if {[file rootname [file tail $porturl]] eq [file rootname [get_portimage_name]] && [file extension $porturl] ne ""} {
        lappend ret [string range $porturl 0 end-[string length [file tail $porturl]]]:[string range [file extension $porturl] 1 end]
        archive.subdir
    }
    return $ret
}

set_ui_prefix

# Checks possible archive files to assemble url lists for later fetching
proc portarchivefetch::checkarchivefiles {urls} {
    global all_archive_files archivefetch.fulldestpath archive_sites
    upvar $urls fetch_urls

    # Define archive directory path
    set archivefetch.fulldestpath [file join [option portdbpath] incoming/verified]
    set archive.rootname [file rootname [get_portimage_name]]

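    # Build one candidate archive file name per distinct archive type tag in
    # archive_sites.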
    foreach entry [option archive_sites] {
        # the archive type is used as a tag
        set type [lindex [split $entry :] end]
        if {![info exists seen($type)]} {
            set archive.file "${archive.rootname}.${type}"
            lappend all_archive_files ${archive.file}
            lappend fetch_urls $type ${archive.file}
            set seen($type) 1
        }
    }
}

# returns full path to mirror list file
proc portarchivefetch::get_full_archive_sites_path {} {
    global archive_sites.listfile archive_sites.listpath porturl
    # look up archive sites only from this ports tree,
    # do not fall back to the default
    return [getportresourcepath $porturl [file join ${archive_sites.listpath} ${archive_sites.listfile}] no]
}

# Perform the full checksites/checkarchivefiles sequence.
proc portarchivefetch::checkfiles {urls} {
    upvar $urls fetch_urls

    portfetch::checksites [list archive_sites [list {} {} ARCHIVE_SITE_LOCAL]] \
                          [get_full_archive_sites_path]
    checkarchivefiles fetch_urls
}


# Perform a standard fetch, assembling fetch urls from
# the listed url variable and associated archive file
proc portarchivefetch::fetchfiles {args} {
    global archivefetch.fulldestpath UI_PREFIX \
           archivefetch.user archivefetch.password archivefetch.use_epsv \
           archivefetch.ignore_sslcert \
           portverbose ports_binary_only
    variable archivefetch_urls
    variable ::portfetch::urlmap

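    # Create the destination directory, elevating to root if the first attempt
    # fails, and drop privileges again afterwards.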
    if {![file isdirectory ${archivefetch.fulldestpath}]} {
        if {[catch {file mkdir ${archivefetch.fulldestpath}} result]} {
            elevateToRoot "archivefetch"
            set elevated yes
            if {[catch {file mkdir ${archivefetch.fulldestpath}} result]} {
                return -code error [format [msgcat::mc "Unable to create archive path: %s"] $result]
            }
        }
    }
    set incoming_path [file join [option portdbpath] incoming]
    chownAsRoot $incoming_path
    if {[info exists elevated] && $elevated == yes} {
        dropPrivileges
    }

    set fetch_options {}
    if {[string length ${archivefetch.user}] || [string length ${archivefetch.password}]} {
        lappend fetch_options -u
        lappend fetch_options "${archivefetch.user}:${archivefetch.password}"
    }
    if {${archivefetch.use_epsv} != "yes"} {
        lappend fetch_options "--disable-epsv"
    }
    if {${archivefetch.ignore_sslcert} != "no"} {
        lappend fetch_options "--ignore-ssl-cert"
    }
    if {$portverbose eq "yes"} {
        lappend fetch_options "--progress"
        lappend fetch_options "builtin"
    } elseif {[llength [info commands ui_progress_download]] > 0} {
        lappend fetch_options "--progress"
        lappend fetch_options "ui_progress_download"
    }
    set sorted no

    set existing_archive [find_portarchive_path]

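    # For each wanted archive: skip it if it is already present locally,
    # otherwise try the configured sites in turn, downloading into
    # ${incoming_path}/${archive}.TMP.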
    foreach {url_var archive} $archivefetch_urls {
        if {![file isfile ${archivefetch.fulldestpath}/${archive}] && $existing_archive eq ""} {
            ui_info "$UI_PREFIX [format [msgcat::mc "%s doesn't seem to exist in %s"] $archive ${archivefetch.fulldestpath}]"
            if {![file writable ${archivefetch.fulldestpath}]} {
                return -code error [format [msgcat::mc "%s must be writable"] ${archivefetch.fulldestpath}]
            }
            if {![file writable $incoming_path]} {
                return -code error [format [msgcat::mc "%s must be writable"] $incoming_path]
            }
            if {!$sorted} {
                portfetch::sortsites archivefetch_urls {} archive_sites
                set sorted yes
            }
            if {![info exists urlmap($url_var)]} {
                ui_error [format [msgcat::mc "No defined site for tag: %s, using archive_sites"] $url_var]
                set urlmap($url_var) $urlmap(archive_sites)
            }
            set failed_sites 0
            unset -nocomplain fetched
            set lastError ""
            foreach site $urlmap($url_var) {
                if {[string index $site end] ne "/"} {
                    append site "/[option archive.subdir]"
                } else {
                    append site [option archive.subdir]
                }
                ui_msg "$UI_PREFIX [format [msgcat::mc "Attempting to fetch %s from %s"] $archive ${site}]"
                set file_url [portfetch::assemble_url $site $archive]
                set effectiveURL ""
                try {
                    curl fetch --effective-url effectiveURL {*}$fetch_options $file_url "${incoming_path}/${archive}.TMP"
                    set fetched 1
                    break
                } catch {{POSIX SIG SIGINT} eCode eMessage} {
                    ui_debug [msgcat::mc "Aborted fetching archive due to SIGINT"]
                    file delete -force "${incoming_path}/${archive}.TMP"
                    throw
                } catch {{POSIX SIG SIGTERM} eCode eMessage} {
                    ui_debug [msgcat::mc "Aborted fetching archive due to SIGTERM"]
                    file delete -force "${incoming_path}/${archive}.TMP"
                    throw
                } catch {{*} eCode eMessage} {
                    ui_debug [msgcat::mc "Fetching archive failed: %s" $eMessage]
                    set lastError $eMessage
                    file delete -force "${incoming_path}/${archive}.TMP"
                    incr failed_sites
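                    # Give up after three failed sites unless binary-only mode
                    # is requested or an archive is known to be available.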
                    if {$failed_sites > 2 && ![tbool ports_binary_only] && ![_archive_available]} {
                        break
                    }
                }
            }
            if {[info exists fetched]} {
                # there should be an rmd160 digest of the archive signed with one of the trusted keys
                set signature "${incoming_path}/${archive}.rmd160"
                ui_msg "$UI_PREFIX [format [msgcat::mc "Attempting to fetch %s from %s"] ${archive}.rmd160 $site]"
                # reusing $file_url from the last iteration of the loop above
                if {[catch {curl fetch --effective-url effectiveURL {*}$fetch_options ${file_url}.rmd160 $signature} result]} {
                    ui_debug "$::errorInfo"
                    return -code error "Failed to fetch signature for archive: $result"
                }
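                # Verify the rmd160 signature against each trusted public key
                # until one succeeds.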
                set openssl [findBinary openssl $portutil::autoconf::openssl_path]
                set verified 0
                foreach pubkey [option archivefetch.pubkeys] {
                    if {![catch {exec $openssl dgst -ripemd160 -verify $pubkey -signature $signature "${incoming_path}/${archive}.TMP"} result]} {
                        set verified 1
                        break
                    } else {
                        ui_debug "failed verification with key $pubkey"
                        ui_debug "openssl output: $result"
                    }
                }
                file delete -force $signature
                if {!$verified} {
                    # fall back to building from source (or error out later if binary only mode)
                    ui_warn "Failed to verify signature for archive!"
                    file delete -force "${incoming_path}/${archive}.TMP"
                    break
                } elseif {[catch {file rename -force "${incoming_path}/${archive}.TMP" "${archivefetch.fulldestpath}/${archive}"} result]} {
                    ui_debug "$::errorInfo"
                    return -code error "Failed to move downloaded archive into place: $result"
                }
                set archive_exists 1
                break
            }
        } else {
            set archive_exists 1
            break
        }
    }
    if {[info exists archive_exists]} {
        # modify state file to skip remaining phases up to destroot
        global target_state_fd
        foreach target {fetch checksum extract patch configure build destroot} {
            write_statefile target "org.macports.${target}" $target_state_fd
        }
        return 0
    }
    if {([info exists ports_binary_only] && $ports_binary_only eq "yes") || [_archive_available]} {
        if {[info exists lastError] && $lastError ne ""} {
            error [msgcat::mc "version @[option version]_[option revision][option portvariants]: %s" $lastError]
        } else {
            error "version @[option version]_[option revision][option portvariants]"
        }
    } else {
        return 0
    }
}

# Initialize archivefetch target and call checkfiles.
#proc portarchivefetch::archivefetch_init {args} {
#    return 0
#}

proc portarchivefetch::archivefetch_start {args} {
    variable archivefetch_urls
    global UI_PREFIX subport all_archive_files destroot target_state_fd \
           ports_source_only ports_binary_only
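    # Only look for archives when not in source-only mode and either binary-only
    # mode is requested or the destroot phase has not completed yet.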
    if {![tbool ports_source_only] && ([tbool ports_binary_only] ||
            !([check_statefile target org.macports.destroot $target_state_fd] && [file isdirectory $destroot]))} {
        portarchivefetch::checkfiles archivefetch_urls
    }
    if {[info exists all_archive_files] && [llength $all_archive_files] > 0} {
        ui_msg "$UI_PREFIX [format [msgcat::mc "Fetching archive for %s"] $subport]"
    } elseif {[tbool ports_binary_only]} {
        error "Binary-only mode requested with no usable archive sites configured"
    }
    portfetch::check_dns
}

# Main archive fetch routine
# just calls the standard fetchfiles procedure
proc portarchivefetch::archivefetch_main {args} {
    global all_archive_files
    if {[info exists all_archive_files] && [llength $all_archive_files] > 0} {
        # Fetch the files
        portarchivefetch::fetchfiles
    }
    return 0
}