source $stdenv/setup
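
# $mirrorsFile is generated from the mirror list in Nixpkgs
# (pkgs/build-support/fetchurl/mirrors.nix); it defines one shell variable per
# mirror site (e.g. $gnu, $sourceforge), each holding a space-separated list
# of base URLs.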
source $mirrorsFile
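
# The first line of `curl -V` looks like
# "curl 7.64.1 (x86_64-pc-linux-gnu) libcurl/7.64.1 ...", so the second
# space-separated field is the curl version; it is used in the User-Agent
# header below.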
curlVersion=$(curl -V | head -1 | cut -d' ' -f2)

# Curl flags to handle redirects, not use EPSV, handle cookies for
# servers that need them during redirects, and work on SSL without a
# certificate (this isn't a security problem because we check the
# cryptographic hash of the output anyway).
curl=(
    curl
    --location
    --max-redirs 20
    --retry 3
    --disable-epsv
    --cookie-jar cookies
    --user-agent "curl/$curlVersion Nixpkgs/$nixpkgsVersion"
)
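
# Without a usable CA bundle, TLS certificates cannot be verified, so fall
# back to --insecure; as noted above, integrity is still guaranteed by the
# output hash check.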
if ! [ -f "$SSL_CERT_FILE" ]; then
    curl+=(--insecure)
fi

curl+=(
    $curlOpts
    $NIX_CURL_FLAGS
)
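
# Download target: normally $out itself, but with downloadToTemp set the file
# is fetched to a temporary path first (e.g. so a postFetch hook can transform
# it before it is moved into place).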
downloadedFile="$out"
if [ -n "$downloadToTemp" ]; then downloadedFile="$TMPDIR/file"; fi
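
# tryDownload URL: fetch a single URL with the curl flags assembled above,
# setting the global $success flag when the transfer completes.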
tryDownload() {
    local url="$1"
    echo
    header "trying $url"
    local curlexit=18

    success=

    # Curl exit code 18 is CURLE_PARTIAL_FILE: only part of the file was
    # transferred, so resume the partial download with `-C -`.
    while [ $curlexit -eq 18 ]; do
        # Keep the curl call inside an `if`, so that a failure does not
        # abort the script under `set -e`.
        if "${curl[@]}" -C - --fail "$url" --output "$downloadedFile"; then
            success=1
            break
        else
            curlexit=$?
        fi
    done
}
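
# finish [skipPostFetch]: post-process the downloaded file, optionally run the
# postFetch hook, and exit successfully.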
finish() {
    local skipPostFetch="$1"

    set +o noglob

    if [[ $executable == "1" ]]; then
        chmod +x "$downloadedFile"
    fi

    if [ -z "$skipPostFetch" ]; then
        runHook postFetch
    fi

    exit 0
}
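
# Hashed mirrors store files under their content hash, so the URL is derived
# from $outputHashAlgo and $outputHash alone; for the default Nixpkgs mirror
# this looks like https://tarballs.nixos.org/sha256/<hash>.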
tryHashedMirrors() {
    if test -n "$NIX_HASHED_MIRRORS"; then
        hashedMirrors="$NIX_HASHED_MIRRORS"
    fi

    for mirror in $hashedMirrors; do
        url="$mirror/$outputHashAlgo/$outputHash"
        if "${curl[@]}" --retry 0 --connect-timeout "${NIX_CONNECT_TIMEOUT:-15}" \
            --fail --silent --show-error --head "$url" \
            --write-out "%{http_code}" --output /dev/null > code 2> log; then
            tryDownload "$url"

            # Skip postFetch here: hashed mirrors are content-addressed, so
            # if $outputHash is present on the mirror, any postFetch changes
            # have already been applied; running the hook again would apply
            # them a second time.
            if test -n "$success"; then finish skipPostFetch; fi
        else
            # Be quiet about 404 errors, which we interpret as the file
            # not being present on this particular mirror.
            if test "$(cat code)" != 404; then
                echo "error checking the existence of $url:"
                cat log
            fi
        fi
    done
}
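
# Expand mirror:// pseudo-URLs into concrete URLs. For example,
# mirror://gnu/hello/hello-2.10.tar.gz becomes one URL per entry in $gnu,
# each with hello/hello-2.10.tar.gz appended.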

# URLs may contain globbing characters such as `?`; disable glob expansion
# while word-splitting them.
set -o noglob

urls2=
for url in $urls; do
    if test "${url:0:9}" != "mirror://"; then
        urls2="$urls2 $url"
    else
        url2="${url:9}"; echo "${url2/\// }" > split; read site fileName < split

        #varName="mirror_$site"
        varName="$site" # !!! danger of name clash, fix this
        if test -z "${!varName}"; then
            echo "warning: unknown mirror:// site \`$site'"
        else
            mirrors=${!varName}

            # Allow command-line override by setting NIX_MIRRORS_$site.
            varName="NIX_MIRRORS_$site"
            if test -n "${!varName}"; then mirrors="${!varName}"; fi

            for url3 in $mirrors; do
                urls2="$urls2 $url3$fileName"
            done
        fi
    fi
done
urls="$urls2"

# Restore globbing settings
set +o noglob
if test -n "$showURLs"; then
    echo "$urls" > "$out"
    exit 0
fi
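
# With preferHashedMirrors set, consult the content-addressed mirrors before
# the original URLs; otherwise they serve as a fallback further below.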
if test -n "$preferHashedMirrors"; then
    tryHashedMirrors
fi

# URLs may contain globbing characters such as `?`; disable glob expansion
# while word-splitting them.
set -o noglob
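
# Try each URL in order. When no postFetch hook is set, warn about raw
# GitHub/GitLab archive URLs, which are better fetched via fetchFromGitHub /
# fetchFromGitLab.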
success=
for url in $urls; do
    if [ -z "$postFetch" ]; then
        case "$url" in
            https://github.com/*/archive/*)
                echo "warning: archives from GitHub revisions should use fetchFromGitHub"
                ;;
            https://gitlab.com/*/-/archive/*)
                echo "warning: archives from GitLab revisions should use fetchFromGitLab"
                ;;
        esac
    fi
    tryDownload "$url"
    if test -n "$success"; then finish; fi
done

# Restore globbing settings
set +o noglob
if test -z "$preferHashedMirrors"; then
    tryHashedMirrors
fi
echo "error: cannot download $name from any mirror"
exit 1