Add a function "fetchzip"
This function downloads and unpacks a file in one fixed-output derivation. This is primarily useful for dynamically generated zip files, such as GitHub's /archive URLs, where the unpacked content of the zip file doesn't change, but the zip file itself may (e.g. due to minor changes in the compression algorithm, or changes in timestamps). Fetchzip is implemented by extending fetchurl with a "postFetch" hook that is executed after the file has been downloaded. This hook can thus perform arbitrary checks or transformations on the downloaded file.
This commit is contained in:
parent
2a43a4163a
commit
c8df888858
@ -17,12 +17,16 @@ curl="curl \
|
||||
$NIX_CURL_FLAGS"
|
||||
|
||||
|
||||
downloadedFile="$out"
|
||||
if [ -n "$downloadToTemp" ]; then downloadedFile="$TMPDIR/file"; fi
|
||||
|
||||
|
||||
tryDownload() {
|
||||
local url="$1"
|
||||
echo
|
||||
header "trying $url"
|
||||
success=
|
||||
if $curl --fail "$url" --output "$out"; then
|
||||
if $curl --fail "$url" --output "$downloadedFile"; then
|
||||
success=1
|
||||
fi
|
||||
stopNest
|
||||
@ -30,6 +34,8 @@ tryDownload() {
|
||||
|
||||
|
||||
finish() {
|
||||
set +o noglob
|
||||
runHook postFetch
|
||||
stopNest
|
||||
exit 0
|
||||
}
|
||||
|
@ -54,6 +54,9 @@ in
|
||||
# first element of `urls').
|
||||
name ? ""
|
||||
|
||||
, # A string to be appended to the name, if the name is derived from `url'.
|
||||
nameSuffix ? ""
|
||||
|
||||
# Different ways of specifying the hash.
|
||||
, outputHash ? ""
|
||||
, outputHashAlgo ? ""
|
||||
@ -61,6 +64,17 @@ in
|
||||
, sha1 ? ""
|
||||
, sha256 ? ""
|
||||
|
||||
, recursiveHash ? false
|
||||
|
||||
, # Shell code executed after the file has been fetched
|
||||
# successfully. This can do things like check or transform the file.
|
||||
postFetch ? ""
|
||||
|
||||
, # Whether to download to a temporary path rather than $out. Useful
|
||||
# in conjunction with postFetch. The location of the temporary file
|
||||
# is communicated to postFetch via $downloadedFile.
|
||||
downloadToTemp ? false
|
||||
|
||||
, # If set, don't download the file, but write a list of all possible
|
||||
# URLs (resulting from resolving mirror:// URLs) to $out.
|
||||
showURLs ? false
|
||||
@ -83,11 +97,11 @@ stdenv.mkDerivation {
|
||||
name =
|
||||
if showURLs then "urls"
|
||||
else if name != "" then name
|
||||
else baseNameOf (toString (builtins.head urls_));
|
||||
else baseNameOf (toString (builtins.head urls_)) + nameSuffix;
|
||||
|
||||
builder = ./builder.sh;
|
||||
|
||||
buildInputs = [curl];
|
||||
buildInputs = [ curl ];
|
||||
|
||||
urls = urls_;
|
||||
|
||||
@ -101,7 +115,9 @@ stdenv.mkDerivation {
|
||||
outputHash = if outputHash != "" then outputHash else
|
||||
if sha256 != "" then sha256 else if sha1 != "" then sha1 else md5;
|
||||
|
||||
inherit curlOpts showURLs mirrorsFile impureEnvVars;
|
||||
outputHashMode = if recursiveHash then "recursive" else "flat";
|
||||
|
||||
inherit curlOpts showURLs mirrorsFile impureEnvVars postFetch downloadToTemp;
|
||||
|
||||
# Doing the download on a remote machine just duplicates network
|
||||
# traffic, so don't do that.
|
||||
|
42
pkgs/build-support/fetchzip/default.nix
Normal file
42
pkgs/build-support/fetchzip/default.nix
Normal file
@ -0,0 +1,42 @@
|
||||
# This function downloads and unpacks a zip file. This is primarily
# useful for dynamically generated zip files, such as GitHub's
# /archive URLs, where the unpacked content of the zip file doesn't
# change, but the zip file itself may (e.g. due to minor changes in
# the compression algorithm, or changes in timestamps).

{ lib, fetchurl, unzip }:

{ # Optionally move the contents of the unpacked tree up one level.
  stripRoot ? true
  # All remaining arguments (url, sha256, etc.) are passed through to
  # fetchurl unchanged.
, ... } @ args:

fetchurl (args // {
  # Apply a suffix to the name. Otherwise, unpackPhase will get
  # confused by the .zip extension.
  nameSuffix = "-unpacked";

  # The output is a directory tree, so hash it recursively (NAR hash)
  # rather than as a flat file.
  recursiveHash = true;

  # Have fetchurl's builder download into $TMPDIR instead of $out, so
  # that $out can become the unpacked directory; the download location
  # is exposed to postFetch as $downloadedFile.
  downloadToTemp = true;

  # Runs in fetchurl's builder after a successful download.
  # NOTE(review): relies on $name, $out, $TMPDIR, $downloadedFile and
  # the stdenv unpackFile helper being in scope in that builder.
  postFetch =
    ''
      export PATH=${unzip}/bin:$PATH
      mkdir $out
      cd $out
      # Strip the "-unpacked" suffix back off so unpackFile sees the
      # original file name (and thus the right extension).
      renamed="$TMPDIR/''${name%-unpacked}"
      mv "$downloadedFile" "$renamed"
      unpackFile "$renamed"
    ''
    # FIXME: handle zip files that contain a single regular file.
    + lib.optionalString stripRoot ''
      # Include dotfiles when globbing, so hidden top-level entries
      # are both counted and moved.
      shopt -s dotglob
      if [ "$(ls -d $out/* | wc -l)" != 1 ]; then
        echo "error: zip file must contain a single directory."
        exit 1
      fi
      fn=$(cd "$out" && echo *)
      mv $out/$fn/* "$out/"
      rmdir "$out/$fn"
    '';
})
|
@ -1,13 +1,13 @@
|
||||
{ stdenv, fetchurl, python, pythonPackages, sysstat, unzip, tornado
|
||||
{ stdenv, fetchzip, python, pythonPackages, sysstat, unzip, tornado
|
||||
, makeWrapper }:
|
||||
|
||||
stdenv.mkDerivation rec {
|
||||
version = "4.2.1";
|
||||
name = "dd-agent-${version}";
|
||||
|
||||
src = fetchurl {
|
||||
src = fetchzip {
|
||||
url = "https://github.com/DataDog/dd-agent/archive/${version}.zip";
|
||||
sha256 = "0s1lg7rqx86z0y111105gwkknzplq149cxd7v3yg30l22wn68dmv";
|
||||
sha256 = "06f9nkvnpfzs2nw75cac2y9wnp2bay4sg94zz0wjm8886rigjjjm";
|
||||
};
|
||||
|
||||
buildInputs = [ python unzip makeWrapper pythonPackages.psycopg2 ];
|
||||
|
@ -338,6 +338,8 @@ let
|
||||
# linked curl in the case of stdenv-linux).
|
||||
fetchurlBoot = stdenv.fetchurlBoot;
|
||||
|
||||
fetchzip = import ../build-support/fetchzip { inherit lib fetchurl unzip; };
|
||||
|
||||
resolveMirrorURLs = {url}: fetchurl {
|
||||
showURLs = true;
|
||||
inherit url;
|
||||
|
Loading…
Reference in New Issue
Block a user