#! /usr/bin/env bash
# bash (rather than plain sh) is needed for `shopt -s dotglob` below.
set -e
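
# nix-prefetch-zip: download an archive, unpack it, and add the unpacked tree
# to the Nix store as a fixed-output (recursive) path, printing its hash.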

usage(){
  echo >&2 "syntax: nix-prefetch-zip [OPTIONS] [URL [EXPECTED-HASH]]

Options:
      --url url          The URL of the archive to fetch.
      --name name        The name to use for the store path (defaults to \`basename \$url\`).
      --ext ext          The file extension (.zip, .tar.gz, ...) to be removed from the name.
      --hash hash        The expected hash of the unpacked archive.
      --hash-type type   The cryptographic hash algorithm to use: md5, sha1, or sha256 (default sha256).
      --leave-root       Keep the root directory of the archive.
      --help             Show this help text.
"
  exit 1
}
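
# The computed hash is printed on stdout.  Two environment variables tweak the
# output: PRINT_PATH additionally prints the resulting store path, and QUIET
# suppresses the informational messages on stderr.
#
# Example (hypothetical URL):
#   PRINT_PATH=1 nix-prefetch-zip --url https://example.org/archive-1.0.zip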

name=""
ext=""
argi=0
argfun=""
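
# Flag parsing: "--foo" sets argfun=set_foo, and the following argument is then
# assigned to the variable $foo.  Bare arguments are taken positionally as the
# URL and the expected hash.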
for arg; do
  if test -z "$argfun"; then
    case $arg in
      --url) argfun=set_url;;
      --name) argfun=set_name;;
      --ext) argfun=set_ext;;
      --hash) argfun=set_expHash;;
      --hash-type) argfun=set_hashType;;
      --leave-root) leaveRoot=true;;
      --help) usage;;
      *) argi=$(($argi + 1))
         case $argi in
           1) url=$arg;;
           2) expHash=$arg;;
           *) echo "Unexpected argument: $arg" >&2
              usage
              ;;
         esac
         ;;
    esac
  else
    case $argfun in
      set_*)
        var=$(echo $argfun | sed 's,^set_,,')
        eval "$var=\$arg"
        ;;
    esac
    argfun=""
  fi
done
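
# Validate arguments and apply defaults.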
if [ -z "$url" ]; then
  echo "Error: No --url flag given" >&2
  usage
fi

if [ -z "$name" ]; then
  name=$(basename "$url")
fi

if test -z "$hashType"; then
  hashType=sha256
fi

hashFormat="--base32"
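
# Everything is staged in a temporary directory that is cleaned up on exit.
# The archive is unpacked into $unpackDirTmp; unless --leave-root is given,
# the contents of its single top-level entry are then moved into $unpackDir,
# which is what finally gets hashed and added to the store.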
tmp=$(mktemp -d 2>/dev/null || mktemp -d -t "$$")
trap "rm -rf '$tmp'" EXIT

dirname=$(basename -s "$ext" "$name")

unpackDirTmp=$tmp/unpacked-tmp/$dirname
mkdir -p "$unpackDirTmp"

unpackDir=$tmp/unpacked/$dirname
mkdir -p "$unpackDir"

downloadedFile=$tmp/$(basename "$url")
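
# Unpack the given archive into the current directory, selecting the tool by
# file extension.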
unpackFile() {
  local curSrc="$1"

  case "$curSrc" in
    *.tar.xz | *.tar.lzma)
      # Don't rely on tar knowing about .xz.
      xz -d < "$curSrc" | tar xf -
      ;;
    *.tar | *.tar.* | *.tgz | *.tbz2)
      # GNU tar can automatically select the decompression method
      # (info "(tar) gzip").
      tar xf "$curSrc"
      ;;
    *.zip)
      unzip -qq "$curSrc"
      ;;
    *)
      echo "source archive $curSrc has unknown type" >&2
      exit 1
      ;;
  esac
}

# If the hash was given, a store path with that hash may already exist.
if test -n "$expHash"; then
  finalPath=$(nix-store --print-fixed-path --recursive "$hashType" "$expHash" "$dirname")
  if ! nix-store --check-validity "$finalPath" 2> /dev/null; then
    finalPath=
  fi
  hash=$expHash
fi

# If we don't know the hash or a path with that hash doesn't exist,
# download the file and add it to the store.
if test -z "$finalPath"; then
  curl="curl \
       --location --max-redirs 20 \
       --disable-epsv \
       --insecure"
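
  # Fetch the archive into the temp directory.  --fail makes curl exit
  # non-zero on HTTP errors; --insecure disables TLS certificate verification,
  # so the unpacked result is identified by its content hash instead.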
  if ! $curl --fail "$url" --output "$downloadedFile"; then
    echo "error: could not download $url" >&2
    exit 1
  fi
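
  # Strip the archive's single top-level entry unless --leave-root was given.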
if [ -z "$leaveRoot" ]; then
|
|
|
|
shopt -s dotglob
|
2015-01-23 04:44:04 +00:00
|
|
|
|
|
|
|
cd "$unpackDirTmp"
|
|
|
|
unpackFile "$downloadedFile"
|
|
|
|
|
|
|
|
if [ $(ls "$unpackDirTmp" | wc -l) != 1 ]; then
|
|
|
|
echo "error: zip file must contain a single file or directory."
|
2014-06-21 03:05:48 +00:00
|
|
|
exit 1
|
|
|
|
fi
|
2015-01-23 04:44:04 +00:00
|
|
|
|
|
|
|
fn=$(cd "$unpackDirTmp" && echo *)
|
|
|
|
|
|
|
|
if [ -f "$unpackDirTmp/$fn" ]; then
|
|
|
|
mv "$unpackDirTmp/$fn" "$unpackDir"
|
|
|
|
else
|
|
|
|
mv "$unpackDirTmp/$fn/"* "$unpackDir/"
|
|
|
|
fi
|
|
|
|
else
|
|
|
|
cd $unpackDir
|
|
|
|
unpackFile "$downloadedFile"
|
2014-06-21 03:05:48 +00:00
|
|
|
fi

  # Compute the recursive (NAR) hash of the unpacked tree, in base-32.
  hash=$(nix-hash --type "$hashType" $hashFormat "$unpackDir")
  if ! test -n "$QUIET"; then echo "hash is $hash" >&2; fi

  # Add the unpacked tree to the Nix store as a fixed-output path.
  finalPath=$(nix-store --add-fixed --recursive "$hashType" "$unpackDir")
if test -n "$expHash" -a "$expHash" != "$hash"; then
|
|
|
|
echo "hash mismatch for URL \`$url'"
|
|
|
|
exit 1
|
|
|
|
fi
|
|
|
|
fi
|
|
|
|
|
|
|
|
if ! test -n "$QUIET"; then echo "path is $finalPath" >&2; fi
|
|
|
|
|
|
|
|
echo $hash
|
|
|
|
|
|
|
|
if test -n "$PRINT_PATH"; then
|
|
|
|
echo $finalPath
|
|
|
|
fi
|