Commit b9d61607 authored by Vitaly Lipatov's avatar Vitaly Lipatov

eget: add --get-filename, get_filename

parent 37292f11
...@@ -259,6 +259,7 @@ CURLNAMEOPTIONS='--remote-name --remote-header-name' ...@@ -259,6 +259,7 @@ CURLNAMEOPTIONS='--remote-name --remote-header-name'
LISTONLY='' LISTONLY=''
CHECKURL='' CHECKURL=''
GETFILENAME=''
LATEST='' LATEST=''
SECONDLATEST='' SECONDLATEST=''
CHECKMIRRORS='' CHECKMIRRORS=''
...@@ -293,6 +294,7 @@ Options: ...@@ -293,6 +294,7 @@ Options:
--list|--list-only - print only URLs --list|--list-only - print only URLs
--check URL - check if the URL is accessible (returns HTTP 200 OK) --check URL - check if the URL is accessible (returns HTTP 200 OK)
--get-filename URL - print filename for the URL (via Content-Disposition if applicable)
Examples: Examples:
$ eget http://ftp.somesite.ru/package-*.x64.tar $ eget http://ftp.somesite.ru/package-*.x64.tar
...@@ -347,6 +349,9 @@ while [ -n "$1" ] ; do ...@@ -347,6 +349,9 @@ while [ -n "$1" ] ; do
CHECKURL="$1" CHECKURL="$1"
#set_quiet #set_quiet
;; ;;
--get-filename)
GETFILENAME="$1"
;;
--latest) --latest)
LATEST="$1" LATEST="$1"
;; ;;
...@@ -574,6 +579,12 @@ url_check() ...@@ -574,6 +579,12 @@ url_check()
test -f "$(path_from_url "$URL")" test -f "$(path_from_url "$URL")"
} }
url_get_filename()
{
    # FIXME: placeholder — for local/file URLs the filename is simply
    # the last path component of the URL.
    printf '%s\n' "$(basename "$1")"
}
elif is_ipfsurl "$1" ; then elif is_ipfsurl "$1" ; then
# put remote content to stdout # put remote content to stdout
...@@ -643,6 +654,12 @@ url_check() ...@@ -643,6 +654,12 @@ url_check()
__wget --spider -S "$URL" 2>&1 | grep "HTTP/" | tail -n1 | grep -q "200" __wget --spider -S "$URL" 2>&1 | grep "HTTP/" | tail -n1 | grep -q "200"
} }
url_get_filename()
{
    # Print the filename the server suggests for the URL:
    # prefer the Content-Disposition response header (as promised by
    # --get-filename help text), fall back to the last URL path component.
    # NOTE(review): assumes the header uses the common `filename=...` form;
    # RFC 6266 filename* (encoded) variants are not handled here.
    local fn
    fn="$(__wget --spider -S "$1" 2>&1 | tr -d '\r' | grep -i "Content-Disposition:" | head -n1 | sed -e 's/.*filename=//' -e 's/^"//' -e 's/[";].*//')"
    if [ -n "$fn" ] ; then
        echo "$fn"
    else
        basename "$1"
    fi
}
else else
CURL="$(print_command_path curl)" CURL="$(print_command_path curl)"
[ -n "$CURL" ] || fatal "There are no wget nor curl in the system. Install it with $ epm install curl" [ -n "$CURL" ] || fatal "There are no wget nor curl in the system. Install it with $ epm install curl"
...@@ -682,6 +699,13 @@ url_check() ...@@ -682,6 +699,13 @@ url_check()
__curl -LI "$URL" 2>&1 | grep "HTTP/" | tail -n1 | grep -q -w "200\|404" __curl -LI "$URL" 2>&1 | grep "HTTP/" | tail -n1 | grep -q -w "200\|404"
} }
url_get_filename()
{
    # Print the filename the server suggests for the URL:
    # prefer the Content-Disposition header from a HEAD request
    # (curl -I, following redirects with -L), fall back to the last
    # URL path component.
    # NOTE(review): assumes the header uses the common `filename=...` form;
    # RFC 6266 filename* (encoded) variants are not handled here.
    local fn
    fn="$(__curl -LI "$1" 2>&1 | tr -d '\r' | grep -i "Content-Disposition:" | head -n1 | sed -e 's/.*filename=//' -e 's/^"//' -e 's/[";].*//')"
    if [ -n "$fn" ] ; then
        echo "$fn"
    else
        basename "$1"
    fi
}
fi fi
...@@ -700,6 +724,15 @@ check_url_is_accessible() ...@@ -700,6 +724,15 @@ check_url_is_accessible()
{ {
url_check "$@" url_check "$@"
} }
get_filename()
{
# Public entry point for --get-filename: print the expected local
# filename for URL $1. Delegates to the url_get_filename variant
# defined above for the selected downloader (file/wget/curl branch).
url_get_filename "$1"
}
fi
get_github_urls() get_github_urls()
{ {
# https://github.com/OWNER/PROJECT # https://github.com/OWNER/PROJECT
...@@ -768,6 +801,11 @@ if [ -n "$CHECKURL" ] ; then ...@@ -768,6 +801,11 @@ if [ -n "$CHECKURL" ] ; then
exit exit
fi fi
if [ -n "$GETFILENAME" ] ; then
get_filename "$1"
exit
fi
# separate part for github downloads # separate part for github downloads
if echo "$1" | grep -q "^https://github.com/" && \ if echo "$1" | grep -q "^https://github.com/" && \
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment