Commit 4d6a88da authored by Vitaly Lipatov's avatar Vitaly Lipatov

eget: add get_real_url

parent 7f340d2d
...@@ -215,6 +215,13 @@ is_ipfsurl() ...@@ -215,6 +215,13 @@ is_ipfsurl()
echo "$1" | grep -q "^ipfs://" echo "$1" | grep -q "^ipfs://"
} }
is_httpurl()
{
    # Return 0 (true) if $1 starts with http:// or https://.
    # BUGFIX: the original used a single '&', which backgrounded the grep and
    # made the function unconditionally return 0 (every string looked like an
    # http URL). '&&' short-circuits on a match as intended.
    # TODO: improve
    echo "$1" | grep -q "^https://" && return
    echo "$1" | grep -q "^http://" && return
    return 1
}
cid_from_url() cid_from_url()
{ {
echo "$1" | sed -e 's|^ipfs://*||' echo "$1" | sed -e 's|^ipfs://*||'
...@@ -263,6 +270,7 @@ CURLNAMEOPTIONS='--remote-name --remote-time --remote-header-name' ...@@ -263,6 +270,7 @@ CURLNAMEOPTIONS='--remote-name --remote-time --remote-header-name'
LISTONLY='' LISTONLY=''
CHECKURL='' CHECKURL=''
GETFILENAME='' GETFILENAME=''
GETREALURL=''
LATEST='' LATEST=''
SECONDLATEST='' SECONDLATEST=''
CHECKMIRRORS='' CHECKMIRRORS=''
...@@ -299,6 +307,7 @@ Options: ...@@ -299,6 +307,7 @@ Options:
--list|--list-only - print only URLs --list|--list-only - print only URLs
--check URL - check if the URL is accessible (returns HTTP 200 OK) --check URL - check if the URL is accessible (returns HTTP 200 OK)
--get-filename URL - print filename for the URL (via Content-Disposition if applicable) --get-filename URL - print filename for the URL (via Content-Disposition if applicable)
--get-real-url URL - print URL after all redirects
Supported URLs: Supported URLs:
ftp:// http:// https:// file:/ ipfs:// ftp:// http:// https:// file:/ ipfs://
...@@ -363,6 +372,9 @@ while [ -n "$1" ] ; do ...@@ -363,6 +372,9 @@ while [ -n "$1" ] ; do
--get-filename) --get-filename)
GETFILENAME="$1" GETFILENAME="$1"
;; ;;
--get-real-url)
GETREALURL="$1"
;;
--latest) --latest)
LATEST="$1" LATEST="$1"
;; ;;
...@@ -634,6 +646,11 @@ url_get_filename() ...@@ -634,6 +646,11 @@ url_get_filename()
basename "$1" basename "$1"
} }
url_get_real_url()
{
    # Local file:/ URLs never go through a redirect chain,
    # so the "real" URL is simply the URL we were given.
    local url="$1"
    echo "$url"
}
elif is_ipfsurl "$1" ; then elif is_ipfsurl "$1" ; then
# put remote content to stdout # put remote content to stdout
...@@ -665,10 +682,18 @@ url_check() ...@@ -665,10 +682,18 @@ url_check()
url_get_filename() url_get_filename()
{ {
local URL="$1"
local CID="$(cid_from_url "$URL")" local CID="$(cid_from_url "$URL")"
get_filename_by_cid "$CID" get_filename_by_cid "$CID"
} }
url_get_real_url()
{
    # Resolve an ipfs:// URL: extract its CID and map that
    # to a fetchable gateway URL.
    get_url_by_cid "$(cid_from_url "$1")"
}
elif [ -n "$WGET" ] && [ "$EGET_BACKEND" != "curl" ] ; then elif [ -n "$WGET" ] && [ "$EGET_BACKEND" != "curl" ] ; then
__wget() __wget()
...@@ -723,6 +748,24 @@ url_get_header() ...@@ -723,6 +748,24 @@ url_get_header()
url_get_headers "$URL" | grep "^$HEADER: " | sed -e "s|^$HEADER: ||" url_get_headers "$URL" | grep "^$HEADER: " | sed -e "s|^$HEADER: ||"
} }
url_get_real_url()
{
    # Print the final URL after following HTTP redirects (wget backend).
    # Non-HTTP URLs and pre-constructed ("made") URLs are echoed unchanged.
    local url="$1"
    local target

    if ! is_httpurl "$url" ; then
        echo "$url"
        return
    fi

    # don't check location if we have made form of the URL
    if [ -n "$MADEURL" ] && [ "$MADEURL" = "$url" ] ; then
        echo "$url"
        return
    fi

    target="$(url_get_header "$url" "Location" | tail -n1)"
    if is_url "$target" ; then
        echo "$target"
    else
        echo "$url"
    fi
}
url_get_filename() url_get_filename()
{ {
local URL="$1" local URL="$1"
...@@ -796,6 +839,24 @@ url_get_header() ...@@ -796,6 +839,24 @@ url_get_header()
url_get_headers "$URL" | grep "^$HEADER: " | sed -e "s|^$HEADER: ||" url_get_headers "$URL" | grep "^$HEADER: " | sed -e "s|^$HEADER: ||"
} }
url_get_real_url()
{
    # Print the final URL after following HTTP redirects (curl backend).
    # Non-HTTP URLs and pre-constructed ("made") URLs are echoed unchanged.
    local url="$1"
    local target

    if ! is_httpurl "$url" ; then
        echo "$url"
        return
    fi

    # don't check location if we have made form of the URL
    if [ -n "$MADEURL" ] && [ "$MADEURL" = "$url" ] ; then
        echo "$url"
        return
    fi

    # NOTE(review): curl backend greps lowercase "location" while the wget
    # backend uses "Location" — presumably tool-specific header dumps; confirm.
    target="$(url_get_header "$url" "location" | tail -n1)"
    if is_url "$target" ; then
        echo "$target"
    else
        echo "$url"
    fi
}
url_get_filename() url_get_filename()
{ {
local URL="$1" local URL="$1"
...@@ -908,6 +969,11 @@ get_filename() ...@@ -908,6 +969,11 @@ get_filename()
url_get_filename "$1" url_get_filename "$1"
} }
get_real_url()
{
    # Public entry point for --get-real-url: delegate to the
    # backend-specific url_get_real_url implementation.
    local url="$1"
    url_get_real_url "$url"
}
else else
scat() scat()
{ {
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment