#!/bin/sh
# eget - simple shell wrapper over wget for downloading files from directories over HTTP (wget does not support wildcards for HTTP)
# Use:
# eget http://ftp.altlinux.ru/pub/security/ssl/*
#
# Copyright (C) 2014-2014, 2016, 2020, 2022  Etersoft
# Copyright (C) 2014 Daniil Mikhailov <danil@etersoft.ru>
# Copyright (C) 2016-2017, 2020, 2022 Vitaly Lipatov <lav@etersoft.ru>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#

fatal()
{
    echo "FATAL: $*" >&2
    exit 1
}

# TODO:
arch="$(uname -m)"

# copied from eepm project

# copied from /etc/init.d/outformat (ALT Linux)
isatty()
{
	# Set a sane TERM required for tput
	[ -n "$TERM" ] || TERM=dumb
	export TERM
	test -t 1
}

isatty2()
{
	# check stderr
	test -t 2
}


check_tty()
{
	isatty || return
	which tput >/dev/null 2>/dev/null || return
	# FreeBSD does not support tput -S
	echo | tput -S >/dev/null 2>/dev/null || return
	[ -z "$USETTY" ] || return
	export USETTY=1
}

: ${BLACK:=0} ${RED:=1} ${GREEN:=2} ${YELLOW:=3} ${BLUE:=4} ${MAGENTA:=5} ${CYAN:=6} ${WHITE:=7}

set_boldcolor()
{
	[ "$USETTY" = "1" ] || return
	{
		echo bold
		echo setaf $1
	} |tput -S
}

restore_color()
{
	[ "$USETTY" = "1" ] || return
	{
		echo op; # set Original color Pair.
		echo sgr0; # turn off all special graphics mode (bold in our case).
	} |tput -S
}
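
# Illustrative usage of the color helpers above (a sketch, not executed here):
#   set_boldcolor $GREEN
#   echo "downloading..."
#   restore_color
# The text is printed in bold green only when USETTY=1, i.e. stdout is a terminal and tput works.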

echover()
{
    [ -n "$verbose" ] || return
    echo "$*" >&2
}

# Print a command line (without running it)
showcmd()
{
	if [ -z "$quiet" ] ; then
		set_boldcolor $GREEN
		local PROMPTSIG="\$"
		[ "$UID" = 0 ] && PROMPTSIG="#"
		echo " $PROMPTSIG $@"
		restore_color
	fi >&2
}

# Print command line and run command line
docmd()
{
	showcmd "$@"
	"$@"
}
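
# Illustrative usage (a sketch with a hypothetical URL, not executed here):
#   docmd wget -q http://example.com/file
# prints " $ wget -q http://example.com/file" to stderr (with a # prompt sign when run as root)
# and then executes the command.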

check_tty

WGETNOSSLCHECK=''
CURLNOSSLCHECK=''
WGETUSERAGENT=''
CURLUSERAGENT=''
WGETQ='' #-q
CURLQ='' #-s
WGETNAMEOPTIONS='--content-disposition'
CURLNAMEOPTIONS='--remote-name --remote-header-name'

set_quiet()
{
    WGETQ='-q'
    CURLQ='-s'
}

# TODO: parse options properly

# TODO: pass through all wget options
if [ "$1" = "-q" ] ; then
    set_quiet
    shift
fi

if [ "$1" = "-k" ] || [ "$1" = "--no-check-certificate" ] ; then
    WGETNOSSLCHECK='--no-check-certificate'
    CURLNOSSLCHECK='-k'
    shift
fi

if [ "$1" = "-U" ] || [ "$1" = "-A" ] || [ "$1" = "--user-agent" ] ; then
    user_agent="Mozilla/5.0 (X11; Linux $arch)"
    WGETUSERAGENT="-U '$user_agent'"
    CURLUSERAGENT="-A '$user_agent'"
    shift
fi


WGET="$(which wget 2>/dev/null)"

if [ -n "$WGET" ] ; then
__wget()
{
    if [ -n "$WGETUSERAGENT" ] ; then
        docmd $WGET $WGETQ $WGETNOSSLCHECK "$WGETUSERAGENT" "$@"
    else
        docmd $WGET $WGETQ $WGETNOSSLCHECK "$@"
    fi
}
# put remote content to stdout
scat()
{
    __wget -O- "$1"
}
# download to the default name or to $2
sget()
{
    if [ "$2" = "/dev/stdout" ] || [ "$2" = "-" ] ; then
       scat "$1"
    elif [ -n "$2" ] ; then
       __wget -O "$2" "$1"
    else
# TODO: rsync support for known hosts?
# Do not download if the size and date are the same
# -nc
# TODO: overwrite always
       __wget $WGETNAMEOPTIONS "$1"
    fi
}
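
# Illustrative usage of scat/sget (a sketch with hypothetical URLs, not executed here):
#   scat http://example.com/file.txt          # stream the remote file to stdout
#   sget http://example.com/file.txt out.txt  # save it as out.txt
#   sget http://example.com/file.txt          # save it under the server-provided name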

check_url_http()
{
    local URL="$1"
    __wget --spider -S "$URL" 2>&1 | grep "HTTP" | tail -n1 | grep -q -w "200\|301"
}
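
# Illustrative usage (a sketch with a hypothetical URL, not executed here):
#   check_url_http http://example.com/file && echo "available"
# The function succeeds when the last HTTP status line reports 200 or 301.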

else
CURL="$(which curl 2>/dev/null)"
[ -n "$CURL" ] || fatal "There are no wget nor curl in the system. Install it with $ epm install curl"
__curl()
{
    if [ -n "$CURLUSERAGENT" ] ; then
        docmd $CURL -L $CURLQ "$CURLUSERAGENT" $CURLNOSSLCHECK "$@"
    else
        docmd $CURL -L $CURLQ $CURLNOSSLCHECK "$@"
    fi
}
# put remote content to stdout
scat()
{
    __curl "$1"
}
# download to the default name or to $2
sget()
{
    if [ "$2" = "/dev/stdout" ] || [ "$2" = "-" ] ; then
       scat "$1"
    elif [ -n "$2" ] ; then
       __curl --output "$2" "$1"
    else
       __curl $CURLNAMEOPTIONS "$1"
    fi
}

check_url_http()
{
    local URL="$1"
    __curl -I "$URL" 2>&1 | grep "HTTP" | tail -n1 | grep -q -w "200\|301"
}

fi

LISTONLY=''
if [ "$1" = "--list" ] ; then
    LISTONLY="$1"
    set_quiet
    shift
fi

if [ "$1" = "--check" ] ; then
    set_quiet
    shift
    check_url_http "$1"
    exit
fi


LATEST=''
if [ "$1" = "--latest" ] ; then
    LATEST="$1"
    shift
fi

# Redefine fatal() without the FATAL: prefix for the user-facing messages below
fatal()
{
    echo "$*" >&2
    exit 1
}

# filter stdin by a glob pattern (see man 7 glob); pass everything through if no pattern is given
filter_glob()
{
	[ -z "$1" ] && cat && return
	# translate glob to regexp
	grep "$(echo "$1" | sed -e "s|\*|.*|g" -e "s|?|.|g")$"
}
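
# Illustrative example (a sketch, not executed here): the glob is turned into a regexp
# anchored at the end of the line, so "package-*.tar" becomes "package-.*.tar$" and
#   echo "package-1.2.tar" | filter_glob "package-*.tar"
# prints the line, while "package-1.2.tar.gz" would be filtered out.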

filter_order()
{
    [ -z "$LATEST" ] && cat && return
    sort -V | tail -n1
}
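
# Illustrative example (a sketch, not executed here): with --latest the list is sorted by
# version and only the highest one is kept, so from
#   package-1.2.tar  package-1.9.tar  package-1.10.tar
# only package-1.10.tar would be printed (sort -V understands numeric version parts).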

# download to this file
TARGETFILE=''
if [ "$1" = "-O" ] ; then
    TARGETFILE="$2"
    shift 2
elif [ "$1" = "-O-" ] ; then
    TARGETFILE="-"
    shift 1
fi

# TODO:
# -P support

if [ -z "$1" ] ; then
    echo "eget - wget like downloader wrapper with wildcard support" >&2
    fatal "Run $0 --help to get help"
fi

if [ "$1" = "-h" ] || [ "$1" = "--help" ] ; then
    echo "eget - wget like downloader wrapper with wildcard support in filename part of URL"
    echo "Usage: eget [-q] [-k] [-U] [-O target file] [--list] http://somesite.ru/dir/na*.log"
    echo
    echo "Options:"
    echo "    -q       - quiet mode"
    echo "    -k|--no-check-certificate - skip SSL certificate chain support"
    echo "    -U|-A|--user-agent - send browser like UserAgent"
    echo "    -O file  - download to this file (use filename from server if missed)"
    echo "    --list   - print files from url with mask"
    echo "    --check  - check if URL is accessible (returns HTTP 200 OK)"
    echo "    --latest - print only latest version of a file"
    echo
    echo "eget supports --list and download for https://github.com/owner/project urls"
    echo
    echo "Examples:"
    echo "  $ eget --list http://ftp.somesite.ru/package-*.tar"
    echo "  $ eget http://ftp.somesite.ru/package-*.x64.tar"
    echo "  $ eget --check http://ftp.somesite.ru/test"
    echo "  $ eget --list http://download.somesite.ru 'package-*.tar.xz'"
    echo "  $ eget --list --latest https://github.com/telegramdesktop/tdesktop/releases 'tsetup.*.tar.xz'"
#    echo "See $ wget --help for wget options you can use here"
    exit
fi

get_github_urls()
{
    # https://github.com/OWNER/PROJECT
    local owner="$(echo "$1" | sed -e "s|^https://github.com/||" -e "s|/.*||")" #"
    local project="$(echo "$1" | sed -e "s|^https://github.com/$owner/||" -e "s|/.*||")" #"
    [ -n "$owner" ] || fatal "Can't get owner from $1"
    [ -n "$project" ] || fatal "Can't get project from $1"
    local URL="https://api.github.com/repos/$owner/$project/releases"
    scat "$URL" | \
        grep -i -o -E '"browser_download_url": "https://.*"' | cut -d'"' -f4
}
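
# Illustrative usage (a sketch, not executed here; the repository is only an example):
#   get_github_urls https://github.com/telegramdesktop/tdesktop
# queries the GitHub releases API for that owner/project and prints one
# browser_download_url per release asset, which is then filtered by the mask below.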

if echo "$1" | grep -q "^https://github.com/" && \
   echo "$1" | grep -q -v "/download/" && [ -n "$2" ] ; then
    MASK="$2"

    if [ -n "$LISTONLY" ] ; then
        get_github_urls "$1" | filter_glob "$MASK" | filter_order
        exit
    fi

    for fn in $(get_github_urls "$1" | filter_glob "$MASK" | filter_order) ; do
        sget "$fn" || ERROR=1
    done
    exit
fi


# a URL ending with / and no mask is not supported
if echo "$1" | grep -q "/$" && [ -z "$2" ] ; then
    fatal "Use http://example.com/e/* to download all files in dir"
fi

# TODO: curl?
# If ftp protocol, just download
if echo "$1" | grep -q "^ftp://" ; then
    [ -n "$LISTONLY" ] && fatal "TODO: list files for ftp:// do not supported yet"
    sget "$1" "$TARGETFILE"
    exit
fi

# a mask is allowed only in the last part of the path
MASK=$(basename "$1")

# if the mask is given as the second argument
if [ -n "$2" ] ; then
    URL="$1"
    MASK="$2"
else
    # drop mask part
    URL="$(dirname "$1")"
fi

if echo "$URL" | grep -q "[*?]" ; then
    fatal "Error: there are globbing symbols (*?) in $URL"
fi

# If there is no wildcard symbol (* or ?), or the ? starts a query string (?key=value), just download
if echo "$MASK" | grep -qv "[*?]" || echo "$MASK" | grep -q "[?].*="; then
    sget "$1" "$TARGETFILE"
    exit
fi

is_url()
{
    echo "$1" | grep -q "://"
}

# Args: URL filename
make_fileurl()
{
    local url="$1"
    local fn="$2"
    fn="$(echo "$fn" | sed -e 's|^./||' -e 's|^/+||')"
    # workaround for a slash in the end of URL
    echo "$(echo "$url" | sed -e 's|/*$||')/$fn"
}
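
# Illustrative example (a sketch with a hypothetical URL, not executed here):
#   make_fileurl "http://example.com/dir/" "./file.txt"
# prints http://example.com/dir/file.txt: the leading ./ (or /) is stripped from the file
# name and extra trailing slashes are stripped from the URL before joining them.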

get_urls()
{
    # fetch the HTML page, split it into lines at tags and extract only the href values
    scat "$URL" | sed -e 's|<|<\n|g' | \
         grep -i -o -E 'href="(.+)"' | cut -d'"' -f2
}
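
# Illustrative example (a sketch with a hypothetical URL, not executed here): for a
# directory index such as http://example.com/dir/, get_urls prints the href targets
# found on the page (file names or absolute URLs), which are then matched against the mask.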

if [ -n "$LISTONLY" ] ; then
    for fn in $(get_urls | filter_glob "$MASK" | filter_order) ; do
        is_url "$fn" && echo $fn && continue
        make_fileurl "$URL" "$fn"
    done
    exit
fi

ERROR=0
for fn in $(get_urls | filter_glob "$MASK" | filter_order) ; do
    is_url "$fn" || fn="$(make_fileurl "$URL" "$(basename "$fn")" )" #"
    sget "$fn" || ERROR=1
done
exit $ERROR