#!/bin/sh
# eget - a simple shell wrapper over wget for downloading directories over HTTP
# (wget does not support wildcards for HTTP)
# Usage:
# eget http://ftp.altlinux.ru/pub/security/ssl/*
#
# Copyright (C) 2014-2014, 2016, 2020, 2022 Etersoft
# Copyright (C) 2014 Daniil Mikhailov <danil@etersoft.ru>
# Copyright (C) 2016-2017, 2020, 2022 Vitaly Lipatov <lav@etersoft.ru>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#

fatal()
{
    echo "FATAL: $*" >&2
    exit 1
}

# TODO:
arch="$(uname -m)"

# copied from eepm project

# copied from /etc/init.d/outformat (ALT Linux)
isatty()
{
    # Set a sane TERM required for tput
    [ -n "$TERM" ] || TERM=dumb
    export TERM
    test -t 1
}

isatty2()
{
    # check stderr
    test -t 2
}

check_tty()
{
    isatty || return
    which tput >/dev/null 2>/dev/null || return
    # FreeBSD does not support tput -S
    echo | tput -S >/dev/null 2>/dev/null || return
    [ -z "$USETTY" ] || return
    export USETTY=1
}

: ${BLACK:=0} ${RED:=1} ${GREEN:=2} ${YELLOW:=3} ${BLUE:=4} ${MAGENTA:=5} ${CYAN:=6} ${WHITE:=7}

set_boldcolor()
{
    [ "$USETTY" = "1" ] || return
    {
        echo bold
        echo setaf $1
    } | tput -S
}

restore_color()
{
    [ "$USETTY" = "1" ] || return
    {
        echo op; # set Original color Pair.
        echo sgr0; # turn off all special graphics mode (bold in our case).
    } | tput -S
}
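# Usage sketch for the color helpers above (illustrative only, assuming a
# color-capable tty; this is exactly the pattern showcmd below relies on):
#   check_tty
#   set_boldcolor $GREEN
#   echo "bold green text"
#   restore_color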
echover()
{
    [ -n "$verbose" ] || return
    echo "$*" >&2
}

# Print a command line (does not run it)
showcmd()
{
    if [ -z "$quiet" ] ; then
        set_boldcolor $GREEN
        local PROMPTSIG="\$"
        [ "$UID" = 0 ] && PROMPTSIG="#"
        echo " $PROMPTSIG $@"
        restore_color
    fi >&2
}

# Print a command line, then run it
docmd()
{
    showcmd "$@"
    "$@"
}

check_tty

WGETNOSSLCHECK=''
CURLNOSSLCHECK=''
WGETUSERAGENT=''
CURLUSERAGENT=''
WGETQ='' #-q
CURLQ='' #-s
WGETNAMEOPTIONS='--content-disposition'
CURLNAMEOPTIONS='--remote-name --remote-header-name'

set_quiet()
{
    WGETQ='-q'
    CURLQ='-s'
}

# TODO: parse options in a good way
# TODO: pass through all wget options
if [ "$1" = "-q" ] ; then
    set_quiet
    shift
fi

if [ "$1" = "-k" ] || [ "$1" = "--no-check-certificate" ] ; then
    WGETNOSSLCHECK='--no-check-certificate'
    CURLNOSSLCHECK='-k'
    shift
fi

if [ "$1" = "-U" ] || [ "$1" = "-A" ] || [ "$1" = "--user-agent" ] ; then
    # a browser-like User-Agent (passed as a single argument below)
    user_agent="Mozilla/5.0 (X11; Linux $arch) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/103.0.0.0 Safari/537.36"
    WGETUSERAGENT="$user_agent"
    CURLUSERAGENT="$user_agent"
    shift
fi

# see man glob
filter_glob()
{
    [ -z "$1" ] && cat && return
    # translate glob to regexp: escape dots first, then expand * and ?
    grep "$(echo "$1" | sed -e 's|\.|\\.|g' -e 's|\*|.*|g' -e 's|?|.|g')$"
}

filter_order()
{
    if [ -n "$SECONDLATEST" ] ; then
        sort -V | tail -n2 | head -n1
        return
    fi
    [ -z "$LATEST" ] && cat && return
    sort -V | tail -n1
}

is_fileurl()
{
    echo "$1" | grep -q "^/" && return
    echo "$1" | grep -q "file:/"
}

dir_from_url()
{
    echo "$1" | sed -e 's|^file://*|/|'
}

is_url()
{
    echo "$1" | grep -q ":/"
}

# args: cmd <URL> <options>
# will run: cmd <options> <URL>
download_with_mirroring()
{
    local CMD="$1"
    shift
    local URL="$1"
    shift

    local res
    $CMD "$@" "$URL" && return
    res=$?
    [ -n "$CHECKMIRRORS" ] || return $res

    MIRROR="https://mirror.eterfund.ru"
    SECONDURL="$(echo "$URL" | sed -e "s|^.*://|$MIRROR/|")"
    $CMD "$@" "$SECONDURL" && URL="$SECONDURL" && return

    MIRROR="https://mirror.eterfund.org"
    SECONDURL="$(echo "$URL" | sed -e "s|^.*://|$MIRROR/|")"
    $CMD "$@" "$SECONDURL" && URL="$SECONDURL" && return
}

LISTONLY=''
if [ "$1" = "--list" ] ; then
    LISTONLY="$1"
    set_quiet
    shift
fi

# check_url_is_accessible is defined below, after the downloader is chosen,
# so only remember the URL here and run the check later
CHECKURL=''
if [ "$1" = "--check" ] ; then
    set_quiet
    shift
    CHECKURL="$1"
fi

LATEST=''
if [ "$1" = "--latest" ] ; then
    LATEST="$1"
    shift
fi

SECONDLATEST=''
if [ "$1" = "--second-latest" ] ; then
    SECONDLATEST="$1"
    shift
fi

CHECKMIRRORS=''
if [ "$1" = "--check-mirrors" ] ; then
    CHECKMIRRORS="$1"
    shift
fi

# download to this file
TARGETFILE=''
if [ "$1" = "-O" ] ; then
    TARGETFILE="$2"
    shift 2
elif [ "$1" = "-O-" ] ; then
    TARGETFILE="-"
    shift 1
fi

WGET="$(which wget 2>/dev/null)"

if is_fileurl "$1" ; then

# put remote content to stdout
scat()
{
    local URL="$1"
    cat "$(dir_from_url "$URL")"
}
# download to the default name or to $2
sget()
{
    local URL="$1"
    if [ "$2" = "/dev/stdout" ] || [ "$2" = "-" ] ; then
        scat "$URL"
        return
    elif [ -n "$2" ] ; then
        cp -av "$(dir_from_url "$URL")" "$2"
        return
    fi
    cp -av "$(dir_from_url "$URL")" .
}
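# Illustrative calls of the file:// helpers above (paths are hypothetical):
#   sget file:///tmp/a.txt          # copies /tmp/a.txt to the current dir
#   sget file:///tmp/a.txt -        # streams /tmp/a.txt to stdout
#   sget file:///tmp/a.txt b.txt    # copies /tmp/a.txt to b.txt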
check_url_is_accessible()
{
    local URL="$1"
    test -f "$(dir_from_url "$URL")"
}

elif [ -n "$WGET" ] ; then

__wget()
{
    if [ -n "$WGETUSERAGENT" ] ; then
        docmd $WGET $WGETQ $WGETNOSSLCHECK -U "$WGETUSERAGENT" "$@"
    else
        docmd $WGET $WGETQ $WGETNOSSLCHECK "$@"
    fi
}
# put remote content to stdout
scat()
{
    local URL="$1"
    download_with_mirroring __wget "$URL" -O-
}
# download to the default name or to $2
sget()
{
    local URL="$1"
    if [ "$2" = "/dev/stdout" ] || [ "$2" = "-" ] ; then
        scat "$URL"
        return
    elif [ -n "$2" ] ; then
        download_with_mirroring __wget "$URL" -O "$2"
        return
    fi
# TODO: rsync support for known hosts?
# Do not download if the size and date are the same
# -nc
# TODO: overwrite always
    download_with_mirroring __wget "$URL" $WGETNAMEOPTIONS
}
check_url_is_accessible()
{
    local URL="$1"
    __wget --spider -S "$URL" 2>&1 | grep "HTTP/" | tail -n1 | grep -q "200"
}

else
CURL="$(which curl 2>/dev/null)"
[ -n "$CURL" ] || fatal "Neither wget nor curl is present in the system. Install one with $ epm install curl"

__curl()
{
    if [ -n "$CURLUSERAGENT" ] ; then
        docmd $CURL --fail -L $CURLQ -A "$CURLUSERAGENT" $CURLNOSSLCHECK "$@"
    else
        docmd $CURL --fail -L $CURLQ $CURLNOSSLCHECK "$@"
    fi
}
# put remote content to stdout
scat()
{
    local URL="$1"
    download_with_mirroring __curl "$URL" --output -
}
# download to the default name or to $2
sget()
{
    local URL="$1"
    if [ "$2" = "/dev/stdout" ] || [ "$2" = "-" ] ; then
        scat "$URL"
        return
    elif [ -n "$2" ] ; then
        download_with_mirroring __curl "$URL" --output "$2"
        return
    fi
    download_with_mirroring __curl "$URL" $CURLNAMEOPTIONS
}
check_url_is_accessible()
{
    local URL="$1"
    # a HEAD answer of 200 or 404 both count: the server is reachable
    __curl -LI "$URL" 2>&1 | grep "HTTP/" | tail -n1 | grep -q -w "200\|404"
}
fi

# the downloader is chosen now, so run a deferred --check request
if [ -n "$CHECKURL" ] ; then
    check_url_is_accessible "$CHECKURL"
    exit
fi

# TODO:
# -P support

if [ -z "$1" ] ; then
    echo "eget - wget-like downloader wrapper with wildcard support" >&2
    fatal "Run $0 --help to get help"
fi

if [ "$1" = "-h" ] || [ "$1" = "--help" ] ; then
    echo "eget - wget-like downloader wrapper with wildcard support in the filename part of the URL"
    echo "Usage: eget [-q] [-k] [-U] [-O target file] [--list] http://somesite.ru/dir/na*.log"
    echo
    echo "Options:"
    echo "  -q                        - quiet mode"
    echo "  -k|--no-check-certificate - skip SSL certificate verification"
    echo "  -U|-A|--user-agent        - send a browser-like User-Agent"
    echo "  -O file                   - download to this file (use the filename from the server if omitted)"
    echo "  --list                    - print files from the url matching the mask"
    echo "  --check                   - check if the URL is accessible (returns HTTP 200 OK)"
    echo "  --latest                  - print only the latest version of a file"
    echo "  --second-latest           - print only the second to latest version of a file"
    echo "  --check-mirrors           - check mirrors if the url is not accessible"
    echo
    echo "eget supports --list and download for https://github.com/owner/project urls"
    echo
    echo "Examples:"
    echo " $ eget --list http://ftp.somesite.ru/package-*.tar"
    echo " $ eget http://ftp.somesite.ru/package-*.x64.tar"
    echo " $ eget --check http://ftp.somesite.ru/test"
    echo " $ eget --list http://download.somesite.ru 'package-*.tar.xz'"
    echo " $ eget --list --latest https://github.com/telegramdesktop/tdesktop/releases 'tsetup.*.tar.xz'"
#    echo "See $ wget --help for wget options you can use here"
    exit
fi

get_github_urls()
{
    # https://github.com/OWNER/PROJECT
    local owner="$(echo "$1" | sed -e "s|^https://github.com/||" -e "s|/.*||")" #"
    local project="$(echo "$1" | sed -e "s|^https://github.com/$owner/||" -e "s|/.*||")" #"
    [ -n "$owner" ] || fatal "Can't get owner from $1"
    [ -n "$project" ] || fatal "Can't get project from $1"
    local URL="https://api.github.com/repos/$owner/$project/releases"
    scat "$URL" | \
        grep -i -o -E '"browser_download_url": "https://.*"' | cut -d'"' -f4
}
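# Example of what get_github_urls does (OWNER/PROJECT are hypothetical):
# for https://github.com/OWNER/PROJECT it fetches the JSON from
# https://api.github.com/repos/OWNER/PROJECT/releases and prints every
# "browser_download_url" value, one URL per line.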
if echo "$1" | grep -q "^https://github.com/" && \
   echo "$1" | grep -q -v "/download/" && [ -n "$2" ] ; then
    MASK="$2"

    if [ -n "$LISTONLY" ] ; then
        get_github_urls "$1" | filter_glob "$MASK" | filter_order
        exit
    fi

    ERROR=0
    for fn in $(get_github_urls "$1" | filter_glob "$MASK" | filter_order) ; do
        sget "$fn" "$TARGETFILE" || ERROR=1
        [ -n "$TARGETFILE" ] && [ "$ERROR" = "0" ] && break
    done
    exit
fi

# do not support a trailing /
if echo "$1" | grep -q "/$" && [ -z "$2" ] ; then
    fatal "Use http://example.com/e/* to download all files in a dir"
fi

# TODO: curl?
# If the ftp protocol is used, just download
if echo "$1" | grep -q "^ftp://" ; then
    [ -n "$LISTONLY" ] && fatal "TODO: listing files for ftp:// is not supported yet"
    sget "$1" "$TARGETFILE"
    exit
fi

# a mask is allowed only in the last part of the path
MASK=$(basename "$1")

# if the mask is given as the second arg
if [ -n "$2" ] ; then
    URL="$1"
    MASK="$2"
else
    # drop the mask part
    URL="$(dirname "$1")"
fi

if [ -z "$MASK" ] && echo "$URL" | grep -q "[*?]" ; then
    fatal "Error: there are globbing symbols (*?) in $URL"
fi

# If there is no wildcard symbol like an asterisk, just download
if echo "$MASK" | grep -qv "[*?]" || echo "$MASK" | grep -q "[?].*=" ; then
    if is_fileurl "$1" ; then
        cp -v "$(dir_from_url "$1")" "${TARGETFILE:-.}"
        exit
    fi
    sget "$1" "$TARGETFILE"
    exit
fi

# drop the file path from the URL
get_host_only()
{
    echo "$1/" | grep -Eo '(.*://[^/]+)'
}

# Args: URL filename
make_fileurl()
{
    local url="$1"
    local fn="$2"

    fn="$(echo "$fn" | sed -e 's|^\./||' -e 's|^/+||')"

    if is_fileurl "$url" ; then
        echo "$url/$fn"
        return
    fi

    # if there is a file path from the root of the site
    if echo "$fn" | grep -q "^/" ; then
        echo "$(get_host_only "$url")$fn"
        return
    fi

    # workaround for a slash at the end of the URL
    echo "$(echo "$url" | sed -e 's|/*$||')/$fn"
}

get_urls()
{
    if is_fileurl "$URL" ; then
        ls -1 "$(dir_from_url "$URL")"
        return
    fi

    # cat html, divide into lines by tags and cut off hrefs only
    scat "$URL" | sed -e 's|<|<\n|g' -e 's|data-file=|href=|g' | \
        grep -i -o -E 'href="(.+)"' | cut -d'"' -f2
}

if [ -n "$LISTONLY" ] ; then
    for fn in $(get_urls | filter_glob "$MASK" | filter_order) ; do
        is_url "$fn" && echo "$fn" && continue
        make_fileurl "$URL" "$fn"
    done
    exit
fi

ERROR=0
for fn in $(get_urls | filter_glob "$MASK" | filter_order) ; do
    is_url "$fn" || fn="$(make_fileurl "$URL" "$(basename "$fn")" )" #"
    sget "$fn" "$TARGETFILE" || ERROR=1
    [ -n "$TARGETFILE" ] && [ "$ERROR" = "0" ] && break
done
exit $ERROR
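# make_fileurl resolution examples (illustrative, hypothetical site):
#   make_fileurl "http://example.com/dir/" "file.tar"  -> http://example.com/dir/file.tar
#   make_fileurl "http://example.com/dir" "/pub/f.tar" -> http://example.com/pub/f.tar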