Commit a07ec217 authored by Ivan Ivlev's avatar Ivan Ivlev Committed by Vitaly Lipatov

Added parameters needed to replace wget with eget in winetricks eterbug#16749

parent 704c7d9e
#!/bin/sh
# eget - simple shell wrapper over wget for loading directories over http (wget does not support wildcards for http)
# Use:
# eget http://ftp.altlinux.ru/pub/security/ssl/*
...@@ -320,6 +321,18 @@ WGETNAMEOPTIONS='--content-disposition' ...@@ -320,6 +321,18 @@ WGETNAMEOPTIONS='--content-disposition'
CURLNAMEOPTIONS='--remote-name --remote-time --remote-header-name' CURLNAMEOPTIONS='--remote-name --remote-time --remote-header-name'
AXELNAMEOPTIONS='' AXELNAMEOPTIONS=''
WGETNODIRECTORIES=''
WGETCONTINUE=''
CURLCONTINUE=''
WGETTIMEOUT=''
CURLMAXTIME=''
WGETREADTIMEOUT=''
WGETRETRYCONNREFUSED=''
CURLRETRYCONNREFUSED=''
WGETTRIES=''
CURLRETRY=''
WGETLOADCOOKIES=''
CURLCOOKIE=''
LISTONLY='' LISTONLY=''
CHECKURL='' CHECKURL=''
...@@ -362,6 +375,13 @@ Options: ...@@ -362,6 +375,13 @@ Options:
-6|--ipv6|--inet6-only - use only IPV6 -6|--ipv6|--inet6-only - use only IPV6
-O-|-O - - output downloaded file to stdout -O-|-O - - output downloaded file to stdout
-O file - download to this file -O file - download to this file
-nd|--no-directories - do not create a hierarchy of directories when retrieving recursively
-c|--continue - continue getting a partially-downloaded file
-T|--timeout=N - set the network timeout to N seconds
--read-timeout=N - set the read (and write) timeout to N seconds
--retry-connrefused - consider "connection refused" a transient error and try again
-t|--tries=N - set number of tries to N. Specify 0 or 'inf' for infinite retrying
--load-cookies file - load cookies from file before the first HTTP retrieval
--latest - print only latest version of a file --latest - print only latest version of a file
--second-latest - print only second to latest version of a file --second-latest - print only second to latest version of a file
--allow-mirrors - check mirrors if url is not accessible --allow-mirrors - check mirrors if url is not accessible
...@@ -403,7 +423,9 @@ fi ...@@ -403,7 +423,9 @@ fi
while [ -n "$1" ] ; do while [ -n "$1" ] ; do
case "$1" in argument="$(echo $1 | cut -d= -f1)"
argvalue="$(echo $1 | cut -s -d= -f2)"
case "$argument" in
-h|--help) -h|--help)
eget_help eget_help
exit exit
...@@ -420,10 +442,14 @@ while [ -n "$1" ] ; do ...@@ -420,10 +442,14 @@ while [ -n "$1" ] ; do
AXELNOSSLCHECK='--insecure' AXELNOSSLCHECK='--insecure'
;; ;;
-H|--header) -H|--header)
shift #TODO: error if header value contains spaces
WGETHEADER="--header=$1" if [ -z "$argvalue" ];then
CURLHEADER="--header $1" shift
AXELHEADER="--header=$1" argvalue="$1"
fi
WGETHEADER="--header=\"$argvalue\""
CURLHEADER="--header \"$argvalue\""
AXELHEADER="--header=\"$argvalue\""
;; ;;
-U|-A|--user-agent) -U|-A|--user-agent)
user_agent="Mozilla/5.0 (X11; Linux $arch) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/103.0.0.0 Safari/537.36" user_agent="Mozilla/5.0 (X11; Linux $arch) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/103.0.0.0 Safari/537.36"
...@@ -481,6 +507,58 @@ while [ -n "$1" ] ; do ...@@ -481,6 +507,58 @@ while [ -n "$1" ] ; do
-O-) -O-)
TARGETFILE="-" TARGETFILE="-"
;; ;;
-nd|--no-directories)
WGETNODIRECTORIES="$1"
;;
-c|--continue)
WGETCONTINUE="$1"
CURLCONTINUE="-C -"
;;
-T|--timeout)
if [ -z "$argvalue" ];then
shift
argvalue="$1"
fi
WGETTIMEOUT="--timeout $argvalue"
CURLMAXTIME="--max-time $argvalue"
;;
--read-timeout)
if [ -z "$argvalue" ];then
shift
argvalue="$1"
fi
WGETREADTIMEOUT="--read-timeout $argvalue"
if [ -z "$CURLMAXTIME" ];then
CURLMAXTIME="--max-time $argvalue"
fi
;;
--retry-connrefused)
WGETRETRYCONNREFUSED="$1"
CURLRETRYCONNREFUSED="$1"
;;
-t|--tries)
if [ -z "$argvalue" ];then
shift
argvalue="$1"
fi
case "$argvalue" in
0|inf)
CURLRETRY="--retry 1000"
WGETTRIES="--tries $argvalue"
;;
*)
WGETTRIES="--tries $argvalue"
CURLRETRY="--retry $(($argvalue-1))"
;;
esac
;;
--load-cookies)
shift;
WGETLOADCOOKIES="--load-cookies $1"
CURLCOOKIE="--cookie $1"
;;
-*) -*)
fatal "Unknown option '$1', check eget --help." fatal "Unknown option '$1', check eget --help."
;; ;;
...@@ -948,9 +1026,9 @@ elif [ "$EGET_BACKEND" = "wget" ] ; then ...@@ -948,9 +1026,9 @@ elif [ "$EGET_BACKEND" = "wget" ] ; then
__wget() __wget()
{ {
if [ -n "$WGETUSERAGENT" ] ; then if [ -n "$WGETUSERAGENT" ] ; then
docmd $WGET $FORCEIPV $WGETQ $WGETCOMPRESSED $WGETHEADER $WGETNOSSLCHECK "$WGETUSERAGENT" "$@" docmd $WGET $FORCEIPV $WGETQ $WGETCOMPRESSED $WGETHEADER $WGETNOSSLCHECK $WGETNODIRECTORIES $WGETCONTINUE $WGETTIMEOUT $WGETREADTIMEOUT $WGETRETRYCONNREFUSED $WGETTRIES $WGETLOADCOOKIES "$WGETUSERAGENT" "$@"
else else
docmd $WGET $FORCEIPV $WGETQ $WGETCOMPRESSED $WGETHEADER $WGETNOSSLCHECK "$@" docmd $WGET $FORCEIPV $WGETQ $WGETCOMPRESSED $WGETHEADER $WGETNOSSLCHECK $WGETNODIRECTORIES $WGETCONTINUE $WGETTIMEOUT $WGETREADTIMEOUT $WGETRETRYCONNREFUSED $WGETTRIES $WGETLOADCOOKIES "$@"
fi fi
} }
...@@ -998,9 +1076,9 @@ elif [ "$EGET_BACKEND" = "curl" ] ; then ...@@ -998,9 +1076,9 @@ elif [ "$EGET_BACKEND" = "curl" ] ; then
__curl() __curl()
{ {
if [ -n "$CURLUSERAGENT" ] ; then if [ -n "$CURLUSERAGENT" ] ; then
docmd $CURL $FORCEIPV --fail -L $CURLQ $CURLCOMPRESSED $CURLHEADER "$CURLUSERAGENT" $CURLNOSSLCHECK "$@" docmd $CURL $FORCEIPV --fail -L $CURLQ $CURLCOMPRESSED $CURLHEADER $CURLNOSSLCHECK $CURLCONTINUE $CURLMAXTIME $CURLRETRYCONNREFUSED $CURLRETRY $CURLCOOKIE "$CURLUSERAGENT" "$@"
else else
docmd $CURL $FORCEIPV --fail -L $CURLQ $CURLCOMPRESSED $CURLHEADER $CURLNOSSLCHECK "$@" docmd $CURL $FORCEIPV --fail -L $CURLQ $CURLCOMPRESSED $CURLHEADER $CURLNOSSLCHECK $CURLCONTINUE $CURLMAXTIME $CURLRETRYCONNREFUSED $CURLRETRY $CURLCOOKIE "$@"
fi fi
} }
# put remote content to stdout
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment