#!/bin/bash
# filter - arguments are to wget - quoted?
#
# Reads "URL [mirror-URL ...]" lines on stdin, answers with a local path when the
# file is already cached, and otherwise fetches the first reachable candidate
# with scurl.bash.

shopt -s lastpipe       # run the trailing while loop in this shell so retval
                        # survives the pipeline and reaches the final exit

prog=$( basename "$0" .bash )
prog=ScurlU
ROOTDIR=/mnt/i/net/Http
ROLE=base
CACHE=/usr/portage/distfiles

# proxy_curl_lib.bash is expected to provide NOTLSV3 and proxy_ami_cloudflared;
# usr_local_tput.bash provides the WARN/DBUG logging helpers.
. /usr/local/bin/proxy_curl_lib.bash
route | grep -q ^def || { echo "ERROR: not connected" ; exit 1 ; }      # need a default route
. /usr/local/bin/usr_local_tput.bash

# Portage-style FETCHCOMMAND built on scurl.bash (kept here for reference).
FETCHCOMMAND='/usr/local/bin/scurl.bash --force-directories --directory-prefix "\${DISTDIR}" -- "\${URI}"'
# RARGS="--retry 1 --connect-timeout 10"

if [ "$#" -eq 0 ] ; then
    LARGS="--force-directories --directory-prefix $ROOTDIR"
else
    LARGS="$@"
fi

cp /dev/null "/tmp/$prog$$.urls"

# Known-problematic hosts:
# //www.simplesystems.org/users/bfriesen/public-key.txt            no https:
# https://opencoder.net/WayneDavison.key                           cloudflare 403
# https://www.simplesystems.org/users/bfriesen/public-key.txt      503
# https://tiswww.case.edu/php/chet/gpgkey.asc                      500 timeout
# https://botan.randombit.net/pgpkey.txt                           no tls1.3
# https://sourceware.org/elfutils/ftp/gpgkey-1AA44BE649DE760A.gpg  no tls1.3
# https://gnutls.org/gnutls-release-keyring.gpg                    no tls1.3

retval=0

# NOT 1.3
# -e 's@^https://distfiles.gentoo.org/distfiles/[^ ]* https://pypi.python.org/@https://pypi.python.org/@'
grep ^http | \
sed -e 's@ftp://[^ ]*@@' \
    -e 's/http:/https:/' \
    -e 's@^https://distfiles.gentoo.org/distfiles/openpgp-keys-[^ ]*.asc @@' \
    -e 's@https*://distfiles.gentoo.org@https://gentoo.osuosl.org@g' \
    -e 's@https://gentoo.osuosl.org@https://mirror.leaseweb.com/gentoo@g' \
    -e 's@https*://download.sourceforge.net@https://download.sourceforge.net@g' | \
while read urls ; do
    # Already have it? Check the distfiles cache by basename, then the local
    # mirror tree by host/path, using the first URL on the line.
    url=$( sed -e 's@ .*@@' <<< "$urls" )
    base=$( basename "$url" )
    [ -e "$CACHE/$base" ] && echo "$CACHE/$base" && continue
    base=$( sed -e 's@ .*@@' -e 's@https*://@@' <<< "$url" )
    [ -e "$ROOTDIR/$base" ] && echo "$ROOTDIR/$base" && continue

    # Not cached: try each candidate URL until one download succeeds.
    for url in $urls ; do
        for no in "${NOTLSV3[@]}" ; do
            [[ $url =~ $no ]] && continue 2     # host cannot do TLS 1.3, skip this URL
        done

        # Resolve the candidate's host over Tor; warn and skip if it is behind Cloudflare.
        domain=$( sed -e 's@https*://@@' -e 's@/.*@@' <<< "$url" )
        ip=$( tor-resolve "$domain" )
        if [ $? -eq 0 -a -n "$ip" ] ; then
            a=$( proxy_ami_cloudflared "$ip" )
            [ $? -eq 0 -a "$a" = True ] && \
                WARN $url Cloudflared $ip $no && \
                continue
        fi

        DBUG $prog /usr/local/bin/scurl.bash $LARGS -- $RARGS $url
        /usr/local/bin/scurl.bash $LARGS -- $RARGS $url || {
            retval=$?
            continue
        }
        break
    done
done

exit $retval
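
# Usage sketch (illustrative; the installed name ScurlU.bash and the sample URLs
# are assumptions, not taken from the script): feed whitespace-separated candidate
# URLs, one group per line, on stdin. Cache and mirror hits come back as local
# paths; anything else is downloaded through scurl.bash.
#
#   printf '%s %s\n' \
#       https://distfiles.gentoo.org/distfiles/example-1.0.tar.gz \
#       https://example.org/pub/example-1.0.tar.gz |
#       /usr/local/bin/ScurlU.bash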