This commit is contained in:
emdee 2024-01-15 12:44:06 +00:00
parent 2c8998aeb4
commit f63af45d10
81 changed files with 7849 additions and 218 deletions

View File

@ -1,5 +1,5 @@
[defaults] [defaults]
log_path = var/tmp/2024/01/08/gentoo_vm-2/base_proxy_toxcore.log log_path = var/tmp/2024/01/09/gentoo_vm-2/base_proxy_toxcore.log
callback_plugins = ./lib/plugins/ callback_plugins = ./lib/plugins/
# /i/data/DevOps/net/Http/docs.ansible.com/ansible/intro_configuration.html # /i/data/DevOps/net/Http/docs.ansible.com/ansible/intro_configuration.html
# http://docs.ansible.com/ansible/intro_configuration.html#command-warnings # http://docs.ansible.com/ansible/intro_configuration.html#command-warnings

View File

@ -2,7 +2,7 @@
--- ---
- hosts: "{{ BOX_HOST }}" # |default('localhost') - hosts: "{{ BOX_HOST|default('localhost')} }}" # |default('localhost')
#?? become: "{{ 'false' if ansible_connection|default('') == 'chroot' else 'true'}}" #?? become: "{{ 'false' if ansible_connection|default('') == 'chroot' else 'true'}}"
# become_method: "'' if ansible_connection|default('') == 'chroot' else 'sudo'" # become_method: "'' if ansible_connection|default('') == 'chroot' else 'sudo'"
gather_facts: true gather_facts: true
@ -43,7 +43,7 @@
# other things that use /usr/local, including some things from other OSes. # other things that use /usr/local, including some things from other OSes.
VAR_LOCAL: "/var/local" VAR_LOCAL: "/var/local"
VAR_LOG: "{{VAR_LOCAL}}/var/log/testforge" VAR_LOG: "{{VAR_LOCAL}}/var/log/testforge"
PLAY_TESTFORGE_YML: ''
PIP_CACHE: "/root/.cache/pip" PIP_CACHE: "/root/.cache/pip"
# lynx uses SSL_CERT_DIR/SSL_CERT_FILE # lynx uses SSL_CERT_DIR/SSL_CERT_FILE
PIP_CA_CERT: "{{USR_LOCAL}}/etc/ssl/cacert-testserver.pem" PIP_CA_CERT: "{{USR_LOCAL}}/etc/ssl/cacert-testserver.pem"

45
pyproject.toml Normal file
View File

@ -0,0 +1,45 @@
[project]
name = "stem_examples"
# NOTE: "version" is listed in "dynamic" below and resolved from
# stem_examples.__version__ via [tool.setuptools.dynamic]; PEP 621 forbids a
# field being both static and dynamic (setuptools errors out), so the old
# static 'version = "2023.12"' line was removed.
description = "examples of using stem"
authors = [{ name = "emdee", email = "Ingvar@gitgub.com" } ]
requires-python = ">=3.6"
dependencies = [
    'stem',
]
keywords = ["stem", "python3", "tor"]
classifiers = [
    "License :: OSI Approved",
    "Operating System :: POSIX :: BSD :: FreeBSD",
    "Operating System :: POSIX :: Linux",
    "Programming Language :: Python :: 3 :: Only",
    "Programming Language :: Python :: 3.6",
    "Programming Language :: Python :: 3.7",
    "Programming Language :: Python :: 3.8",
    "Programming Language :: Python :: 3.9",
    "Programming Language :: Python :: 3.10",
    "Programming Language :: Python :: 3.11",
    "Programming Language :: Python :: Implementation :: CPython",
]
dynamic = ["version", "readme", ] # cannot be dynamic ['license']
scripts = { exclude_badExits = "stem_examples.exclude_badExits:iMain" }
#[project.license]
#file = "LICENSE.md"

[project.urls]
repository = "https://git.plastiras.org/emdee/stem_examples"

[build-system]
requires = ["setuptools >= 61.0"]
build-backend = "setuptools.build_meta"

[tool.setuptools.dynamic]
version = {attr = "stem_examples.__version__"}
readme = {file = ["README.md", "exclude_badExits.md"]}

[tool.setuptools]
packages = ["stem_examples"]
#[tool.setuptools.packages.find]
#where = "src"

View File

@ -1,4 +1,6 @@
#!/bin/sh #!/bin/sh
# -*- mode: sh; tab-width: 8; encoding: utf-8-unix -*-
ROLE=toxcore ROLE=toxcore
#https://mirrors.edge.kernel.org/pub/linux/utils/boot/dracut/dracut-055.tar.sign #https://mirrors.edge.kernel.org/pub/linux/utils/boot/dracut/dracut-055.tar.sign

View File

@ -0,0 +1,8 @@
#!/bin/sh
# -*- mode: sh; tab-width: 8; encoding: utf-8-unix -*-
# Flush the legacy iptables rule set (filter, nat and mangle tables), then
# reload the rules saved in /etc/firewall.conf.
PREFIX=/usr/local
ROLE=toxcore
iptables-legacy -F
iptables-legacy -F -t nat
iptables-legacy -F -t mangle
iptables-legacy-restore < /etc/firewall.conf

View File

@ -1,80 +1,11 @@
#!/bin/sh #!/bin/bash
# -*- mode: sh; tab-width: 8; encoding: utf-8-unix -*- # -*- mode: sh; tab-width: 8; encoding: utf-8-unix -*-
prog=`basename $0 .bash` prog=`basename $0`
PREFIX=/usr/local PREFIX=/usr/local
ROLE=toxcore ROLE=toxcore
[ -f $PREFIX/bin/gridfire.rc ] && . $PREFIX/bin/gridfire.rc
MOD=gridfire cd /usr/local/src/gridfire || exit 3
DIR=$MOD
GIT_HUB=github.com
GIT_USER=reid-k
GIT_DIR=gridfire
DESC="" exec /usr/local/bin/python3.sh gridfire.py "$@"
[ -f /usr/local/src/usr_local_src.bash ] && \
. /usr/local/src/usr_local_src.bash
cd $PREFIX/src || exit 2
WD=$PWD
if [ "$#" -eq 0 ] ; then
cd $DIR || exit 3
if [ ! -e $MOD.py ] ; then
route|grep -q ^default || exit 0
wget -c https://raw.githubusercontent.com/$GIT_USER/$GIT_DIR/master/$MOD.py
fi
#[ -f $MOD.sh ] || \
# cp -p $PREFIX/net/Git/$GIT_HUB/$GIT_USER/$GIT_DIR/$MOD.sh .
for VER in 2 3 ; do
PYVER=$VER
PYTHON_EXE_MSYS=$PREFIX/bin/python$PYVER.bash
PYTHON_EXE=$PYTHON_EXE_MSYS
if [ ! -e $PREFIX/bin/$MOD$VER.bash ] ; then
cat > $PREFIX/bin/$MOD$VER.bash << EOF
#!/bin/sh
# -*- mode: sh; tab-width: 8; encoding: utf-8-unix -*-
ROLE=$ROLE
# https://$GIT_HUB/$GIT_USER/$GIT_DIR/
exec $PYTHON_EXE_MSYS $PREFIX/src/$DIR/$MOD.py "\$@"
EOF
chmod 755 $PREFIX/bin/$MOD$VER.bash
fi
done
# default to python2
BINS=$MOD
msys_install_python_scripts $BINS
cd bin || exit 4
for file in *.bash *.py ; do
[ $file = gridfire_ansible-vault.bash ] && continue
[ -x $PREFIX/bin/$file ] && diff -q $file $PREFIX/bin/$file && continue
cp -p $file $PREFIX/bin
[ -x $PREFIX/bin/$file ] || chmod 775 $PREFIX/bin/$file
done
cd ..
#[ -d /usr/lib64/misc/ ] && [ ! -e /usr/lib64/misc/ssh-askpass ] \
# && sudo ln -s $PREFIX/bin/$MOD.bash /usr/lib64/misc/ssh-askpass
retval=0
[ -z "$BOX_OS_FLAVOR" ] && BOX_OS_FLAVOR="Linux"
make all-$BOX_OS_FLAVOR
OPREFIX=$PREFIX/share/genkernel/overlay
dist=dist-$BOX_OS_FLAVOR
[ -d $OPREFIX/bin ] || { sudo mkdir -p $OPREFIX/bin ; sudo chmod 1777 $OPREFIX/bin ; }
[ ! -x $dist/$MOD ] || \
[ -x $OPREFIX/bin/$MOD -a $OPREFIX/bin/$MOD -nt $dist/$MOD ] || \
cp -p $dist/$MOD $OPREFIX/bin/ || exit 9
# libc.so.1 libz.so.1 libdl.so.1
exit 0
elif [ "$1" = 'test' ] ; then
$PREFIX/bin/$MOD.bash --help >/dev/null || exit 10
make test >/dev/null || exit 11
fi

View File

@ -0,0 +1,59 @@
#!/bin/bash
# -*- mode: sh; tab-width: 8; encoding: utf-8-unix -*-
# Front-end dispatcher for gridfire: a known subcommand after "--" is handed
# to its dedicated gridfire_<command>.bash wrapper; anything else falls
# through to gridfire.py with the original argument vector.
prog=`basename $0 .bash`
PREFIX=/usr/local
ROLE=toxcore
. /usr/local/bin/gridfire.rc
declare -a ELTS LARGS RARGS OARGS
# Known wrapper scripts (reference list; actual dispatch is the case below).
ELTS=(
    gridfire_ansible-vault.sh
    gridfire_keepassxc-cli.sh
    gridfire_keepassxc.sh
    gridfire_keyring.sh
    gridfire_openssl.sh
    gridfire_secret-tool.sh
    gridfire_staticgpg.sh
    gridfire_truecrypt.sh
    gridfire_veracrypt.sh
)
SHORTOPTS="ha:cgulbodfpwm:nv:s:D:P:H:A:"
# BUG FIX: keep the original arguments as an array; the old OARGS="$@"
# collapsed them into a single string, and "$OARGS" at the bottom then
# passed everything to gridfire.py as ONE argument.
OARGS=("$@")
ARGS=$(getopt --options $SHORTOPTS -- "$@")
[ $? != 0 ] && error 2 "Aborting."
eval set -- "$ARGS"
while true; do
    case "$1" in
	# flag options (no value)
	-h|-c|-g|-u|-l|-b|-o|-d|-f|-p|-w|-n)
	    LARGS+=("$1")
	    shift;;
	# options that take a value: record the flag and its argument
	-a|-m|-v|-s|-D|-P|-H|-A)
	    LARGS+=("$1")
	    shift
	    LARGS+=("$1")
	    shift;;
	'--')
	    shift
	    RARGS=("$@")
	    break
	    ;;
    esac
done
#echo DEBUG: LARGS ${LARGS[@]}
#echo DEBUG: RARGS ${RARGS[@]}
case ${RARGS[0]} in
    ansible-vault|keepassxc-cli|keepassxc|keyring|openssl|secret-tool|staticgpg|truecrypt|veracrypt|foo)
	elt=gridfire_${RARGS[0]}.bash
	# BUG FIX: the old 'unset ${RARGS[0]}' unset a *variable named after
	# the subcommand* (a no-op at best); just skip element 0 instead.
	exec bash $elt "${LARGS[@]}" "${RARGS[@]:1}"
	;;
esac
# echo ${RARGS[@]}
exec python3.sh $PREFIX/src/gridfire/gridfire.py "${OARGS[@]}"

View File

@ -0,0 +1,11 @@
#!/bin/sh
# -*- mode: sh; tab-width: 8; encoding: utf-8-unix -*-
# https://github.com/reid-k/gridfire/
# Run gridfire.py under the Python 2 wrapper, forwarding all arguments.
prog=$( basename $0 .bash )
PREFIX=/usr/local
ROLE=toxcore
PYVER=2
# BUG FIX: this script previously passed "$OARGS", a variable that is never
# set here, so every user argument was silently dropped. Forward "$@" like
# the sibling gridfire3 wrapper does.
exec python$PYVER.sh /usr/local/src/gridfire/gridfire.py "$@"

View File

@ -0,0 +1,9 @@
#!/bin/sh
# -*- mode: sh; tab-width: 8; encoding: utf-8-unix -*-
# Thin launcher: run gridfire.py under the Python 3 wrapper, passing every
# command-line argument through untouched.
prog=$( basename $0 .bash )
PREFIX=/usr/local
ROLE=toxcore
PYVER=3
exec python${PYVER}.sh /usr/local/src/gridfire/gridfire.py "$@"

View File

@ -0,0 +1,13 @@
#!/bin/sh
# -*- mode: sh; fill-column: 75; tab-width: 8; coding: utf-8-unix -*-
# Wrap the patched ansible-vault.py so that gridfire collects the vault
# password and supplies it on stdin.
PREFIX=/usr/local
ROLE=toxcore
PYVER=3
. /usr/local/bin/gridfire.rc
# make the bundled gridfire package importable from ansible-vault.py
PYTHONPATH=$PREFIX/src/gridfire
export PYTHONPATH
exec $PREFIX/bin/gridfire -H "ansible-vault.py" -- \
	$PREFIX/bin/python${PYVER}.sh $PREFIX/src/gridfire/ansible-vault.py "$@"

View File

@ -0,0 +1,174 @@
#!/usr/local/bin/python2.sh
# -*-mode: python; indent-tabs-mode: nil; py-indent-offset: 4; coding: utf-8 -*-
# (c) 2012, Michael DeHaan <michael.dehaan@gmail.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
########################################################
# Patched copy of ansible's CLI entry point: password prompts are routed
# through gridfire's getpass() instead of the terminal.
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type

__requires__ = ['ansible']

import os
import shutil
import sys
import traceback

from ansible import context
from ansible.errors import AnsibleError, AnsibleOptionsError, AnsibleParserError
from ansible.module_utils._text import to_text

from gridfire import getpass

# BUG FIX: was "ROLE=toxcore" (bare name) which raises
# NameError: name 'toxcore' is not defined - as soon as the script loads.
ROLE = "toxcore"

# Used for determining if the system is running a new enough python version
# and should only restrict on our documented minimum versions
_PY3_MIN = sys.version_info[:2] >= (3, 5)
_PY2_MIN = (2, 6) <= sys.version_info[:2] < (3,)
_PY_MIN = _PY3_MIN or _PY2_MIN
if not _PY_MIN:
    raise SystemExit('ERROR: Ansible requires a minimum of Python2 version 2.6 or Python3 version 3.5. Current version: %s' % ''.join(sys.version.splitlines()))


class LastResort(object):
    # OUTPUT OF LAST RESORT
    def display(self, msg, log_only=None):
        print(msg, file=sys.stderr)

    def error(self, msg, wrap_text=None):
        print(msg, file=sys.stderr)

    def prompt(self, msg, private=True):
        # route even last-resort prompts through gridfire
        return getpass(msg)


if __name__ == '__main__':

    display = LastResort()

    try:  # bad ANSIBLE_CONFIG or config options can force ugly stacktrace
        import ansible.constants as C
        from ansible.utils.display import Display
    except AnsibleOptionsError as e:
        display.error(to_text(e), wrap_text=False)
        sys.exit(5)

    # Replace ansible's Display with one whose prompt() uses gridfire.
    _Display = Display

    class MyDisplay(_Display):
        name = 'getpass'

        def prompt(self, prompt, private=True):
            return getpass(prompt)

    Display = MyDisplay
    display = MyDisplay()
    # vault module caches its own display reference; patch it too
    from ansible.parsing import vault
    vault.display = display

    cli = None
    me = os.path.basename(sys.argv[0])

    try:
        display.v("starting run")

        sub = None
        target = me.split('-')
        if target[-1][0].isdigit():
            # Remove any version or python version info as downstreams
            # sometimes add that
            target = target[:-1]

        if len(target) > 1:
            sub = target[1]
            myclass = "%sCLI" % sub.capitalize()
        elif target[0] == 'ansible':
            sub = 'adhoc'
            myclass = 'AdHocCLI'
        else:
            raise AnsibleError("Unknown Ansible alias: %s" % me)

        try:
            mycli = getattr(__import__("ansible.cli.%s" % sub, fromlist=[myclass]), myclass)
        except ImportError as e:
            # ImportError members have changed in py3
            if 'msg' in dir(e):
                msg = e.msg
            else:
                msg = e.message
            if msg.endswith(' %s' % sub):
                raise AnsibleError("Ansible sub-program not implemented: %s" % me)
            raise

        mycli.display = display
        try:
            args = [to_text(a, errors='surrogate_or_strict') for a in sys.argv]
        except UnicodeError:
            display.error('Command line args are not in utf-8, unable to continue. Ansible currently only understands utf-8')
            display.display(u"The full traceback was:\n\n%s" % to_text(traceback.format_exc()))
            exit_code = 6
        else:
            cli = mycli(args)
            cli.parse()
            cli.display = display
            # import pdb; pdb.set_trace()
            exit_code = cli.run()

    except AnsibleOptionsError as e:
        cli.parser.print_help()
        display.error(to_text(e), wrap_text=False)
        exit_code = 5
    except AnsibleParserError as e:
        display.error(to_text(e), wrap_text=False)
        exit_code = 4
    # TQM takes care of these, but leaving comment to reserve the exit codes
    # except AnsibleHostUnreachable as e:
    #     display.error(str(e))
    #     exit_code = 3
    # except AnsibleHostFailed as e:
    #     display.error(str(e))
    #     exit_code = 2
    except AnsibleError as e:
        display.error(to_text(e), wrap_text=False)
        exit_code = 1
    except KeyboardInterrupt:
        display.error("User interrupted execution")
        exit_code = 99
    except Exception as e:
        if C.DEFAULT_DEBUG:
            # Show raw stacktraces in debug mode, It also allow pdb to
            # enter post mortem mode.
            raise
        have_cli_options = bool(context.CLIARGS)
        display.error("Unexpected Exception, this is probably a bug: %s" % to_text(e), wrap_text=False)
        if not have_cli_options or have_cli_options and context.CLIARGS['verbosity'] > 2:
            log_only = False
            if hasattr(e, 'orig_exc'):
                display.vvv('\nexception type: %s' % to_text(type(e.orig_exc)))
                why = to_text(e.orig_exc)
                if to_text(e) != why:
                    display.vvv('\noriginal msg: %s' % why)
        else:
            display.display("to see the full traceback, use -vvv")
            log_only = True
        display.display(u"the full traceback was:\n\n%s" % to_text(traceback.format_exc()), log_only=log_only)
        exit_code = 250
    finally:
        # Remove ansible tmpdir
        shutil.rmtree(C.DEFAULT_LOCAL_TMP, True)

    sys.exit(exit_code)

View File

@ -0,0 +1,17 @@
#!/bin/bash
# -*- mode: sh; tab-width: 8; encoding: utf-8-unix -*-
# gridfire front-end for keepassxc-cli: gridfire collects the database
# password and keepassxc-cli reads it from stdin via --pw-stdin.
prog=$(basename $0 .bash)
PREFIX=/usr/local
ROLE=toxcore
DEBUG=1
. /usr/local/bin/usr_local_tput.bash
. /usr/local/bin/gridfire.rc
COMMAND=$1
shift
RARGS="--pw-stdin"
# last two words of the argument list, used only to label the prompt
tail=$(echo $@ | sed -e 's/.* \([^ ]*\) \([^ ]*\)/\1 \2/')
exec $PREFIX/bin/gridfire -H "keepassxc-cli.bash $tail" -- \
	keepassxc-cli.bash $COMMAND $RARGS "$@"

View File

@ -0,0 +1,20 @@
#!/bin/sh
# -*- mode: sh; tab-width: 8; encoding: utf-8-unix -*-
# gridfire front-end for the keepassxc GUI: gridfire collects the database
# password and keepassxc reads it from stdin via --pw-stdin.
prog=$(basename $0 .bash)
PREFIX=/usr/local
ROLE=toxcore
DEBUG=1
. /usr/local/bin/usr_local_tput.bash
. /usr/local/bin/gridfire.rc
# last two words of the argument list, used only to label the prompt
tail=$(echo $@ | sed -e 's/.* \([^ ]*\) \([^ ]*\)/\1 \2/')
LARGS="--bg"
LARGS=""	# "--bg" kept above for reference but currently disabled
RARGS="--pw-stdin"
# log the exact command we are about to exec, then run it
INFO $PREFIX/bin/gridfire -H "keepassxc $tail" $LARGS -- \
	keepassxc $RARGS "$@"
exec $PREFIX/bin/gridfire -H "keepassxc $tail" $LARGS -- \
	keepassxc $RARGS "$@"

View File

@ -0,0 +1,58 @@
#!/bin/sh
# -*- mode: sh; tab-width: 8; encoding: utf-8-unix -*-
# gridfire front-end for kpsh (keepass shell): gridfire collects the
# database password and pipes it to kpsh on stdin (--stdin). With no
# arguments, print kpsh's option summary and exit 1.
prog=`basename $0 .bash`
PREFIX=/usr/local
ROLE=toxcore
. /usr/local/bin/usr_local_tput.bash || exit 2
if [ "$#" -eq 0 ] ; then
echo USAGE: $0 [options]
cat << EOF
USAGE:
--password PASSWORD Database password.
--password-command PW_CMD
Password will be obtained from the output of this
command.
--keyfile KEYFILE Key file for unlocking database.
--pinentry PINENTRY Command used to run pinentry.
-c COMMAND, --command COMMAND
Command to execute. If command arguments contain
spaces, they must be enclosed in double quotes. With
this switch, kpsh will be started in non-interactive
mode. A list of available commands can be found by
running 'kpsh -c help':
{open,unlock,lock,db,ls,show,add,edit,delete,move,autotype,exit,echo,sleep,help}
open Change currently opened database.
unlock Unlock currently opened database.
lock Lock a database.
db Query opened database info.
ls List contents of database.
show Show contents of entry.
add Add a new entry if it doesn't exist yet.
edit Edit existing entry
delete Delete entry from database
move Move entry to the new path.
autotype Auto-type sequence of entry fields.
exit Exit shell.
echo Display a message.
sleep Sleep for a given number of seconds.
--prompt PROMPT Text used by shell for prompt.
-d, --daemon Start as a daemon listening on a socket given by
--socket-path
-s SOCKET_PATH, --socket-path SOCKET_PATH
Path to the socket which will be created in daemon
mode (default: /tmp/kpsh-$UID.sock).
USAGE: $0 -- kpsh-args
`basename $0` arguments go before the -- kpsh args go after
EOF
exit 1
fi
# FixMe: nonewline
# gridfire --stdin: write the collected password to kpsh's stdin
exec $PREFIX/bin/gridfire -H "kpsh password on stdin" --stdin -- \
kpsh "$@"

View File

@ -0,0 +1,187 @@
#!/bin/bash
# -*- mode: sh; tab-width: 8; encoding: utf-8-unix -*-
# gridfire front-end for openssl: routes each openssl subcommand to the
# password-source flag it understands (-passin / -passout / -pass ...
# pass:stdin) so the passphrase collected by gridfire arrives on stdin.
prog=`basename $0 .bash`
PREFIX=/usr/local
ROLE=toxcore
. /usr/local/bin/gridfire.rc
# full list of openssl subcommands, used only for the --help output
COMMANDS=(
asn1parse ca ciphers cms crl crl2pkcs7 dgst dhparam dsa dsaparam ec
ecparam enc engine errstr gendsa genpkey genrsa help list nseq ocsp
passwd pkcs12 pkcs7 pkcs8 pkey pkeyparam pkeyutl prime rand rehash req
rsa rsautl s_client s_server s_time sess_id smime speed spkac srp
storeutl ts verify version x509 dgst enc
)
# for elt in ${COMMANDS[*]}; do echo INFO: openssl $elt;openssl $elt -help;done
usage () {
echo "USAGE: recognized commands are - ${PASSIN_COMMANDS[*]} ${PASSOUT_COMMANDS[*]} ${PASS_COMMANDS[*]}"
return 0
}
if [ "$#" -eq 0 ] || [ "$1" = '--help' ] || [ "$1" = '-h' ] ; then
echo USAGE: $0 command [options]
cat << EOF
Recognized commands:
-passin commands: -passin pass:stdin
ca \
-passin val Input file pass phrase source
cms
-pwri_password val (No additional info)
-passin val Input file pass phrase source
dgst
-passin val Input file pass phrase source
pkeyutl
-passin val Input file pass phrase source
rsautl
-passin val Input file pass phrase source
smime
-passin val Input file pass phrase source
spkac
-passin val Input file pass phrase source
storeutl
-passin val Input file pass phrase source
ts
-passin val Input file pass phrase source
x509
-passin val Private key password/pass-phrase source
dgst
-passin val Input file pass phrase source
-passout commands: -passout pass:stdin
gendsa
-passout val Output file pass phrase source
genrsa
-passout val Output file pass phrase source
-pass commands: -pass pass:stdin
enc
-pass val Passphrase source
genpkey
-pass val Output file pass phrase source
Options:
pass:stdin
pass:fd0
EOF
exit 1
fi
COMMAND=$1
# FixMe: make sure command is first
if [ $COMMAND = '-help' ] || [ $COMMAND = '--help' ] ; then
usage
echo "USAGE: all openssl commands are - ${COMMANDS[*]}"
exit 0
fi
if [ "${COMMAND:0:1}" = "-" ] ; then
echo "USAGE: command args - command must precede args"
usage
exit 1
fi
# NOTE(review): the trailing backslashes below splice the "# ...=(" comment
# onto the "case" line and join each multi-line pattern list into one
# logical line; the quoted '-passin val ...' strings are patterns that can
# never match and serve as inline documentation. Do not re-flow this case
# statement without re-checking it with "bash -n".
case $COMMAND in \
# PASSIN_COMMANDS=(
ca \
| cms \
| dgst \
| pkeyutl \
| rsautl \
| smime \
| spkac \
| storeutl \
| ts \
| x509 \
| dgst \
) # FixMe: check if already there
LARGS="-passin pass:stdin"
$PREFIX/bin/gridfire -H "openssl $LARGS" -- openssl $LARGS "$@" || exit $?
;;
# PASSOUT_COMMANDS=(
gendsa \
| genrsa \
) # FixMe: check if already there
LARGS="-passout pass:stdin"
$PREFIX/bin/gridfire -H "openssl $LARGS" -- openssl $LARGS "$@" || exit $?
;;
# PASS_COMMANDS=( \
enc \
| genpkey \
) # FixMe: check if already there
LARGS="-pass pass:stdin"
$PREFIX/bin/gridfire -H "openssl $LARGS" -- openssl $LARGS "$@" || exit $?
;;
# PASSNOV_COMMANDS=( \
passwd \
| '-in infile Read passwords from file' \
| '-noverify Never verify when reading password from terminal' \
| '-stdin Read passwords from stdin' \
) # FixMe: check if already there
#? conflicts with -in?
LARGS=" -noverify -stdin"
bash $PREFIX/bin/gridfire -H "openssl $LARGS" -- openssl $LARGS "$@" || exit $?
;;
# PASSINOUT_COMMANDS=( \
pkcs8 \
| '-passin val Input file pass phrase source' \
| '-passout val Output file pass phrase source' \
| pkey \
| '-passin val Input file pass phrase source' \
| '-passout val Output file pass phrase source' \
| rsa \
| '-passout val Output file pass phrase source' \
| '-passin val Input file pass phrase source' \
| srp \
| '-passin val Input file pass phrase source' \
| '-passout val Output file pass phrase source' \
) # FixMe: check if already there
# FixMe: fd:
LARGS="--passin"
passin=`sh $PREFIX/bin/gridfire -H "openssl $LARGS" `
LARGS="-passin pass:$passin -passout pass:stdin"
bash $PREFIX/bin/gridfire -H "openssl -passout pass:stdin" -- openssl $LARGS "$@" || exit $?
esac
exit 0
# NOTE(review): everything below this "exit 0" is unreachable scratch
# material kept for reference - bash exits above before ever parsing these
# lines. If any of it is re-enabled it must be moved back inside the case
# statement; as written the bare ")" lines are not valid top-level syntax.
# PASSDPASS_COMMANDS=( \
s_server \
# -pass val Private key file pass phrase source \
# -dpass val Second private key file pass phrase source \
) # FixMe: check if already there
# FixMe: fd:
pass=`sh $PREFIX/bin/gridfire.bash`
LARGS="-pass pass:$pass -dpass pass:stdin"
bash $PREFIX/bin/gridfire -- openssl $LARGS "$@" || exit $?
;; # PASSKPASS_COMMANDS=( \
enc \
# -pass val Passphrase source \
# -kfile infile Read passphrase from file \
) # FixMe: check if already there
# FixMe: fd:
#?pass=`sh $PREFIX/bin/gridfire.bash`
#?LARGS="-pass pass:$pass -dpass pass:stdin"
LARGS="-pass pass:stdin"
$PREFIX/bin/gridfire -H "openssl $LARGS" -- openssl $LARGS "$@" || exit $?
;; # PASSINOUTWORD_COMMANDS=( \ \
pkcs12 \
# -twopass Separate MAC, encryption passwords \
# -passin val Input file pass phrase source \
# -passout val Output file pass phrase source \
# -password val Set import/export password source \
) # FixMe: check if already there
# FixMe: pass: prefix
$PREFIX/bin/gridfire -H "-passin pass:" --single "passin" -- sh $PREFIX/bin/gridfire -H "-passout stdin" -- openssl "$@" || exit $?
esac

View File

@ -0,0 +1,27 @@
#!/bin/sh
# -*- mode: sh; tab-width: 8; encoding: utf-8-unix -*-
# gridfire front-end for secret-tool: gridfire collects the secret and
# pipes it to secret-tool on stdin (--stdin). No arguments prints usage.
prog=$(basename $0 .bash)
PREFIX=/usr/local
ROLE=toxcore
. /usr/local/bin/usr_local_tput.bash || exit 2
if [ "$#" -eq 0 ] ; then
echo USAGE: $0 [options]
cat << EOF
usage: secret-tool store --label='label' attribute value ...
secret-tool lookup attribute value ...
secret-tool clear attribute value ...
secret-tool search [--all] [--unlock] attribute value ...
USAGE: $0 -- secret-tool-args
`basename $0` arguments go before the -- secret-tool args go after
EOF
exit 1
fi
# FixMe: nonewline
exec $PREFIX/bin/gridfire -H "secret-tool password on stdin" --stdin -- \
	secret-tool "$@"

View File

@ -0,0 +1,11 @@
#!/bin/bash
# -*- mode: sh; tab-width: 8; encoding: utf-8-unix -*-
# Collect the GnuPG passphrase with gridfire and hand it to staticgpg on
# file descriptor 0 (--passphrase-fd 0).
prog=$(basename $0 .bash)
PREFIX=/usr/local
ROLE=toxcore
#? --pinentry-mode loopback
exec $PREFIX/bin/gridfire -H "staticgpg --passphrase-fd 0" -- \
	staticgpg --passphrase-fd 0 "$@"

View File

@ -0,0 +1,104 @@
#!/bin/bash
# -*- mode: sh; tab-width: 8; encoding: utf-8-unix -*-
# gridfire front-end for the tomb(1) wrapper at $EXE: gridfire collects the
# passphrase and feeds it to tomb on stdin.
prog=`basename $0 .bash`
PREFIX=/usr/local
ROLE=toxcore
PYVER=3
EXE=/var/local/bin/tomb.bash
. /usr/local/bin/usr_local_tput.bash || exit 2
. /usr/local/bin/gridfire.rc
# python3 -c "import keyring.util.platform_; print(keyring.util.platform_.config_root())"
# ~/.local/share/python_keyring
# what goes on stdin - the passwd to the keyfile with the keyfile as an arg?
# or open the keyfile?
# passwd from gridfire or from keepass
# Print tomb's command/option summary (mirrors tomb's own help text).
# NOTE(review): the RESIZER/QRENCODE/STEGHIDE/CLOAKIFY/DECLOAKIFY/SPHINX/KDF
# feature flags are assumed to come from gridfire.rc or the environment -
# confirm against tomb.bash before relying on them.
usage() {
echo "Syntax: tomb [options] command [arguments]"
echo
echo " // Creation:"
echo " dig create a new empty TOMB file of size -s in MiB"
echo " forge create a new KEY file and set its password"
echo " lock installs a lock on a TOMB to use it with KEY"
echo
echo " // Operations on tombs:"
echo " open open an existing TOMB (-k KEY file or - for stdin)"
echo " index update the search indexes of tombs"
echo " search looks for filenames matching text patterns"
echo " list list of open TOMBs and information on them"
echo " ps list of running processes inside open TOMBs"
echo " close close a specific TOMB (or 'all')"
echo " slam slam a TOMB killing all programs using it"
[[ $RESIZER == 1 ]] && {
echo " resize resize a TOMB to a new size -s (can only grow)"
}
echo
echo " // Operations on keys:"
echo " passwd change the password of a KEY (needs old pass)"
echo " setkey change the KEY locking a TOMB (needs old key and pass)"
echo
[[ $QRENCODE == 1 ]] && {
echo " // Backup on paper:"
echo " engrave makes a QR code of a KEY to be saved on paper"
echo
}
[[ $STEGHIDE == 1 || $CLOAKIFY == 1 || $DECLOAKIFY == 1 ]] && {
echo " // Steganography:"
[[ $STEGHIDE == 1 ]] && {
echo " bury hide a KEY inside a JPEG image (for use with -k)"
echo " exhume extract a KEY from a JPEG image (prints to stdout)"
}
[[ $CLOAKIFY == 1 ]] && {
echo " cloak transform a KEY into TEXT using CIPHER (for use with -k)"
}
[[ $DECLOAKIFY == 1 ]] && {
echo " uncloak extract a KEY from a TEXT using CIPHER (prints to stdout)"
}
echo
}
echo "Options:"
echo
echo " -s size of the tomb file when creating/resizing one (in MiB)"
echo " -k path to the key to be used ('-k -' to read from stdin)"
echo " -n don't launch the execution hooks found in tomb"
echo " -p preserve the ownership of all files in tomb"
echo " -o options passed to commands: open, lock, forge (see man)"
echo " -f force operation (i.e. even if swap is active)"
echo " -g use a GnuPG key to encrypt a tomb key"
echo " -r provide GnuPG recipients (separated by comma)"
echo " -R provide GnuPG hidden recipients (separated by comma)"
[[ $SPHINX == 1 ]] && {
echo " --sphx-user user associated with the key (for use with pitchforkedsphinx)"
echo " --sphx-host host associated with the key (for use with pitchforkedsphinx)"
}
[[ $KDF == 1 ]] && {
echo " --kdf forge keys armored against dictionary attacks"
}
echo
echo " -q run quietly without printing informations"
echo " -D print debugging information at runtime"
}
# FixMe: make sure command is first
if [ "$#" -eq 0 ] || [ "$1" = '--help' -o "$1" = 'help' ] ; then
#	usage
#	exit 0
	:
fi
# last two words of the command line, used only to label the prompt.
# BUG FIX: $tail was previously interpolated into LARGS *before* tail was
# computed, so the -H label was always just "tomb "; compute tail first.
tail=`echo $@ | sed -e 's/.* \([^ ]*\) \([^ ]*\)/\1 \2/'`
# BUG FIX: LARGS used to hold literal escaped quotes ("-H \"tomb ...\"")
# and was expanded unquoted, word-splitting the label; an array keeps the
# label as one argument.
LARGS=(-H "tomb $tail")
if [[ "$*" =~ "-- " ]];then
	# caller already supplied "--": splice $EXE in right after it
	RARGS=`echo $*|sed -e "s/-- /-- $EXE/"`
	exec $PREFIX/bin/gridfire "${LARGS[@]}" $RARGS
else
	exec $PREFIX/bin/gridfire "${LARGS[@]}" -- $EXE "$@"
fi

View File

@ -0,0 +1,32 @@
#!/bin/bash
# -*- mode: sh; tab-width: 8; encoding: utf-8-unix -*-
# gridfire front-end for truecrypt-console: gridfire prompts twice for the
# password (--double) and writes it to truecrypt-console's stdin.
prog=`basename $0 .bash`
PREFIX=/usr/local
ROLE=toxcore
DEBUG=1
. /usr/local/bin/usr_local_tput.bash
. /usr/local/bin/gridfire.rc
usage () {
echo USAGE: $0 [options]
cat << EOF
USAGE: $0 [--arg password ] -- truecrypt-args
`basename $0` arguments go before the -- truecrypt args go after
MINIMUM of 2 args for truecrypt
EOF
exit 1
}
if [ "$#" -eq 0 ] ; then
usage
fi
if [ "$#" -lt 2 ] ; then
usage
fi
# last two words of the argument list, used only to label the prompt
tail=`sed -e 's/.* \([^ ]*\) \([^ ]*\)/\1 \2/' <<< $@`
RARGS="--non-interactive"
exec $PREFIX/bin/gridfire --double password -E -B -H "truecrypt-console $tail" -- \
$PREFIX/bin/truecrypt-console.bash $RARGS "$@"
# FixMe: --new-password=<str> New password

View File

@ -0,0 +1,25 @@
#!/bin/bash
# -*- mode: sh; tab-width: 8; encoding: utf-8-unix -*-
# gridfire front-end for the truecrypt GUI wrapper: gridfire prompts twice
# for the password (--double) and pipes it to truecrypt on stdin.
prog=`basename $0 .bash`
PREFIX=/usr/local
ROLE=toxcore
. /usr/local/bin/usr_local_tput.bash || exit 2
. /usr/local/bin/gridfire.rc
if [ "$#" -eq 0 ] ; then
echo USAGE: $0 [options]
cat << EOF
USAGE: $0 [--arg password ] -- truecrypt-args
`basename $0` arguments go before the -- truecrypt args go after
EOF
exit 1
fi
# last two words of the argument list, used only to label the prompt
tail=`sed -e 's/.* \([^ ]*\) \([^ ]*\)/\1 \2/' <<< $@`
exec $PREFIX/bin/gridfire -E --double password -H "truecrypt $tail" -- \
$PREFIX/bin/truecrypt.bash "$@"
# FixMe: --new-password=<str> New password

View File

@ -0,0 +1,36 @@
#!/bin/bash
# -*- mode: sh; tab-width: 8; encoding: utf-8-unix -*-
# gridfire front-end for veracrypt-console: gridfire collects the volume
# password and hands it over on stdin.
prog=$(basename $0 .bash)
PREFIX=/usr/local
ROLE=toxcore
DEBUG=1
. /usr/local/bin/usr_local_tput.bash
. /usr/local/bin/gridfire.rc
usage () {
echo USAGE: $0 [options]
cat << EOF
USAGE: $0 [--arg password ] -- veracrypt-args
`basename $0` arguments go before the -- veracrypt args go after
MINIMUM of 2 args for veracrypt
EOF
exit 1
}
# veracrypt needs at least two arguments; zero arguments lands here too
if [ "$#" -lt 2 ] ; then
usage
fi
RARGS=""
# have gridfire read from stdin unless the caller already asked for it
[[ "$*" =~ "--stdin" ]] || LARGS="--stdin $LARGS"
#no [[ "$*" =~ "--create" ]] && LARGS="--repeat $LARGS"
#no [[ "$*" =~ "--new-password=" ]] && LARGS="--repeat $LARGS"
# last two words of the command line, used only to label the prompt
tail=$(echo $@ | sed -e 's/.* \([^ ]*\) \([^ ]*\)/\1 \2/')
$PREFIX/bin/gridfire $LARGS -H "veracrypt-console $tail" -- \
$PREFIX/bin/veracrypt-console.bash $RARGS "$@"
# FixMe: --new-password=<str> New password

View File

@ -0,0 +1,17 @@
#!/bin/bash
# -*- mode: sh; tab-width: 8; encoding: utf-8-unix -*-
# gridfire front-end for the veracrypt GUI wrapper: gridfire collects the
# volume password and veracrypt reads it from stdin (--stdin).
prog=$(basename $0 .bash)
PREFIX=/usr/local
ROLE=toxcore
. /usr/local/bin/usr_local_tput.bash
. /usr/local/bin/gridfire.rc
# last two words of the command line, used only to label the prompt
tail=$(echo $@ | sed -e 's/.* \([^ ]*\) \([^ ]*\)/\1 \2/')
RARGS=""
# add --stdin for veracrypt unless the caller supplied it already
[[ "$*" =~ "--stdin" ]] || RARGS="--stdin $RARGS"
exec $PREFIX/bin/gridfire -H "veracrypt $tail" -- \
$PREFIX/bin/veracrypt.bash $RARGS "$@"
# FixMe: --new-password=<str> New password

View File

@ -0,0 +1,15 @@
#!/bin/bash
# -*- mode: sh; tab-width: 8; encoding: utf-8-unix -*-
# Launch the gridfire pinentry replacement with PYTHONPATH pointing at the
# bundled pyassuan and gridfire sources.
prog=$(basename $0 .bash)
PREFIX=/usr/local
ROLE=toxcore
[ -f $PREFIX/bin/gridfire.rc ] && . $PREFIX/bin/gridfire.rc
# bail out early on systems without /run
[ -e /run ] || exit 1
cd $PREFIX/src/gridfire || exit 3
PYTHONPATH=$PREFIX/src/gridfire/pyassuan:$PREFIX/src/gridfire:$PWD
export PYTHONPATH
exec $PREFIX/bin/python3.sh bin/pinentry_gridfire.py "$@"

View File

@ -1,36 +1,52 @@
#!/bin/sh #!/bin/sh
# -*- mode: sh; tab-width: 8; encoding: utf-8-unix -*- # -*- mode: sh; tab-width: 8; encoding: utf-8-unix -*-
# on stdout - messages on stderr # retval on stdout - messages on stderr
. /usr/local/bin/usr_local_tput.bash
prog=`basename $0 .bash` prog=`basename $0 .bash`
PREFIX=/usr/local PREFIX=/usr/local
ROLE=base ROLE=base
base=AnsI AnsI=AnsI
# quiet # quiet
[ "$#" -eq 0 ] && exit 1 [ "$#" -eq 0 ] && exit 1
VARIABLE=$1 VARIABLE=$1
shift
[ "$#" -eq 0 ] && base=`hostname` || base=$1
[ -f $PREFIX/etc/testforge/testforge.bash ] && . $PREFIX/etc/testforge/testforge.bash [ -f $PREFIX/etc/testforge/testforge.bash ] && . $PREFIX/etc/testforge/testforge.bash
[ -n "$TESTFORGE_ANSIBLE_SRC" ] || TESTFORGE_ANSIBLE_SRC=/g/TestForge/src/ansible [ -n "$PLAY_ANSIBLE_SRC" ] || PLAY_ANSIBLE_SRC=$BASE_ANSIBLE_SRC
[ -z "$PLAY_ANSIBLE_SRC" ] && ERROR export "PLAY_ANSIBLE_SRC" >&2 && exit 3
[ ! -d "$PLAY_ANSIBLE_SRC" ] && ERROR ! -d "PLAY_ANSIBLE_SRC" >&2 && exit 4
[ ! -f "$PLAY_ANSIBLE_SRC"/hosts.yml ] && ERROR ! -f "PLAY_ANSIBLE_SRC"/hosts.yml >&2 && exit 4
name=`hostname` DBUG ansible-inventory -i $PLAY_ANSIBLE_SRC/hosts.yml \
--playbook-dir=$PLAY_ANSIBLE_SRC \
if [ -d "$TESTFORGE_ANSIBLE_SRC" ] && [ -f $TESTFORGE_ANSIBLE_SRC/hosts.yml ] ; then --host=$base >&2
base=$name ansible-inventory -i $PLAY_ANSIBLE_SRC/hosts.yml \
ansible-inventory -i $TESTFORGE_ANSIBLE_SRC/hosts.yml \ --playbook-dir=$PLAY_ANSIBLE_SRC \
--playbook-dir=$TESTFORGE_ANSIBLE_SRC \
--host=$base >> /tmp/${AnsI}$$.json 2> /tmp/${AnsI}$$.err --host=$base >> /tmp/${AnsI}$$.json 2> /tmp/${AnsI}$$.err
if [ $? -eq 0 -a -f /tmp/${AnsI}$$.json ] ; then retval=$?
if [ $retval -eq 0 ] ; then
[ ! -s /tmp/${AnsI}$$.json ] && ERROR empty /tmp/${AnsI}$$.json >&2 && exit 4
#!? export #!? export
VALUE=`jq .$VARIABLE </tmp/${AnsI}$$.json | sed -e 's/,//'|xargs echo` VALUE=`jq .$VARIABLE < /tmp/${AnsI}$$.json | sed -e 's/,//'|xargs echo 2>/tmp/${AnsI}$$.err`
# [ -n "$DEBUG" ] && echo >&2 "DEBUG: $prog base=$base VALUE=$VALUE" jretval=$?
if [ $jretval -eq 0 ] ; then
[ -n "$DEBUG" ] && DBUG "$prog base=$base VALUE=$VALUE" >&2
[ "$VALUE" = "null" ] && VALUE="" [ "$VALUE" = "null" ] && VALUE=""
echo -n "$VALUE" echo -n "$VALUE"
else
WARN $VARIABLE jretval=$jretval /tmp/${AnsI}$$.err >&2
exit 7$retval
fi fi
rm -f /tmp/${AnsI}$$.json else
fi ERROR $VARIABLE retval=$retval /tmp/${AnsI}$$.json /tmp/${AnsI}$$.err >&2
cat /tmp/${AnsI}$$.err >&2
exit 8
fi
# rm -f /tmp/${AnsI}$$.json
exit 0 exit 0

View File

@ -1,68 +1,3 @@
#!/bin/sh
# -*- mode: sh; tab-width: 8; coding: utf-8-unix -*-
# Install/check/lint/test/update wrapper for drwetter's testssl.sh.
# No arguments: clone the repo under $PREFIX/src and generate a
# $PREFIX/bin/testssl.bash wrapper. Subcommands: check | lint | test | update.
# Exit codes follow the house N$? convention (1x check, 2x lint, 3x test, 7x update).
prog=`basename $0 .bash`
PREFIX=/usr/local
ROLE=toxcore
[ -f /usr/local/etc/testforge/testforge.bash ] && \
    . /usr/local/etc/testforge/testforge.bash
[ -n "$TESTF_VAR_LOCAL" ] && PREFIX=$TESTF_VAR_LOCAL
# https://security.stackexchange.com/questions/46197/force-a-specific-ssl-cipher
# https://code.google.com/p/chromium/issues/detail?id=58831
DIR=testssl.sh
GITHUB_USER=drwetter
GITHUB_DIR=$DIR
. $PREFIX/src/var_local_src.bash
BINS=testssl
cd $PREFIX/src || exit 2
WD=$PWD
if [ "$#" -eq 0 ] ; then
    [ -d $DIR ] || git clone --depth=1 https://github.com/$GITHUB_USER/$DIR
    for elt in $BINS ; do
	file=$PREFIX/bin/$elt.bash
	if [ ! -f $file ] ; then
	    # Unquoted EOF: $PREFIX/$DIR expand now so the wrapper gets a
	    # concrete path; \$@ stays literal so the generated wrapper
	    # forwards its own arguments at run time.
	    cat > $file << EOF
#!/bin/sh
# -*- mode: sh; fill-column: 75; tab-width: 8; coding: utf-8-unix -*-
cd $PREFIX/src/$DIR || exit 1
exec bash testssl.sh "\$@"
EOF
	    chmod +x $file
	fi
    done
    exit 0
elif [ "$1" = 'check' ] ; then # 1*
    ols_test_bins && exit 0 || exit 1$?
elif [ "$1" = 'lint' ] ; then # 2*
    /var/local/bin/pydev_shellcheck.bash testssl.sh/testssl.sh || exit 2$?
elif [ "$1" = 'test' ] ; then # 3*
    for bin in $BINS ; do
	$PREFIX/bin/$bin.bash --help >/dev/null || exit 3$?
    done
elif [ "$1" = 'update' ] ; then # 7*
    ols_are_we_connected || exit 0
    cd $PREFIX/src/$DIR || exit 70
    git pull || exit 7$?
    #error: RPC failed; curl 92 HTTP/2 stream 5 was not closed cleanly before end of the underlying stream
    #error: 1970 bytes of body are still expected
    #fetch-pack: unexpected disconnect while reading sideband packet
    #fatal: early EOF
    #fatal: fetch-pack: invalid index-pack output
fi
# wget -P https://testssl.sh/testssl.sh
exit 0

View File

@ -36,8 +36,6 @@ warns=0
WLOG="$TOXCORE_LOG_DIR"/$ly/W$prog$$.log WLOG="$TOXCORE_LOG_DIR"/$ly/W$prog$$.log
ELOG="$TOXCORE_LOG_DIR"/$ly/E$prog$$.log ELOG="$TOXCORE_LOG_DIR"/$ly/E$prog$$.log
#?ols_make_testforge_logs $TOXCORE_LOG_DIR
[ -d /usr/local/share/doc ] || mkdir -p /usr/local/share/doc [ -d /usr/local/share/doc ] || mkdir -p /usr/local/share/doc
[ -d /var/local/share/doc/txt ] && [ ! -d /usr/local/share/doc/txt ] && \ [ -d /var/local/share/doc/txt ] && [ ! -d /usr/local/share/doc/txt ] && \
mv /var/local/share/doc/txt /usr/local/share/doc/txt && \ mv /var/local/share/doc/txt /usr/local/share/doc/txt && \
@ -130,7 +128,6 @@ warns=`grep -c WARN: "$WLOG"`
fi fi
[ $warns -eq 0 -a $errs -eq 0 ] && \ [ $warns -eq 0 -a $errs -eq 0 ] && \
ols_clean_testforge_logs $TOXCORE_LOG_DIR && \
INFO "No $ly errors in $TOXCORE_LOG_DIR" INFO "No $ly errors in $TOXCORE_LOG_DIR"
exit 0 exit 0

View File

@ -0,0 +1,39 @@
#!/bin/sh
# -*- mode: sh; indent-tabs-mode: nil; tab-width: 2; coding: utf-8-unix -*-
# Fetch the signing key $KEY from public PGP keyservers through the
# configured proxy, as a proxy/keyserver health check.
# Exit codes: 2/210/220 = MIT keyserver failures (branch currently disabled),
# 3/310/320 = keyserver.ubuntu.com failures, 0 = key retrieved.
ROLE=toxcore
prog=$(basename $0 .bash)
# key to look up on the keyservers
KEY=0x066DAFCB81E42C40
TIMEO=15
# verbose + server headers, with $TIMEO applied to each wget phase
WARGS="-v -S --dns-timeout $TIMEO --connect-timeout $TIMEO --read-timeout $TIMEO"
# exports the *_proxy environment; also expected to provide DBUG/ERROR
. /usr/local/bin/proxy_export.bash
# '[ is = dead ]' is always false: the sks/MIT keyserver block is
# deliberately disabled (service is dead) but kept for reference.
if [ is = dead ] ; then
# URL="http://hkps.pool.sks-keyservers.net:80/pks/lookup?op=get&options=mr&search=$KEY"
URL="http://pgp.mit.edu:80/pks/lookup?op=get&options=mr&search=$KEY"
DBUG wget $URL
wget $WARGS -o /tmp/2$$.log -O /tmp/2$$.html $URL || {
ERROR retval=$? ; cat /tmp/2$$.log; exit 2 ;
}
grep -q -e '-----BEGIN PGP PUBLIC KEY BLOCK' /tmp/2$$.html || exit 210
grep -q 'HTTP/1.1 200 OK' /tmp/2$$.log || exit 220
fi
URL="http://keyserver.ubuntu.com:80/pks/lookup?op=get&options=mr&search=$KEY"
DBUG wget $URL
wget $WARGS -o /tmp/3$$.log -O /tmp/3$$.html $URL || {
ERROR retval=$? /tmp/3$$.log
exit 3
}
# the body must contain an ASCII-armored public key block ...
grep -q -e '-----BEGIN PGP PUBLIC KEY BLOCK' /tmp/3$$.html || {
ERROR '-----BEGIN PGP PUBLIC KEY BLOCK' /tmp/3$$.html
exit 310
}
# ... and the server must have answered 200 OK
grep -q 'HTTP/1.1 200 OK' /tmp/3$$.log || {
ERROR NO 'HTTP/1.1 200 OK' /tmp/3$$.log
exit 320
}
exit 0

View File

@ -12,6 +12,9 @@ PREFIX=/usr/local
ROLE=toxcore ROLE=toxcore
[ -f /usr/local/etc/testforge/testforge.bash ] || \ [ -f /usr/local/etc/testforge/testforge.bash ] || \
. /usr/local/etc/testforge/testforge.bash . /usr/local/etc/testforge/testforge.bash
. /usr/local/bin/usr_local_tput.bash
TOXCORE_LOG_DIR=$PREFIX/var/log TOXCORE_LOG_DIR=$PREFIX/var/log
[ -d $TOXCORE_LOG_DIR ] || mkdir -p $TOXCORE_LOG_DIR [ -d $TOXCORE_LOG_DIR ] || mkdir -p $TOXCORE_LOG_DIR
@ -29,10 +32,11 @@ ELOG="$TOXCORE_LOG_DIR"/$ly/E$prog$$.log
#?ols_make_testforge_logs $TOXCORE_LOG_DIR #?ols_make_testforge_logs $TOXCORE_LOG_DIR
[ -d "$TOXCORE_LOG_DIR"/$ly/ ] && \
find "$TOXCORE_LOG_DIR"/$ly/ -type f -name W${prog}*.log \ find "$TOXCORE_LOG_DIR"/$ly/ -type f -name W${prog}*.log \
-o -name E${prog}*.log -mtime +1 -delete -o -name E${prog}*.log -mtime +1 -delete
if [ -d /etc/libvirt/qemu ] ; then if [ -d /etc/libvirt/qemu ] && [ $MYID -eq 0 ] ; then
elt=qemu elt=qemu
DBUG elt=$elt DBUG elt=$elt
[ -d /var/lib/libvirt/dnsmasq/ ] && \ [ -d /var/lib/libvirt/dnsmasq/ ] && \
@ -43,8 +47,8 @@ if [ -d /etc/libvirt/qemu ] ; then
fi fi
# -%d # -%d
if ls /var/log/libvirt/qemu/*.log 2>/dev/null ; then if ls /var/log/libvirt/qemu/*.log 2>/dev/null >/dev/null ; then
sudo grep ^`date +%Y-%m`.*warning /var/log/libvirt/qemu/*.log | tee -a $WLOG grep ^`date +%Y-%m`.*warning /var/log/libvirt/qemu/*.log | tee -a $WLOG
fi fi
fi fi

View File

@ -5,12 +5,13 @@
ROLE=toxcore ROLE=toxcore
RCFILE=/usr/local/etc/testforge/pylint.rc RCFILE=/usr/local/etc/testforge/pylint.rc
[ -n "$PREFIX" ] || PREFIX=/usr/local [ -n "$PREFIX" ] || PREFIX=/usr/local
[ -n "$PYVER" ] || PYVER=3 [ -n "$PYVER" ] || PYVER=3
[ -n "$PYTHON_EXE_MSYS" ] || PYTHON_EXE_MSYS=python$PYVER.sh [ -n "$PYTHON_EXE_MSYS" ] || PYTHON_EXE_MSYS=$PREFIX/bin/python$PYVER.sh
[ -x "$PYTHON_EXE_MSYS" ] || return 2 [ -x "$PYTHON_EXE_MSYS" ] || exit 2
[ -f . /usr/local/etc/testforge/testforge.bash ] && \ [ -f /usr/local/etc/testforge/testforge.bash ] && \
. /usr/local/etc/testforge/testforge.bash . /usr/local/etc/testforge/testforge.bash
[ -z "$PYVER" ] && PYVER=3 [ -z "$PYVER" ] && PYVER=3
@ -23,7 +24,7 @@ P="BASE_PYTHON${PYVER}_MINOR"
declare -a LARGS declare -a LARGS
LARGS=( --recursive y --verbose --py-version "$PYTHON_MINOR" --output-format colorized ) LARGS=( --recursive y --verbose --py-version "$PYTHON_MINOR" --output-format colorized )
[ -f $RCFILE ] || exit 2 [ -f $RCFILE ] || exit 3
LARGS+=( --rcfile $RCFILE ) LARGS+=( --rcfile $RCFILE )
export PYTHONPATH=$PWD export PYTHONPATH=$PWD

View File

@ -9,7 +9,7 @@ RCFILE=/usr/local/etc/testforge/pylint.rc
[ -n "$PREFIX" ] || PREFIX=/usr/local [ -n "$PREFIX" ] || PREFIX=/usr/local
[ -n "$PYVER" ] || PYVER=2 [ -n "$PYVER" ] || PYVER=2
[ -n "$PYTHON_EXE_MSYS" ] || PYTHON_EXE_MSYS=python$PYVER.sh [ -n "$PYTHON_EXE_MSYS" ] || PYTHON_EXE_MSYS=python$PYVER.sh
[ -x "$PYTHON_EXE_MSYS" ] || return 2 [ -x "$PYTHON_EXE_MSYS" ] || exit 2
export PYVER export PYVER
export PREFIX export PREFIX
export PYTHON_EXE_MSYS export PYTHON_EXE_MSYS

View File

@ -0,0 +1,11 @@
#!/bin/sh
# -*- mode: sh; tab-width: 8; encoding: utf-8-unix -*-
# Thin wrapper: run the shared toxcore doctest driver under Python 2.
prog=$(basename $0 .bash)
PREFIX=/usr/local
ROLE=toxcore
if [ -f /usr/local/etc/testforge/testforge.bash ] ; then
	. /usr/local/etc/testforge/testforge.bash
fi
export PYVER=2
exec toxcore_run_doctest.bash "$@"

View File

@ -0,0 +1,11 @@
#!/bin/sh
# -*- mode: sh; tab-width: 8; encoding: utf-8-unix -*-
# Thin wrapper: run the shared toxcore doctest driver under Python 3.
prog=$(basename $0 .bash)
PREFIX=/usr/local
ROLE=toxcore
if [ -f /usr/local/etc/testforge/testforge.bash ] ; then
	. /usr/local/etc/testforge/testforge.bash
fi
export PYVER=3
exec toxcore_run_doctest.bash "$@"

View File

@ -0,0 +1,529 @@
#!/bin/bash
# -*- mode: sh; fill-column: 75; tab-width: 8; coding: utf-8-unix -*-
# Shared library for the ssl_test driver: builds per-tool argument strings
# and defines one ssltest_<tool> function per scanner (nmap, sslscan,
# openssl, testssl.sh, analyze-ssl.pl, curl, ssllabs).
# NOTE(review): assumes SSL_VER (2 => TLS1.2, 3 => TLS1.3) is already set
# by the sourcing script before this point — confirm against callers.
[ -f /usr/local/bin/usr_local_tput.bash ] && \
. /usr/local/bin/usr_local_tput.bash
# provides CURLE[] exit-code names, SSL_ALERT_CODES[], OPENSSL_X509_V[]
. /usr/local/bin/proxy_curl_lib.bash
[ -z "$TIMEOUT" ] && TIMEOUT=40
# triple timeout for the slow testssl.sh runs
TIMEOUT3=`expr 3 \* $TIMEOUT`
SSLSCAN_ARGS="-4 --show-certificate --bugs --timeout $TIMEOUT"
[ $SSL_VER = 3 ] && SSLSCAN_ARGS="$SSLSCAN_ARGS --tls13" || \
SSLSCAN_ARGS="$SSLSCAN_ARGS --tls12"
# -cipher 'ALL:!ADH:!LOW:!EXP:!MD5:@STRENGTH' -debug
# no timeout -no_tls1_1 -no_tls1_2
OPENSSL_ARGS="-4 -showcerts -bugs -status -state -no_ign_eof"
[ $SSL_VER = 3 ] && OPENSSL_ARGS="$OPENSSL_ARGS -tls1_3" || \
OPENSSL_ARGS="$OPENSSL_ARGS -tls1_2"
# --no-colour ?--show-certificate ?--show-client-cas ?--show-ciphers ?--tlsall
TESTSSL_ARGS="-4 --server-defaults --protocols --grease --server-preference --heartbleed --ccs-injection --renegotiation --breach --tls-fallback --drown --assume-http --connect-timeout $TIMEOUT3 --openssl-timeout $TIMEOUT3 --standard --vulnerable --ssl-native --phone-out --nodns none"
ANALYZE_ARGS="--timeout $TIMEOUT --all-ciphers --verbose"
NMAP_ARGS="--script ssl-enum-ciphers -v --script-trace"
# no --cert-status -> ocsp
CURL_ARGS="--silent -vvv --head --connect-timeout $TIMEOUT"
CURL_HTTP_ARGS="$CURL_ARGS --fail --location --http2 --proto-redir https --proto-default https --proto =https"
# [ -d /usr/local/share/ca-certificates/mozilla ] && \
# CURL_ARGS="$CURL_ARGS --capath usr/local/share/ca-certificates/mozilla"
[ $SSL_VER = 3 ] && CURL_ARGS="$CURL_ARGS --tlsv1.3" || \
CURL_ARGS="$CURL_ARGS --tlsv1.2"
# epoch at load time; DATE() reports seconds elapsed since this
NOW=`date +%s`
## DATE <logger> <args...> — invoke a logger (INFO/WARN/...) with the
## arguments prefixed by the seconds elapsed since this library loaded.
DATE () {
    local logfn=$1
    shift
    local elapsed
    elapsed=$(( $(date +%s) - NOW ))
    $logfn ${elapsed}s $*
    return 0
}
## ssltest_proxies — translate the SSLTEST_{HTTP,HTTPS}_PROXY and socks
## settings into per-tool proxy arguments and environments. Sets the
## PROXY_* globals plus OPENSSL_ARGS, CURL_ARGS, SSLSCAN_ENVS,
## ANALYZE_ENVS, TESTSSL_ENVS, NMAP_ENVS, CURL_ENVS. Returns 0.
ssltest_proxies () {
    # split scheme://host:port into whitespace-separated fields
    PROXY_SCHEME=`echo $SSLTEST_HTTPS_PROXY|sed -e 's@/@@g' -e 's/:/ /g'| cut -f 1 -d ' '`
    PROXY_HOST=`echo $SSLTEST_HTTPS_PROXY|sed -e 's@/@@g' -e 's/:/ /g'| cut -f 2 -d ' '`
    PROXY_PORT=`echo $SSLTEST_HTTPS_PROXY|sed -e 's@/@@g' -e 's/:/ /g'| cut -f 3 -d ' '`
    # SocksPolicy Accept in /etc/tor/torrc - required and works with sslscan
    TESTSSL_ENVS="env MAX_OSSL_FAIL=10 DNS_VIA_PROXY=true PROXY_WAIT=$TIMEOUT"
    if [ -n "$SSLTEST_HTTP_PROXY" ] ; then
	# BUGFIX: this branch previously read SSLTEST_HTTPS_PROXY
	# (copy-paste from the elif below); it is the plain-HTTP branch.
	PROXY_HOST_PORT=`echo "$SSLTEST_HTTP_PROXY" | sed -e 's@.*/@@'`
	OPENSSL_ARGS="$OPENSSL_ARGS -proxy $PROXY_HOST_PORT"
    elif [ -n "$SSLTEST_HTTPS_PROXY" ] ; then
	# WTF HTTP CONNECT failed: 502 Bad Gateway (tor protocol violation)
	PROXY_HOST_PORT=`echo "$SSLTEST_HTTPS_PROXY" | sed -e 's@.*/@@'`
	OPENSSL_ARGS="$OPENSSL_ARGS -proxy $PROXY_HOST_PORT"
    fi
    # Make sure a firewall is not between you and your scanning target!
    # `sed -e 's@.*/@@' <<< $SSLTEST_HTTPS_PROXY`
    # timesout 3x
    # TESTSSL_ARGS="$TESTSSL_ARGS --proxy=auto"
    # use torsocks instead of
    # ANALYZE_ARGS="ANALYZE_ARGS --starttls http_proxy:${PROXY_HOST}:$PROXY_PORT"
    CURL_ARGS="$CURL_ARGS -x socks5h://${SOCKS_HOST}:$SOCKS_PORT"
    #? NMAP_ARGS="$NMAP_ARGS -x socks4://${SOCKS_HOST}:$SOCKS_PORT"
    # no proxy args and no _proxy strings
    SSLSCAN_ENVS="$TORSOCKS "
    ANALYZE_ENVS="$TORSOCKS "
    # proxy timesout; run these bypassing the proxy instead
    TESTSSL_ENVS="sudo -u $BOX_BYPASS_PROXY_GROUP $TESTSSL_ENVS"
    NMAP_ENVS="sudo -u $BOX_BYPASS_PROXY_GROUP "
    CURL_ENVS=" "
    return 0
}
## ssltest_nmap <tool> <site> <outfile>
## Run nmap's ssl-enum-ciphers script against a site and append the result
## to a per-tool logfile derived from <outfile>. Returns 1 when <outfile>
## is missing, otherwise 0; nmap's status is left in the global $retval.
ssltest_nmap () {
    local tool=$1
    local target=$2
    local report=$3
    # the caller's per-site logfile must already exist
    if [ ! -f "$report" ] ; then
	return 1
    fi
    local toolfile
    toolfile=$(sed -e "s/.out/_$tool.out/" <<< $report)
    local cmd=nmap
    DATE DBUG $tool "$NMAP_ENVS $cmd $NMAP_ELTS $target" $toolfile
    INFO $tool "$NMAP_ENVS $cmd $NMAP_ELTS $target" >> $toolfile
    $NMAP_ENVS $cmd $NMAP_ELTS $target >> $toolfile 2>&1
    retval=$?
    if ! grep -q '(1 host up)' $toolfile ; then
	if [ $retval -ne 0 ] ; then
	    ERROR "$tool retval=$retval timeout=$TIMEOUT CA=$cacert = $toolfile" | tee -a $toolfile
	else
	    WARN $tool "NO '(1 host up)' in" $toolfile
	fi
    elif grep -q TLS_AKE_WITH_AES_256_GCM_SHA384 $toolfile ; then
	INFO "$tool TLS_AKE_WITH_AES_256_GCM_SHA384 = $toolfile" | tee -a $toolfile
    else
	INFO "$tool CA=$cacert = $toolfile" | tee -a $toolfile
    fi
    return 0
}
## ssltest_sslscan <tool> <site> <outfile>
## no good for 1.3
## Run sslscan against $2:$SSL_PORT, appending to a per-tool log derived
## from $3, then grade the log into INFO/WARN/ERROR.
## Returns 1 (no outfile), 2 (no SSL_VER), sslscan's status, or a negative
## sentinel (-1/-2/-3, i.e. 255/254/253 as an exit status) on log errors.
ssltest_sslscan () {
local elt=$1
local site=$2
local outfile=$3
[ -f "$outfile" ] || return 1
local eltfile=`sed -e "s/.out/_$elt.out/" <<< $outfile`
local exe=sslscan
[ -n "$SSL_VER" ] || { WARN no SSL_VER ; return 2 ; }
DATE DBUG "$SSLSCAN_ENVS $exe $SSLSCAN_ELTS $site" $eltfile
INFO "$SSLSCAN_ENVS $exe $SSLSCAN_ELTS $site" >> $eltfile
$SSLSCAN_ENVS $exe $SSLSCAN_ELTS $site:$SSL_PORT >> $eltfile 2>&1
retval=$?
# ECDHE-RSA-AES256-SHA pop.zoho.eu tls1.2
if [ $retval -ne 0 ] ; then
ERROR "$elt failed retval=$retval CA=$cacert = $eltfile" | tee -a $eltfile
elif grep ERROR $eltfile ; then
ERROR "$elt ERROR CA=$cacert = $eltfile" | tee -a $eltfile
retval=-1
elif grep EROR: $eltfile ; then
ERROR "$elt EROR: CA=$cacert = $eltfile" | tee -a $eltfile
retval=-2
elif grep "Certificate information cannot be retrieved." $eltfile ; then
WARN "$elt 'Certificate information cannot be retrieved' = $eltfile" | tee -a $eltfile
elif grep "TLSv1.$SSL_VER.*disabled" $eltfile ; then
ERROR "$elt TLSv1.$SSL_VER disabled = $eltfile" | tee -a $eltfile
retval=-3
elif ! grep '^\(Subject\|Altnames\).*'"$site" $eltfile ; then
# *.zoho.eu
WARN "$elt not 'Subject\|Altnames' = $eltfile" | tee -a $eltfile
elif ! grep -q Accepted $eltfile ; then
WARN "$elt not Accepted CA=$cacert = $eltfile" | tee -a $eltfile
elif [ $SSL_VER = 3 ] && ! grep -q TLS_AES_256_GCM_SHA384 $eltfile ; then
WARN "$elt not TLS_AES_256_GCM_SHA384 CA=$cacert = $eltfile" | tee -a $eltfile
else
DATE INFO "$elt Accepted CA=$cacert = $eltfile " | tee -a $eltfile
fi
return $retval
}
## ssltest_openssl <tool> <site> <outfile>
## Probe $2:443 with `openssl s_client` (args in OPENSSL_ELTS), appending
## to a per-tool log derived from $3, then grade the log.
## Returns 1 (no outfile), 2 (no SSL_VER), else the s_client status.
ssltest_openssl () {
    local elt=$1
    local site=$2
    local exe=openssl
    local outfile=$3
    [ -f "$outfile" ] || return 1
    local eltfile=`sed -e "s/.out/_$elt.out/" <<< $outfile`
    local total_s=`expr 2 \* $TIMEOUT`
    local domain
    [ -n "$SSL_VER" ] || { WARN no SSL_VER ; return 2 ; }
    # bare domain, for wildcard-ish CN matching below
    domain=`echo $site|sed -e 's/.*\([^.]*\)\.\([^.]*\)$/\1/'`
    # -msg -msgfile $TMPDIR/$$.$site.s_client.msg
    INFO "$exe s_client $OPENSSL_ELTS timeout=$total_s" -connect $site:443 >> $eltfile
    # </dev/null so s_client does not hang waiting for stdin
    timeout $total_s $exe s_client $OPENSSL_ELTS -connect $site:443 < /dev/null >> $eltfile 2>&1
    retval=$?
    if [ $retval -eq 124 ] ; then
	# 124 is timeout(1)'s "timed out"
	DBUG $exe s_client $OPENSSL_ELTS $site
	WARN "$elt failed timeout=$TIMEOUT CA=$cacert = $eltfile" | tee -a $eltfile
    elif [ $retval -eq 1 ] ; then
	num=`grep ':SSL alert number' $eltfile | sed -e 's/.*:SSL alert number //'`
	if [ $? -eq 0 ] && [ -n "$num" ] ; then
	    ERROR "$elt failed retval=$retval SSL alert #$num ${SSL_ALERT_CODES[$num]} CA=$cacert = $eltfile" | tee -a $eltfile
	else
	    ERROR "$elt failed retval=$retval err=${OPENSSL_X509_V[$retval]} CA=$cacert = $eltfile" | tee -a $eltfile
	    cat $eltfile
	fi
    elif grep ':error:' $eltfile ; then
	a=`grep ':error:' $eltfile | sed -e 's/^[0-9]*:[^:]*:[^:]*:[^:]*:[^:]*://' -e 's/:.*//' |head -1 `
	ERROR "$elt :error: $a CA=$cacert = $eltfile" | tee -a $eltfile
    elif grep 'Cipher is (NONE)\|SSL handshake has read 0 bytes' $eltfile ; then
	ERROR "$elt s_client Cipher is (NONE) CA=$cacert = $eltfile" | tee -a $eltfile
    elif [ $retval -ne 0 ] ; then
	ERROR "$elt failed retval=$retval err=${OPENSSL_X509_V[$retval]} CA=$cacert = $eltfile" | tee -a $eltfile
    elif grep 'HTTP CONNECT failed:' $eltfile ; then
	WARN "$elt failed HTTP CONNECT failed CA=$cacert = $eltfile" | tee -a $eltfile
    elif grep 'unable to get local issuer certificate' $eltfile ; then
	WARN "$elt s_client unable to get local issuer certificate CA=$cacert = $eltfile" | tee -a $eltfile
    elif grep 'Verification error: certificate has expired' $eltfile ; then
	# BUGFIX: the closing quote used to sit after "| tee -a $eltfile",
	# putting the pipe text inside the logged message.
	WARN "$elt s_client Verification error: certificate has expired = $eltfile" | tee -a $eltfile
    elif ! grep -q '^depth=0 CN.*'$site $eltfile &&
	 ! grep -q '^depth=0 CN.*'$domain $eltfile ; then
	DEBUG=1 DBUG $exe s_client $OPENSSL_ELTS -connect $site:443
	WARN "$elt s_client CN NOT $site = $eltfile" | tee -a $eltfile
    elif grep 'OCSP response: no response' $eltfile ; then
	# BUGFIX: pattern was the misspelled 'OSCP', which never matches
	# openssl's "OCSP response: no response sent" output; quote also fixed.
	WARN "$elt s_client OCSP response: no response = $eltfile" | tee -a $eltfile
    elif grep "New, TLSv1.$SSL_VER, Cipher is TLS" $eltfile ; then
	# BUGFIX: was single-quoted, so $SSL_VER never expanded and this
	# success branch could not match.
	DATE INFO "$elt TLSv1.$SSL_VER, Cipher is TLS CA=$cacert = $eltfile " | tee -a $eltfile
    else
	DATE INFO "$elt client CA=$cacert = $eltfile " | tee -a $eltfile
    fi
    return $retval
}
## ssltest_testssl <tool> <site> <outfile>
## Drive testssl.sh against $2:$SSL_PORT (args in TESTSSL_ELTS, env in
## TESTSSL_ENVS), appending to a per-tool log derived from $3, then grade
## the log. Returns 0 when TLSv1.$SSL_VER is offered, 1/2 on missing
## outfile/SSL_VER, testssl's own status, or a 25x sentinel.
ssltest_testssl () {
    local elt=$1
    local site=$2
    local exe=/usr/local/bin/$elt.sh
    local outfile=$3
    [ -f "$outfile" ] || return 1
    local eltfile=`sed -e "s/.out/_$elt.out/" <<< $outfile`
    local total_s=`expr 2 \* $TIMEOUT3`
    [ -n "$SSL_VER" ] || { WARN no SSL_VER ; return 2 ; }
    DATE DBUG $elt timeout $total_s "`basename $exe` $TESTSSL_ELTS $site:$SSL_PORT" $eltfile
    INFO DBUG $elt timeout $total_s "`basename $exe` $TESTSSL_ELTS $site:$SSL_PORT" >> $eltfile 2>&1
    # TLS 1.2 offered (OK)
    # TLS 1.3 offered (OK)
    # You should not proceed as no protocol was detected. If you still really really want to, say "YES" -->
    echo YES | timeout $total_s env $TESTSSL_ENVS $exe $TESTSSL_ELTS $site:$SSL_PORT >>$eltfile 2>&1
    retval=$?
    # testssl names a debug subdir in its log; pull its parse output in
    subdir=`grep 'DEBUG (level 1): see files in' $eltfile | sed -e 's/.* //' -e "s/[$'].*//"`
    if [ -n "$subdir" ] ; then
	subdir="${subdir::19}"
	if [ -d "$subdir" ] ; then
	    DBUG found \"$subdir\"
	    cat "$subdir"/*parse*txt >> $eltfile
	fi
    fi
    # BUGFIX: the "TLS 1.$SSL_VER" patterns below were single-quoted, so
    # $SSL_VER never expanded and those branches could never match.
    if grep "Protocol.*TLSv1.$SSL_VER" $eltfile ; then
	# timesout after success
	DATE INFO "$elt $site Protocol : TLSv1.$SSL_VER CA=$cacert =$eltfile" | tee -a $eltfile
	retval=0
    elif grep "TLS 1.$SSL_VER *.*offered.*(OK)" $eltfile ; then
	DATE INFO "$elt $site TLS 1.$SSL_VER offered CA=$cacert =$eltfile" | tee -a $eltfile
	retval=0
    elif [ $retval -eq 124 ] ; then
	WARN $elt $site "timedout timeout=$total_s CA=$cacert = $eltfile" | tee -a $eltfile
    elif grep "TLS 1.$SSL_VER.*not offered and downgraded to a weaker protocol" $eltfile ; then
	DATE ERROR "$elt $site TLS 1.$SSL_VER NOT offered CA=$cacert =$eltfile" | tee -a $eltfile
	retval=`expr 256 - 1`
    elif grep -q 't seem to be a TLS/SSL enabled server' $eltfile ; then
	DATE ERROR "$elt $site doesnt seem to be a TLS/SSL enabled server: CA=$cacert =$eltfile" | tee -a $eltfile
	retval=`expr 256 - 2`
    elif grep -q 'Client problem, No server cerificate could be retrieved' $eltfile ; then
	WARN "$elt $site Client problem: CA=$cacert =$eltfile" | tee -a $eltfile
	retval=`expr 256 - 3`
    elif grep 'Fixme: something weird happened' $eltfile ; then
	WARN "$elt $site Fixme: something weird happened CA=$cacert =$eltfile" | tee -a $eltfile
	retval=`expr 256 - 4`
    elif grep 'Oops: TCP connect problem' $eltfile ; then
	WARN "$elt $site Oops: TCP connect problem CA=$cacert =$eltfile" | tee -a $eltfile
	retval=`expr 256 - 5`
    elif [ $retval -gt 5 ] ; then
	# returns 5
	WARN "$elt failed retval=$retval CA=$cacert = $eltfile" | tee -a $eltfile
    elif grep ': unable to\| error:' $eltfile ; then
	ERROR "$elt.bash unable to / error: CA=$cacert = $eltfile" | tee -a $eltfile
	retval=`expr 256 - 6`
    elif grep 'unexpected error' $eltfile ; then
	ERROR "$elt.bash unexpected error CA=$cacert = $eltfile" | tee -a $eltfile
	retval=`expr 256 - 7`
    elif [ "$retval" -eq 1 ] ; then
	DATE ERROR "$elt.bash error retval=$retval: CA=$cacert = $eltfile " | tee -a $eltfile
    elif grep -q "Negotiated protocol.*TLSv1.$SSL_VER" $eltfile ; then
	# TLS_AKE_WITH_AES_256_GCM_SHA384
	DATE INFO "$elt.bash TLSv1.$SSL_VER retval=$retval: CA=$cacert = $eltfile " | tee -a $eltfile
    elif [ "$retval" -ne 0 ] ; then
	# 5 is success
	DATE WARN "$elt.bash error retval=$retval: CA=$cacert = $eltfile " | tee -a $eltfile
    else
	DATE INFO "$elt.bash no error retval=$retval: CA=$cacert = $eltfile " | tee -a $eltfile
    fi
    if grep ' VULNERABLE ' $eltfile ; then
	WARN "$elt.bash VULNERABLE: CA=$cacert = $eltfile " | tee -a $eltfile
    fi
    grep 'Overall Grade' $eltfile
    return $retval
}
## ssltest_analyze_ssl $elt $site
## Run analyze-ssl.pl (via its wrapper) against $2:$SSL_PORT, appending to
## a per-tool log derived from $3, then grade the log.
## Returns 1 (no outfile), 2 (no SSL_VER), the tool's status, or a 25x
## sentinel (`expr 256 - N`) for specific log findings.
ssltest_analyze_ssl () {
local elt=$1
local site=$2
local exe=/usr/local/bin/analyze-ssl.pl.bash
local outfile=$3
[ -f "$outfile" ] || return 1
local eltfile=`sed -e "s/.out/_$elt.out/" <<< $outfile`
local total_s=`expr 2 \* $TIMEOUT`
[ -n "$SSL_VER" ] || { WARN no SSL_VER ; return 2 ; }
DATE DBUG $elt "timeout $total_s $ANALYZE_ENVS `basename $exe` $ANALYZE_ELTS $site:$SSL_PORT" $eltfile
INFO "timeout $total_s $ANALYZE_ENVS `basename $exe` $ANALYZE_ELTS $site:$SSL_PORT" >> $eltfile
timeout $total_s $ANALYZE_ENVS $exe $ANALYZE_ELTS $site:$SSL_PORT >> $eltfile 2>&1
retval=$?
# empty log means the tool produced nothing at all
if [ ! -s $eltfile ] ; then
ERROR "$elt failed empty $eltfile" | tee -a $eltfile
retval=`expr 256 - 1`
elif grep "successful connect with TLSv1_$SSL_VER" $eltfile && \
grep 'all certificates verified' $eltfile ; then
# succeeds but timesout
DATE INFO "$elt successful connect with TLSv1_$SSL_VER retval=$retval error = $eltfile" | tee -a $eltfile
elif [ $retval -eq 124 ] ; then
# 124 is timeout(1)'s "timed out"
WARN "$elt timedout timeout=$total_s CA=$cacert = $eltfile" | tee -a $eltfile
elif [ $retval -ne 0 ] ; then
ERROR "$elt failed retval=$retval = $eltfile" | tee -a $eltfile
elif grep ERROR: $eltfile ; then
ERROR "$elt failed ERROR: = $eltfile" | tee -a $eltfile
retval=`expr 256 - 3`
elif grep 'certificate verify - name does not match' $eltfile ; then
ERROR "$elt failed name does not match = $eltfile" | tee -a $eltfile
retval=`expr 256 - 4`
elif ! grep 'certificate verified : ok' $eltfile ; then
ERROR "$elt failed NO certificate verified = $eltfile" | tee -a $eltfile
retval=`expr 256 - 5`
elif grep 'certificate verified : FAIL' $eltfile ; then
ERROR "$elt certificate verified : FAIL = $eltfile" | tee -a $eltfile
retval=`expr 256 - 6`
elif grep 'handshake failed with HIGH' $eltfile ; then
WARN "$elt failed handshake failed with HIGH = $eltfile" | tee -a $eltfile
retval=`expr 256 - 7`
elif grep '^ \! ' $eltfile ; then
ERROR "$elt failed \! = $eltfile" | tee -a $eltfile
retval=`expr 256 - 8`
else
DATE INFO "$elt no error = $eltfile" | tee -a $eltfile
fi
return $retval
}
## ssltest_curl <tool> <site> <outfile>
## Fetch ${prot}://$2:$SSL_PORT with curl (args in CURL_ELTS, env in
## CURL_ENVS), appending to a per-tool log derived from $3, then grade the
## verbose log. Protocol is picked from SSL_PORT (443 https, 995 pop3s,
## 587 smtps). Returns 1/2/3 on precondition failures, 0 on success, an
## HTTP code for curl exit 22, or a 25x sentinel.
ssltest_curl () {
local elt=$1
local site=$2
local exe="/usr/local/bin/s$elt.bash -- "
local outfile=$3
[ -f "$outfile" ] || { WARN no outfile ; return 1 ; }
local eltfile=`sed -e "s/.out/_$elt.out/" <<< $outfile`
local total_s=`expr 2 \* $TIMEOUT`
local prot
[ -n "$SSL_VER" ] || { WARN no SSL_VER ; return 2 ; }
[ -n "$SSL_PORT" ] || { WARN no SSL_PORT ; return 3 ; }
# NOTE(review): $exe is immediately overridden to plain curl here,
# so the scurl.bash wrapper above is effectively unused — confirm.
exe=curl
if [ "$SSL_PORT" = 443 ] ; then
prot=https
elif [ "$SSL_PORT" = 995 ] ; then
prot=pop3s
exe=curl
CURL_ELTS="$CURL_ELTS -l"
elif [ "$SSL_PORT" = 587 ] ; then
prot=smtps
exe=curl
# CURL_ELTS="$CURL_ELTS"
else
ERROR $elt unrecognized port protocol $SSL_PORT
return 3
fi
DATE DBUG $elt $CURL_ENVS "`basename $exe` $CURL_ELTS ${prot}://$site:$SSL_PORT" $eltfile
INFO $elt "$CURL_ENVS `basename $exe` $CURL_ELTS ${prot}://$site:$SSL_PORT" >> $eltfile
$CURL_ENVS $exe $CURL_ELTS ${prot}://$site:$SSL_PORT >> $eltfile 2>&1
retval=$?
# grep '= /tmp/scurl'
ERRF=$eltfile
# bare domain, for wildcard-ish CN matching below
domain=`echo $site|sed -e 's/.*\([^.]*\)\.\([^.]*\)$/\1/'`
if [ $SSL_VER -eq 3 ] && ! grep "SSL connection using TLSv1.$SSL_VER" $ERRF ; then
DEBUG=1 DBUG $CURL_ENVS $exe $CURL_ELTS ${prot}://$site:$SSL_PORT
ERROR "$elt NO 'using TLSv1.$SSL_VER' TLSv1.$SSL_VER CA=$cacert = $ERRF" | tee -a $eltfile
retval=`expr 256 - 1`
cat $eltfile
elif ! grep -q "SSL connection using TLSv1.[3$SSL_VER]" $ERRF ; then
ERROR "$elt NO SSL connection using TLSv1.$SSL_VER CA=$cacert = $ERRF" | tee -a $eltfile
retval=`expr 256 - 1`
cat $eltfile
elif [ $retval -eq 77 ] || grep -q 'CURLE_SSL_CACERT_BADFILE' $ERRF ; then
ERROR "$elt retval=$retval ${CURLE[$retval]} CAFILE=$CAFILE = $ERRF" | tee -a $eltfile
elif [ $retval -eq 28 ] || grep -q 'CURLE_OPERATION_TIMEDOUT' $ERRF ; then
WARN "$elt retval=$retval CURLE_OPERATION_TIMEDOUT ${CURLE[$retval]} CAFILE=$CAFILE = $ERRF" | tee -a $eltfile
elif [ $retval -eq 91 ] || grep -q 'CURLE_SSL_INVALIDCERTSTATUS' $ERRF ; then
WARN "$elt retval=$retval ${CURLE[$retval]} CAFILE=$CAFILE = $ERRF" | tee -a $eltfile
# NOTE(review): retval 28 is already handled two branches up, so this
# arm only fires via the 'Connection timed out' grep — confirm intended.
elif [ $retval -eq 28 ] || grep -q 'Connection timed out' $ERRF ; then
WARN "$elt retval=$retval ${CURLE[$retval]} CAFILE=$CAFILE = $ERRF" | tee -a $eltfile
elif [ $retval -eq 22 ] || grep -q 'curl: (22) The requested URL returned error:' $ERRF; then
# on 22 - change to HTTP code
code=`grep 'curl: (22) The requested URL returned error:' $ERRF | sed -s 's/.*returned error: //'`
if [ "$code" = 416 ] ; then
INFO "$elt retval=$retval ${CURLE[$retval]} code=$code CA=$cacert = $ERRF" | tee -a $eltfile
retval=$code
elif [ -n "$code" ] && [ "$code" -ge 400 ] ; then
# 403 Cloudflare
ERROR "$elt retval=$retval ${CURLE[$retval]} code=$code CA=$cacert = $ERRF" | tee -a $eltfile
retval=$code
else
WARN "$elt retval=$retval ${CURLE[$retval]} code=$code CA=$cacert = $ERRF" | tee -a $eltfile
fi
elif [ $retval -ne 0 ] ; then
# curl: (3) URL using bad/illegal format or missing URL - worked
WARN "$elt retval=$retval ${CURLE[$retval]} CA=$cacert = $ERRF" | tee -a $eltfile
elif ! grep -q "subject: CN=.*$site" $ERRF && \
! grep -q "subject: CN=.*$domain" $ERRF ; then
DBUG subject: `grep subject: $ERRF `
# CN can have wildcards *.pythonhosted.org etc.
# upgrade to ERROR when the matching works.
WARN "$elt NO subject: CN=$site CA=$cacert = $ERRF" | tee -a $eltfile
# retval=`expr 256 - 2`
elif grep -q "503 - Forwarding failure" $ERRF ; then
WARN "$elt 503 - Forwarding failure CA=$cacert = $ERRF" | tee -a $eltfile
retval=`expr 256 - 3`
elif grep -q 'we are not connected' $eltfile ; then
WARN "$elt CA=$cacert = $ERRF" | tee -a $eltfile
retval=0
else
INFO "$elt CA=$cacert = $ERRF" | tee -a $eltfile
retval=0
fi
# TLSv1.3 (IN), TLS handshake, Finished
return $retval
}
## ssllabs_analyze <tool> <site> <outfile>
## Kick off an SSL Labs analyze page fetch for <site> via scurl, logging
## to a per-tool .html file derived from <outfile>; records the outcome
## in <outfile> itself. Returns 1/2 on precondition failure, else curl's
## status.
ssltest_analyze () {
    local tool=$1
    local target=$2
    local fetch="/usr/local/bin/scurl.bash -- "
    local report=$3
    if [ ! -f "$report" ] ; then
	return 1
    fi
    local toolfile
    toolfile=$(sed -e "s/.out/_$tool.html/" <<< $report)
    local total_s
    total_s=$(expr 2 \* $TIMEOUT)
    local url="https://www.ssllabs.com/ssltest/analyze.html?d=$target"
    if [ -z "$SSL_VER" ] ; then
	WARN no SSL_VER
	return 2
    fi
    umask 0022
    DATE DBUG "$tool $CURL_ELTS SSL_PORT=$SSL_PORT $url" $toolfile
    INFO "<\!-- $CURL_ENVS $tool $CURL_ELTS $url -->" >> $toolfile
    $CURL_ENVS $fetch $CURL_ELTS $url >> $toolfile 2>&1
    retval=$?
    if [ $retval -eq 0 ] ; then
	DATE INFO "$tool retval=$retval $url" $toolfile >> $report
    else
	DATE WARN "$tool retval=$retval $url" $toolfile >> $report
    fi
    return $retval
}
## ssltest_ssllabs <tool> <site> <outfile> <site_ip>
## Fetch the SSL Labs analyze page for $2 (pinned to $4) and grade the
## returned HTML for a Yes/No next to ">TLS 1.$SSL_VER<".
## Returns 1/2 on precondition failure, 0 when TLS is reported offered,
## 255 when reported not offered, else curl's status.
ssltest_ssllabs() {
local elt=$1
local site=$2
local outfile=$3
[ -f "$outfile" ] || return 1
local site_ip=$4
local eltfile=`sed -e "s/.out/_$elt.html/" <<< $outfile`
local host=www.ssllabs.com
# NOTE(review): $url is relative and $host is never interpolated into the
# fetch below — presumably it should be https://$host/$url; confirm.
local url="ssltest/analyze.html?d=$site&s=$site_ip"
local exe="/usr/local/bin/scurl.bash -- "
[ -n "$SSL_VER" ] || { WARN no SSL_VER ; return 2 ; }
umask 0022
DATE DBUG "$elt $CURL_ELTS $url" $eltfile
INFO "<\!-- $CURL_ENVS $elt $CURL_ELTS $url -->" >> $eltfile
$CURL_ENVS $exe $CURL_ELTS $url >> $eltfile 2>&1
retval=$?
if [ $retval -ne 0 ] ; then
DATE WARN "$elt retval=$retval $url" $eltfile | tee -a $eltfile
elif grep -A 2 ">TLS 1.$SSL_VER<" $eltfile | grep -q 'No' ; then
DATE ERROR "$elt retval=$retval $url" $eltfile | tee -a $eltfile
retval=`expr 256 - 1`
elif grep -A 2 ">TLS 1.$SSL_VER<" $eltfile | grep -q 'Yes' ; then
DATE INFO "$elt retval=$retval $url" $eltfile | tee -a $eltfile
retval=0
else
DATE WARN "$elt retval=$retval $url" $eltfile | tee -a $eltfile
fi
return $retval
}
## ssltest_http2_alt_svc <tool> <site> <outfile>
## Sanity-check HTTP/2 and alt-svc handling through the (optional) socks
## proxy: fetch a known HTTP/2 page, extract the advertised h2 alt-svc
## (onion) host from the headers, then fetch that alternative.
## Returns 1 (no outfile), 2 (no SSL_VER), 11 (no HTTP/2), 12 (no
## alt-svc header), else the status of the final fetch.
ssltest_http2_alt_svc() {
    local elt=$1
    local site=$2
    local outfile=$3
    [ -f "$outfile" ] || return 1
    local eltfile=`sed -e "s/.out/_$elt.html/" <<< $outfile`
    local exe="/usr/local/bin/scurl.bash -- "
    local host=www.integralblue.com
    local url=1.1.1.1/fun-stuff/dns-over-tor/
    [ -n "$SSL_VER" ] || { WARN no SSL_VER ; return 2 ; }
    umask 0022
    if [ -n "$socks_proxy" ] ; then
	# force the socks5h scheme so DNS also resolves through the proxy
	export socks_proxy=`sed -e 's/socks[a-z0-9]*:/socks5h:/' <<< $socks_proxy`
	$exe --head --http2 -x $socks_proxy https://$host/$url > $eltfile 2>&1
    else
	$exe --head --http2 https://$host/$url > $eltfile 2>&1
    fi
    #? grep '^HTTP/2 301' $eltfile || exit 1
    grep -q '^HTTP/2 ' $eltfile || return 11
    grep -q 'alt-svc:' $eltfile || return 12
    # alt-svc: h2="<host>:443"; ... -> keep just the alternative host
    onion=`grep 'alt-svc:' $eltfile | sed -e 's/.*h2=.//' -e 's/";.*//'` # || exit 3
    if [ -n "$socks_proxy" ] ; then
	$exe --head -x $socks_proxy https://$onion/$url >> $eltfile 2>&1
	retval=$?
    else
	$exe --head https://$onion/$url >> $eltfile 2>&1
	retval=$?
    fi
    if [ $retval -eq 0 ] ; then
	DATE INFO $elt https://$host/$url | tee -a $eltfile
    else
	DATE WARN $elt https://$host/$url | tee -a $eltfile
    fi
    # BUGFIX: was 'return $?', which reported the status of the tee
    # pipeline above (always 0) instead of the curl result.
    return $retval
}

View File

@ -0,0 +1,342 @@
#!/bin/bash
# -*- mode: sh; fill-column: 75; tab-width: 8; coding: utf-8-unix -*-
# Driver: run a battery of TLS scanners (curl/openssl/testssl/nmap/...,
# see SSLTEST_TESTS) against known-good or known-bad TLS sites, through
# an optional tor/socks proxy, and summarize WARN/ERROR counts.
prog=`basename $0 .bash`
PREFIX=/usr/local
ROLE=toxcore
export PATH=/sbin:$PATH
[ -f /usr/local/etc/testforge/testforge.bash ] && \
. /usr/local/etc/testforge/testforge.bash
#[ -n "$TESTF_VAR_LOCAL" ] && PREFIX=$TESTF_VAR_LOCAL
. $PREFIX/bin/usr_local_tput.bash || exit 2
. /usr/local/bin/proxy_ping_lib.bash >/dev/null || \
{ ERROR loading /usr/local/bin/proxy_ping_lib.bash ; exit 3; }
#? . $PREFIX/src/usr_local_src.bash || exit 2
DNS_TRIES=3
# per-run log prefix, unique by date/hour/pid
LOGP=TestSSL_`date -u +%y-%m-%d_%H_$$`
# NOTE(review): TMPDIR is only defaulted to /tmp further down; if it is
# unset here this expands to /TestSSL_* — confirm intended.
rm -f $TMPDIR/${LOGP}*
# analyze-ssl passed files.pythonhosted.org
# INFO: 226s analyze-ssl no error = /tmp/_files.pythonhosted.org_analyze-ssl.out
[ -z "$SSLTEST_TESTS" ] && SSLTEST_TESTS="curl openssl testssl nmap" # sslscan
[ -z "$SSLTEST_CERTS" ] && SSLTEST_CERTS="/etc/ssl/certs/ca-certificates.crt /usr/local/etc/ssl/cacert-testforge.pem"
[ -z "$SSLTEST_TIMEOUT" ] && SSLTEST_TIMEOUT=30
# inherit proxies from the environment unless explicitly configured
[ -z "$SSLTEST_SOCKS_PROXY" -a -n "$socks_proxy" ] && SSLTEST_SOCKS_PROXY=$socks_proxy \
&& DBUG SSLTEST_SOCKS_PROXY=$socks_proxy
if [ -z "$SSLTEST_HTTPS_PROXY" -a -n "$https_proxy" ] ; then
SSLTEST_HTTPS_PROXY=$https_proxy
DBUG SSLTEST_HTTPS_PROXY=$SSLTEST_HTTPS_PROXY
fi
[ -z "$SSLTEST_HTTP_PROXY" -a -n "$http_proxy" ] && SSLTEST_HTTP_PROXY=$http_proxy \
&& DBUG SSLTEST_HTTP_PROXY=$http_proxy
[ -z "$BOX_BYPASS_PROXY_GROUP" ] && BOX_BYPASS_PROXY_GROUP=bin
SSL_LIB=openssl
# [ "$MODE" ] && proxy_ping_test.bash $MODE
# sites expected to FAIL verification
declare -a BADSSL_SITES
BADSSL_SITES=(
self-signed.badssl.com
expired.badssl.com
mixed.badssl.com
rc4.badssl.com
hsts.badssl.com
)
# sites expected to verify cleanly
declare -a GOODSSL_SITES
GOODSSL_SITES=(
files.pythonhosted.org
mirrors.dotsrc.org
deb.devuan.org
# dfw.source.kernel.org
# cdn.kernel.org
)
# option defaults; overridden by the getopt loop below
badssl=0
goodssl=0
[ "$#" -eq 0 ] && goodssl=1
tests="$SSLTEST_TESTS"
verbosity=2
outdir=/tmp
timeout=$SSLTEST_TIMEOUT
onion=0
TMPDIR=/tmp
SSL_PORT=443
SSL_VER=3
## Print the command-line help, reflecting the current default values.
usage() {
    printf '%s\n' "Usage: $0 [OPTIONS] dirs-or-files"
    printf '\n'
    printf '%s\n' " -B | --badssl - test badssl.org sites"
    printf '%s\n' " -G | --goodssl - test good sites"
    printf '%s\n' " -S | --ssl - tls version v1.x - 2 or 3"
    printf '%s\n' " -O | --onion - onion"
    printf '%s\n' " -o | --outdir=$TMPDIR - output directory"
    printf '%s\n' " -v | --verbosity=$verbosity - verbosity 0 least 5 most"
    printf '%s\n' " -T | --timeout=$timeout - timeout in sec."
    printf '%s\n' " -E | --tests=$(sed -e 's/ /,/g' <<< $tests) - tests, comma separated"
    printf '%s\n' " -C | --certs=$(sed -e 's/ /,/g' <<< $SSLTEST_CERTS) - tests, comma separated"
    printf '%s\n' " -Y | --ciphers - comma sep list of ciphers"
    printf '%s\n' " -P | --port - port default $SSL_PORT"
    printf '%s\n' " -N | --connect - connect"
    printf '\n'
    printf '%s\n' " -V | --version - print version of this script"
    printf '%s\n' " -h | --help - print this help"
}
# Command-line parsing (util-linux enhanced getopt).
# BUGFIX: SHORTOPTS lacked 'o:' although -o|--outdir is handled below;
# LONGOPTS had 'version:' taking a spurious argument, 'timeout' missing
# its ':', and no 'outdir:'; the tests arm did not match its short
# option -E.
SHORTOPTS="hVGBv:T:C:P:S:E:Y:ON:o:"
LONGOPTS="help,version,goodssl,badssl,verbosity:,timeout:,outdir:,certs:,port:,ssl:,tests:,ciphers:,onion,connect:"
declare -a SITES
SITES=()
ARGS=$(getopt --options $SHORTOPTS --longoptions $LONGOPTS -- "$@")
[ $? != 0 ] && { ERROR "error parsing getopt" ; exit 4 ; }
eval set -- "$ARGS"
while true; do
    case "$1" in
	-o|--outdir)
	    shift
	    TMPDIR="$1"
	    ;;
	-v|--verbosity)
	    shift
	    verbosity="$1"
	    ;;
	-T|--timeout)
	    shift
	    timeout="$1"
	    ;;
	-S|--ssl)
	    shift
	    SSL_VER="$1"
	    ;;
	-P|--port)
	    shift
	    SSL_PORT="$1"
	    ;;
	-N|--connect)
	    shift
	    SSL_CONNECT="$1"
	    ;;
	-C|--certs)
	    # comma-separated on the command line, space-separated internally
	    shift
	    SSLTEST_CERTS="`sed -e 's/,/ /g' <<< $1`"
	    ;;
	-Y|--ciphers)
	    shift
	    SSLTEST_CIPHERS="`sed -e 's/,/ /g' <<< $1`"
	    ;;
	-E|-t|--tests)
	    shift
	    tests="`sed -e 's/,/ /g' <<< $1`"
	    ;;
	-O|--onion)
	    onion=1
	    ;;
	-G|--goodssl)
	    goodssl=1
	    badssl=0
	    ;;
	-B|--badssl)
	    badssl=1
	    goodssl=0
	    ;;
	-V|--version)
	    usage
	    exit 0
	    ;;
	-h|--help)
	    usage
	    exit 0
	    ;;
	'--')
	    # everything after -- is the site list
	    shift
	    SITES=("$@")
	    break
	    ;;
	*)
	    { ERROR "unrecognized arguments $*" ; exit 5 ; }
	    break
	    ;;
    esac
    shift
done
# Pick the site list: explicit arguments > --badssl > --goodssl.
[ "${#SITES[*]}" -eq 0 -a $badssl -gt 0 ] && SITES=("${BADSSL_SITES[@]}")
[ "${#SITES[*]}" -eq 0 -a $goodssl -gt 0 ] && SITES=("${GOODSSL_SITES[@]}")
[ "${#SITES[@]}" -eq 0 ] && { ERROR "no arguments $*" ; exit 7 ; }
[ "$SSL_VER" -ge 2 -a "$SSL_VER" -le 3 ] || { ERROR "SSL_VER $SSL_VER" ; exit 6 ; }
[ -d "$TMPDIR" ] || mkdir -p "$TMPDIR" || { ERROR "mkdir $TMPDIR" ; exit 8 ; }
# onion targets get double the timeout
[ $onion -eq 0 ] && TIMEOUT=$timeout || TIMEOUT=`expr $timeout \* 2`
SSLTEST_TESTS="$tests"
declare -a tests_ran
tests_ran=()
# connectivity check: a default route on a wlan interface is required
grep -q "^wlan[1-9][ ]00000000" /proc/net/route || { WARN "not connected" ; exit 0 ; }
IF=`route | grep ^def |sed -e 's/.* //'`
[ -n "$IF" ] || { ERROR "no IF" ; exit 10 ; }
IP=`ifconfig $IF|grep -A 2 ^wlan |grep inet | sed -e 's/.*inet //' -e 's/ .*//'`
[ -n "$IP" ] || { ERROR "no IP" ; exit 11 ; }
[ -z "$socks_proxy" ] || . /usr/local/bin/proxy_export.bash
netstat -nle4 | grep -v grep | grep -q 0.1:53 || \
{ WARN "DNS not running - netstat " ; }
# iptables-legacy-save | grep "OUTPUT -o wlan4 -m owner --gid-owner 2 -j ACCEPT"
# uses TIMEOUT=30
. $PREFIX/bin/toxcore_ssl_lib.bash
if [ "$USER" = bin ] ; then
[ -z "$SOCKS_HOST" ] && SOCKS_HOST=
[ -z "$SOCKS_PORT" ] && SOCKS_PORT=
[ -z "$SOCKS_DNS" ] && SOCKS_DNS=9053
else
DEBUG=0 proxy_ping_get_socks >/dev/null
[ -z "$SOCKS_HOST" ] && SOCKS_HOST=127.0.0.1
[ -z "$SOCKS_PORT" ] && SOCKS_PORT=9050
[ -z "$SOCKS_DNS" ] && SOCKS_DNS=9053
fi
# choose the torsocks wrapper matching the discovered socks endpoint
if [ "$USER" = bin ] ; then
TORSOCKS=""
elif [ $SOCKS_HOST != 127.0.0.1 ] ; then
TORSOCKS="torsocks --address $SOCKS_HOST --port $SOCKS_PORT "
elif [ $SOCKS_PORT != 9050 ] ; then
TORSOCKS="torsocks --port $SOCKS_PORT "
else
TORSOCKS="torsocks "
fi
if [ -n "$SSLTEST_HTTPS_PROXY" ] ; then
grep -q "SocksPolicy *accept *$IP" /etc/tor/torrc || \
{ WARN "need SocksPolicy accept $IP in /etc/tor/torrc" ; }
fi
# This works off the $https_proxy environment variable in the form http://127.0.0.1:9128
# so you can test trans routing by call this with that unset.
ssltest_proxies $onion
rm -f $TMPDIR/${LOGP}.*.*
OUTF=$TMPDIR/${LOGP}.out
# outer loop: once per CA bundle; inner loop: once per site
for CAFILE in $SSLTEST_CERTS ; do
grep -q "^wlan[1-9][ ]00000000" /proc/net/route || {
WARN $prog we are not connected >&2
exit `expr 256 - 1`
}
[ -f $CAFILE ] || { ERROR "CAfile not found $CAFILE" ; continue ; }
DATE DBUG CAFILE=$CAFILE --address $SOCKS_HOST --port $SOCKS_PORT
cacert=`basename $CAFILE`
for site in "${SITES[@]##*/}" ; do
warns=0
IF=`route | grep ^def |sed -e 's/.* //'`
[ -n "$IF" ] || { WARN "$site no route" ; continue ; }
SITE_OUTF=$TMPDIR/${LOGP}_${site}.out
DEBUG=1 DATE DBUG $site CAFILE=$CAFILE $SITE_OUTF | tee -a $SITE_OUTF
# ERROR: Could not resolve hostname www.devuan.org.
# resolve the site, retrying; tor-resolve when targeting onions
i=0
while [ $i -le $DNS_TRIES ] ; do
if [ $onion -eq 0 ] ; then
site_ip=`dig $site +retry=5 +tries=2 +noall +answer +short | awk '{ print $1 }'` && break
else
site_ip=`tor-resolve -4 $site` && break
fi
i=`expr $i + 1`
sleep 5
done
# NOTE(review): success on the last allowed attempt leaves i==DNS_TRIES,
# which still triggers this 'failed resolve' branch — confirm intended.
[ $i -ge $DNS_TRIES ] && ERROR failed resolve $site | tee -a $SITE_OUTF
[ $i -ge $DNS_TRIES ] && site_ip=$site
# one stanza per scanner: build its args, record it ran, invoke it
elt=sslscan
SSLSCAN_ELTS="$SSLSCAN_ARGS --certs $CAFILE --sni-name $site"
[[ $SSLTEST_TESTS =~ .*${elt}.* ]] && \
tests_ran+=($elt) && \
ssltest_sslscan $elt $site $SITE_OUTF $site_ip
elt=openssl
OPENSSL_ELTS="$OPENSSL_ARGS -CAfile $CAFILE -servername $site"
[ -n "$SSL_CONNECT" ] && OPENSSL_ELTS="$OPENSSL_ARGS -connect ${SSL_CONNECT}:$SSL_PORT"
[[ $SSLTEST_TESTS =~ .*${elt}.* ]] && \
[ $onion -eq 0 ] && \
tests_ran+=($elt) && \
ssltest_openssl $elt $site $SITE_OUTF $site_ip
elt=testssl
rm -f $TMPDIR/${LOGP}.$site.$elt.json # --jsonfile-pretty $TMPDIR/${LOGP}.$site.$elt.json
TESTSSL_ELTS="$TESTSSL_ARGS --add-ca $CAFILE --append --ip $site_ip"
[[ $SSLTEST_TESTS =~ .*${elt}.* ]] && \
[ $onion -eq 0 ] && \
tests_ran+=($elt) && \
ssltest_testssl $elt $site $SITE_OUTF $site_ip
elt=analyze-ssl
ANALYZE_ELTS="$ANALYZE_ARGS --CApath $CAFILE --name $site"
[[ $SSLTEST_TESTS =~ .*${elt}.* ]] && \
[ $SSL_PORT = 443 ] && \
tests_ran+=($elt) && \
ssltest_analyze_ssl $elt $site $SITE_OUTF $site_ip
elt=curl
CURL_ELTS="$CURL_ARGS --cacert $CAFILE --output /dev/null"
[[ $SSLTEST_TESTS =~ .*${elt}.* ]] && \
tests_ran+=($elt) && \
ssltest_curl $elt $site $SITE_OUTF $site_ip
elt=nmap
NMAP_ELTS="$NMAP_ARGS --host-timeout $TIMEOUT -p $SSL_PORT"
[[ $SSLTEST_TESTS =~ .*${elt}.* ]] && \
tests_ran+=($elt) && \
ssltest_nmap $elt $site $SITE_OUTF $site_ip
elt=ssllabs
[ $SSL_PORT = 443 ] && \
[[ $SSLTEST_TESTS =~ .*${elt}.* ]] && \
tests_ran+=($elt) && \
ssltest_ssllabs $elt $site $SITE_OUTF $site_ip
done
done
# bonus
elt=alt_svc
[ $SSL_PORT = 443 ] && \
[[ $SSLTEST_TESTS =~ .*${elt}.* ]] && \
tests_ran+=($elt) && \
ssltest_http2_alt_svc $elt - $SITE_OUTF -
cat $TMPDIR/${LOGP}_*.out > $OUTF
# https://hynek.me/articles/hardening-your-web-servers-ssl-ciphers/
a=`openssl ciphers -v 'ECDH+AESGCM:ECDH+CHACHA20:ECDH+AES256:ECDH+AES128:!aNULL:!SHA1:!AESCCM' | wc -l | sed -e 's/ .*//'`
[ $? -eq 0 ] && [ "$a" -eq 0 ] && \
WARN "no openssl ciphers" | tee -a $OUTF
DEBUG=1 DBUG "${#tests_ran[@]}" TESTS="${tests_ran[@]}"
# summarize: count WARN:/ERROR: lines across all per-site logs
warns=`grep -c WARN: $OUTF`
[ $? -eq 0 ] && [ "$warns" -gt 0 ] && DATE WARN "$warns warns for $site in $OUTF"
errs=`grep -c 'ERROR:\|EROR:' $OUTF`
[ $? -eq 0 ] && [ "$errs" -gt 0 ] && DATE ERROR "$errs errs for $site in $OUTF"
[ $? -eq 0 ] && [ "$warns" -eq 0 -a "$errs" -eq 0 ] && \
DATE INFO "NO warns/errs for $site in $OUTF"
# NOTE(review): exit statuses are taken mod 256; >255 errors would wrap.
exit $errs
# pysslscan scan --scan=protocol.http --scan=vuln.heartbleed --scan=server.renegotiation \
# --scan=server.preferred_ciphers --scan=server.ciphers \
# --report=term:rating=ssllabs.2009e --ssl2 --ssl3 --tls10 --tls11 --tls12
# /usr/local/bin/ssl-cipher-check.pl

View File

@ -0,0 +1,148 @@
INFO:(B curl curl --silent -vvv --head --connect-timeout 30 --tlsv1.3 -x socks5h://127.0.0.1:9050 --cacert /etc/ssl/certs/ca-certificates.crt --output /dev/null https://files.pythonhosted.org:443
* Uses proxy env variable no_proxy == 'localhost,127.0.0.1'
* Trying 127.0.0.1:9050...
* Connected to 127.0.0.1 (127.0.0.1) port 9050
* SOCKS5 connect to files.pythonhosted.org:443 (remotely resolved)
* SOCKS5 request granted.
* Connected to 127.0.0.1 (127.0.0.1) port 9050
* ALPN: curl offers h2,http/1.1
} [5 bytes data]
* TLSv1.3 (OUT), TLS handshake, Client hello (1):
} [512 bytes data]
* CAfile: /etc/ssl/certs/ca-certificates.crt
* CApath: /etc/ssl/certs
{ [5 bytes data]
* TLSv1.3 (IN), TLS handshake, Server hello (2):
{ [122 bytes data]
* TLSv1.3 (IN), TLS handshake, Encrypted Extensions (8):
{ [19 bytes data]
* TLSv1.3 (IN), TLS handshake, Certificate (11):
{ [2831 bytes data]
* TLSv1.3 (IN), TLS handshake, CERT verify (15):
{ [264 bytes data]
* TLSv1.3 (IN), TLS handshake, Finished (20):
{ [36 bytes data]
* TLSv1.3 (OUT), TLS change cipher, Change cipher spec (1):
} [1 bytes data]
* TLSv1.3 (OUT), TLS handshake, Finished (20):
} [36 bytes data]
* SSL connection using TLSv1.3 / TLS_AES_128_GCM_SHA256
* ALPN: server accepted h2
* Server certificate:
* subject: CN=*.pythonhosted.org
* start date: Jul 1 20:50:25 2023 GMT
* expire date: Aug 1 20:50:24 2024 GMT
* subjectAltName: host "files.pythonhosted.org" matched cert's "*.pythonhosted.org"
* issuer: C=BE; O=GlobalSign nv-sa; CN=GlobalSign Atlas R3 DV TLS CA 2023 Q2
* SSL certificate verify ok.
{ [5 bytes data]
* using HTTP/2
* [HTTP/2] [1] OPENED stream for https://files.pythonhosted.org:443/
* [HTTP/2] [1] [:method: HEAD]
* [HTTP/2] [1] [:scheme: https]
* [HTTP/2] [1] [:authority: files.pythonhosted.org]
* [HTTP/2] [1] [:path: /]
* [HTTP/2] [1] [user-agent: curl/8.4.0]
* [HTTP/2] [1] [accept: */*]
} [5 bytes data]
> HEAD / HTTP/2
> Host: files.pythonhosted.org
> User-Agent: curl/8.4.0
> Accept: */*
>
{ [1 bytes data]
* TLSv1.3 (IN), TLS handshake, Newsession Ticket (4):
{ [193 bytes data]
< HTTP/2 200
< content-type: text/html
< accept-ranges: bytes
< date: Tue, 09 Jan 2024 15:32:40 GMT
< age: 0
< x-served-by: cache-iad-kiad7000029-IAD, cache-bma1641-BMA
< x-cache: HIT, MISS
< x-cache-hits: 25, 0
< x-timer: S1704814361.674330,VS0,VE105
< strict-transport-security: max-age=31536000; includeSubDomains; preload
< x-frame-options: deny
< x-xss-protection: 1; mode=block
< x-content-type-options: nosniff
< x-permitted-cross-domain-policies: none
< x-robots-header: noindex
< content-length: 1853
<
* Connection #0 to host 127.0.0.1 left intact
EROR:(B curl NO subject: CN=files.pythonhosted.org CA=ca-certificates.crt = /tmp/TestSSL_24-01-09_15_1564286_files.pythonhosted.org_curl.out
INFO:(B curl curl --silent -vvv --head --connect-timeout 30 --tlsv1.3 -x socks5h://127.0.0.1:9050 --cacert /usr/local/etc/ssl/cacert-testforge.pem --output /dev/null https://files.pythonhosted.org:443
* Uses proxy env variable no_proxy == 'localhost,127.0.0.1'
* Trying 127.0.0.1:9050...
* Connected to 127.0.0.1 (127.0.0.1) port 9050
* SOCKS5 connect to files.pythonhosted.org:443 (remotely resolved)
* SOCKS5 request granted.
* Connected to 127.0.0.1 (127.0.0.1) port 9050
* ALPN: curl offers h2,http/1.1
} [5 bytes data]
* TLSv1.3 (OUT), TLS handshake, Client hello (1):
} [512 bytes data]
* CAfile: /usr/local/etc/ssl/cacert-testforge.pem
* CApath: /etc/ssl/certs
{ [5 bytes data]
* TLSv1.3 (IN), TLS handshake, Server hello (2):
{ [122 bytes data]
* TLSv1.3 (IN), TLS handshake, Encrypted Extensions (8):
{ [19 bytes data]
* TLSv1.3 (IN), TLS handshake, Certificate (11):
{ [2831 bytes data]
* TLSv1.3 (IN), TLS handshake, CERT verify (15):
{ [264 bytes data]
* TLSv1.3 (IN), TLS handshake, Finished (20):
{ [36 bytes data]
* TLSv1.3 (OUT), TLS change cipher, Change cipher spec (1):
} [1 bytes data]
* TLSv1.3 (OUT), TLS handshake, Finished (20):
} [36 bytes data]
* SSL connection using TLSv1.3 / TLS_AES_128_GCM_SHA256
* ALPN: server accepted h2
* Server certificate:
* subject: CN=*.pythonhosted.org
* start date: Jul 1 20:50:25 2023 GMT
* expire date: Aug 1 20:50:24 2024 GMT
* subjectAltName: host "files.pythonhosted.org" matched cert's "*.pythonhosted.org"
* issuer: C=BE; O=GlobalSign nv-sa; CN=GlobalSign Atlas R3 DV TLS CA 2023 Q2
* SSL certificate verify ok.
{ [5 bytes data]
* using HTTP/2
* [HTTP/2] [1] OPENED stream for https://files.pythonhosted.org:443/
* [HTTP/2] [1] [:method: HEAD]
* [HTTP/2] [1] [:scheme: https]
* [HTTP/2] [1] [:authority: files.pythonhosted.org]
* [HTTP/2] [1] [:path: /]
* [HTTP/2] [1] [user-agent: curl/8.4.0]
* [HTTP/2] [1] [accept: */*]
} [5 bytes data]
> HEAD / HTTP/2
> Host: files.pythonhosted.org
> User-Agent: curl/8.4.0
> Accept: */*
>
{ [1 bytes data]
* TLSv1.3 (IN), TLS handshake, Newsession Ticket (4):
{ [193 bytes data]
< HTTP/2 200
< content-type: text/html
< accept-ranges: bytes
< date: Tue, 09 Jan 2024 15:34:47 GMT
< age: 0
< x-served-by: cache-iad-kiad7000029-IAD, cache-bma1672-BMA
< x-cache: HIT, MISS
< x-cache-hits: 27, 0
< x-timer: S1704814487.995090,VS0,VE103
< strict-transport-security: max-age=31536000; includeSubDomains; preload
< x-frame-options: deny
< x-xss-protection: 1; mode=block
< x-content-type-options: nosniff
< x-permitted-cross-domain-policies: none
< x-robots-header: noindex
< content-length: 1853
<
* Connection #0 to host 127.0.0.1 left intact
INFO:(B curl CA=cacert-testforge.pem = /tmp/TestSSL_24-01-09_15_1564286_files.pythonhosted.org_curl.out

View File

@ -207,7 +207,7 @@ enable =
logging-format-interpolation, logging-format-interpolation,
# logging-not-lazy, # logging-not-lazy,
multiple-imports, multiple-imports,
multiple-statements, # multiple-statements,
no-classmethod-decorator, no-classmethod-decorator,
no-staticmethod-decorator, no-staticmethod-decorator,
protected-access, protected-access,
@ -223,7 +223,7 @@ enable =
unneeded-not, unneeded-not,
useless-else-on-loop, useless-else-on-loop,
deprecated-method, # deprecated-method,
deprecated-module, deprecated-module,
too-many-boolean-expressions, too-many-boolean-expressions,
@ -248,12 +248,15 @@ enable =
disable = disable =
bad-indentation, bad-indentation,
consider-using-f-string, consider-using-f-string,
consider-using-with,
deprecated-method,
duplicate-code, duplicate-code,
file-ignored, file-ignored,
fixme, fixme,
global-statement, global-statement,
invalid-name, invalid-name,
locally-disabled, locally-disabled,
multiple-statements,
no-else-return, no-else-return,
## no-self-use, ## no-self-use,
suppressed-message, suppressed-message,
@ -271,10 +274,15 @@ disable =
unused-wildcard-import, unused-wildcard-import,
use-maxsplit-arg, use-maxsplit-arg,
logging-not-lazy,
line-too-long,
import-outside-toplevel,
logging-fstring-interpolation, logging-fstring-interpolation,
# new # new
missing-module-docstring, missing-module-docstring,
missing-class-docstring, missing-class-docstring,
missing-function-docstring,
use-dict-literal,
[REPORTS] [REPORTS]
output-format = text output-format = text
@ -372,4 +380,4 @@ ext-import-graph =
int-import-graph = int-import-graph =
[EXCEPTIONS] [EXCEPTIONS]
overgeneral-exceptions = BaseException overgeneral-exceptions = builtins.BaseException

View File

@ -35,7 +35,7 @@ if [ "$#" -eq 0 ] ; then
cat > $PREFIX/bin/$PKG.bash << EOF cat > $PREFIX/bin/$PKG.bash << EOF
#!/bin/sh #!/bin/sh
# -*- mode: sh; tab-width: 8; encoding: utf-8-unix -*- # -*- mode: sh; tab-width: 8; encoding: utf-8-unix -*-
ROLE=text ROLE=$ROLE
# https://$GIT_HUB/$GIT_USER/$GIT_DIR/ # https://$GIT_HUB/$GIT_USER/$GIT_DIR/
cd $PREFIX/src/ || exit 1 cd $PREFIX/src/ || exit 1
exec perl $PKG "\$@" exec perl $PKG "\$@"

View File

@ -25,10 +25,10 @@ EXT="tar.gz"
URL="files.pythonhosted.org/packages/03/4f/cccab1ec2e0ecb05120184088e00404b38854809cf35aa76889406fbcbad/ansible-2.9.10.tar.gz" URL="files.pythonhosted.org/packages/03/4f/cccab1ec2e0ecb05120184088e00404b38854809cf35aa76889406fbcbad/ansible-2.9.10.tar.gz"
TODIR=/o/data/TestForge/src/ansible TODIR=/o/data/TestForge/src/ansible
if [ -f /var/local/src/var_local_src.bash ] ; then if [ -f /usr/local/src/usr_local_src.bash ] ; then
. /var/local/src/var_local_src.bash . /usr/local/src/usr_local_src.bash
else else
ols_are_we_connected () { route | grep -q ^default ; return $? ; } msys_are_we_connected () { route | grep -q ^default ; return $? ; }
fi fi
cd $PREFIX/src || exit 2 cd $PREFIX/src || exit 2
@ -37,15 +37,11 @@ WD=$PWD
if [ "$#" -eq 0 ] ; then if [ "$#" -eq 0 ] ; then
if [ ! -d "$DIR" ] ; then if [ ! -d "$DIR" ] ; then
if [ ! -f "$HTTP_DIR/$URL" ] ; then if [ ! -f "$HTTP_DIR/$URL" ] ; then
ols_are_we_connected || { DEBUG not connected ; exit 0 ; } msys_are_we_connected || { DEBUG not connected ; exit 0 ; }
wget -xc -P "$HTTP_DIR" "https://$URL" || exit 2 wget -xc -P "$HTTP_DIR" "https://$URL" || exit 2
fi fi
if [ "$EXT" = "zip" ] ; then
unzip "$HTTP_DIR/$URL" || exit 3
else
tar xfvz "$HTTP_DIR/$URL" || exit 3 tar xfvz "$HTTP_DIR/$URL" || exit 3
fi fi
fi
cd "$DIR" || exit 4 cd "$DIR" || exit 4
@ -57,15 +53,23 @@ if [ "$#" -eq 0 ] ; then
pip3.sh install . >> install.log 2>&1\ pip3.sh install . >> install.log 2>&1\
|| { echo "ERROR: code $?" ; tail install.log ; exit 5 ; } || { echo "ERROR: code $?" ; tail install.log ; exit 5 ; }
rsync -vax build/lib/ $PREFIX/$LIB/python$PYTHON_MINOR/site-packages
"$PYTHON_EXE" -c "import $MOD" || exit 10 "$PYTHON_EXE" -c "import $MOD" || exit 10
rsync -vax bin/ /usr/local/bin/
pushd /usr/local/bin/
rm ansible
ln -s ../$LIB/python$PYTHON_MINOR//site-packages/ansible/cli/scripts/ansible_cli_stub.py ansible
popd
sed -e '1s/^#[!].*@#!'"$PYTHON_EXE"'@' -i ansible
chmod 755 ansible
grep -l '_tput\|_src' *sh ../bin*sh | \ grep -l '_tput\|_src' *sh ../bin*sh | \
xargs grep -l 'echo \(INFO\|DEBUG\|ERROR\|DEBUG\):' | \ xargs grep -l 'echo \(INFO\|DEBUG\|ERROR\|DEBUG\):' | \
xargs sed -e 's@echo \(INFO\|DEBUG\|ERROR\|DEBUG\):@\1 @' xargs sed -e 's@echo \(INFO\|DEBUG\|ERROR\|DEBUG\):@\1 @'
if [ -d $PREFIX/src/ansible-$AVER/docs/docsite ] ; then if [ -d $PREFIX/src/ansible/docs/docsite ] ; then
cd $PREFIX/src/ansible-$AVER/docs/docsite cd $PREFIX/src/ansible/docs/docsite
[ -f htmldocs.log ] || make -n -f Makefile htmldocs > htmldocs.log 2>&1 || exit 2$? [ -f htmldocs.log ] || make -n -f Makefile htmldocs > htmldocs.log 2>&1 || exit 2$?
[ -f info.log ] || make -n -f Makefile.sphinx info > info.log 2>&1 || exit 3$? [ -f info.log ] || make -n -f Makefile.sphinx info > info.log 2>&1 || exit 3$?
@ -82,8 +86,8 @@ elif [ $1 = 'test' ] ; then
elif [ "$1" = 'refresh' ] ; then elif [ "$1" = 'refresh' ] ; then
cd $PREFIX/src/$DIR || exit 60 cd $PREFIX/src/$DIR || exit 60
env PWD=$PREFIX/src/$DIR \ # env PWD=$PREFIX/src/$DIR \
/usr/local/sbin/base_diff_from_dst.bash $ROLE || exit 6$? # /usr/local/sbin/base_diff_from_dst.bash $ROLE || exit 6$?
fi fi
exit 0 exit 0

View File

@ -8,7 +8,7 @@ PREFIX=/usr/local
ROLE=toxcore ROLE=toxcore
DESC="" DESC=""
. $PREFIX/bin/usr_local_tput.bash || exit 1 . $PREFIX/bin/usr_local_src.bash || exit 1
PKG=toxcore PKG=toxcore
DIR=c-$PKG DIR=c-$PKG
@ -28,7 +28,7 @@ if [ "$#" -eq 0 ] ; then
if [ ! -d "$PREFIX/net/Git/$GIT_HUB/$GIT_USER/$GIT_DIR" ] ; then if [ ! -d "$PREFIX/net/Git/$GIT_HUB/$GIT_USER/$GIT_DIR" ] ; then
[ -d "$PREFIX/net/Git/$GIT_HUB/$GIT_USER" ] || \ [ -d "$PREFIX/net/Git/$GIT_HUB/$GIT_USER" ] || \
mkdir "$PREFIX/net/Git/$GIT_HUB/$GIT_USER" mkdir "$PREFIX/net/Git/$GIT_HUB/$GIT_USER"
ols_are_we_connected || { DEBUG not connected ; exit 0 ; } msys_are_we_connected || { DEBUG not connected ; exit 0 ; }
cd "$PREFIX/net/Git/$GIT_HUB/$GIT_USER" cd "$PREFIX/net/Git/$GIT_HUB/$GIT_USER"
git clone -b $GIT_BRAN --depth=1 https://$GIT_HUB/$GIT_USER/$GIT_DIR || exit 4 git clone -b $GIT_BRAN --depth=1 https://$GIT_HUB/$GIT_USER/$GIT_DIR || exit 4
git clone --depth=1 https://$GIT_HUB/$GIT_USER/dockerfiles git clone --depth=1 https://$GIT_HUB/$GIT_USER/dockerfiles
@ -42,7 +42,7 @@ if [ "$#" -eq 0 ] ; then
[ -f third_party/cmp/Makefile ] || git submodule update --init || exit 6 [ -f third_party/cmp/Makefile ] || git submodule update --init || exit 6
# ols_apply_testforge_patches # msys_apply_testforge_patches
# # [ -f CMakeLists.txt.dst ] || patch -b -z.dst < toxcore.diff || exit 7 # # [ -f CMakeLists.txt.dst ] || patch -b -z.dst < toxcore.diff || exit 7
[ -f cmake.sh ] || cat > cmake.sh << EOF [ -f cmake.sh ] || cat > cmake.sh << EOF
@ -80,6 +80,42 @@ make .. > make.log 2>&1
ls \$LIB/*so* || { echo ERROR \$LIB ; exit 2 ; } ls \$LIB/*so* || { echo ERROR \$LIB ; exit 2 ; }
EOF EOF
[ -f setup.sh ] || cat > setup.sh << \EOF
#!/bin/sh
PREFIX=$PREFIX
ROLE=$ROLE
CORE=$PREFIX/src/c-toxcore
DIR=_build
LIB=$CORE/$DIR/.libs
cd $CORE | exit 3
[ -f configure ] || autoreconf -i
if [ ! -d $LIB ] ; then
[ -d $DIR ] || mkdir $DIR
cd $DIR
grep -q -e '-g -O3' ../configure && \
sed -e 's/-g -O3/-g -02/' -i ../configure
../configure \
--disable-ipv6 \
--enable-daemon \
--enable-dht-bootstrap \
--enable-test-network \
--enable-av \
--enable-tests \
--disable-rt \
--with-log-level=trace \
--prefix=$PREFIX \
--cache-file=/dev/null \
>> c.log 2>&1
make >> make.log 2>&1
fi
[ -d $LIB ] || { echo ERROR $LIB ; exit 2 ; }
EOF
[ -d _build ] && exit 0
bash cmake.sh || { bash cmake.sh || {
retval=$? retval=$?
ERROR cmake $retval ERROR cmake $retval
@ -92,13 +128,13 @@ EOF
exit 3$retval exit 3$retval
} }
cp -p other/bootstrap_daemon/tox-bootstrapd $PREFIX/bin #? cp -p other/bootstrap_daemon/tox-bootstrapd $PREFIX/bin
cp -p other/bootstrap_daemon/tox-bootstrapd.sh $PREFIX/etc/init.d/tox-bootstrapd #? cp -p other/bootstrap_daemon/tox-bootstrapd.sh $PREFIX/etc/init.d/tox-bootstrapd
# ln -s $PREFIX/etc/init.d/tox-bootstrapd /etc/init.d #? ln -s $PREFIX/etc/init.d/tox-bootstrapd /etc/init.d
exit 0 exit 0
elif [ $1 = 'check' ] ; then # 1* elif [ $1 = 'check' ] ; then # 1*
# ols_test_bins && exit 0 || exit $? # msys_test_bins && exit 0 || exit $?
[ ! -d $DIR/_build ] && WARN not built yet $DIR && exit 11 [ ! -d $DIR/_build ] && WARN not built yet $DIR && exit 11
[ -f $DIR/_build/libtoxcore.so.${VERS} ] && WARN not compiled yet $DIR && exit 12 [ -f $DIR/_build/libtoxcore.so.${VERS} ] && WARN not compiled yet $DIR && exit 12
@ -115,7 +151,7 @@ elif [ "$1" = 'refresh' ] ; then # 6*
/usr/local/sbin/base_diff_from_dst.bash $ROLE || exit 6$? /usr/local/sbin/base_diff_from_dst.bash $ROLE || exit 6$?
elif [ "$1" = 'update' ] ; then # 7* elif [ "$1" = 'update' ] ; then # 7*
ols_are_we_connected || exit 0 msys_are_we_connected || exit 0
cd $PREFIX/src/$DIR || exit 70 cd $PREFIX/src/$DIR || exit 70
git pull || exit 7$? git pull || exit 7$?
fi fi

View File

@ -1,5 +1,268 @@
#!/bin/sh #!/bin/sh
ROLE=toxcore ROLE=toxcore
#!/bin/sh
# -*- mode: sh; tab-width: 8; encoding: utf-8-unix -*-
prog=`basename $0 .bash`
PREFIX=/usr/local
ROLE=toxcore
PKG=dracut
VER=050
DIR=${PKG}-$VER
URL=distfiles.gentoo.org/distfiles/$DIR.tar.xz
URI="https://www.kernel.org/pub/linux/utils/boot/${VER}/${DIR}.tar.xz"
ASC="https://www.kernel.org/pub/linux/utils/boot/${VER}/${DIR}.tar.xz"
#https://mirrors.edge.kernel.org/pub/linux/utils/boot/dracut/dracut-055.tar.sign #https://mirrors.edge.kernel.org/pub/linux/utils/boot/dracut/dracut-055.tar.sign
#https://mirrors.edge.kernel.org/pub/linux/utils/boot/dracut/dracut-055.tar.gz #https://mirrors.edge.kernel.org/pub/linux/utils/boot/dracut/dracut-055.tar.gz
gpg --recv-keys 9BAD8B9BBD1CBDEDE3443292900F3C4971086004
cd $PREFIX/src || exit 2
WD=$PWD
if [ -d /etc/apt -a $USER = root ] ; then
# old_debian_requires asciidoc libkmod-dev libkmod-dev xsltproc
which xsltproc 2>/dev/null || apt-get install xsltproc || exit 2
which asciidoc 2>/dev/null || apt-get install asciidoc || exit 2
elif [ -d /etc/portage -a $USER = root ] ; then
which cpio >/dev/null || emerge -fp app-arch/cpio || exit 2
[ -f /usr/lib64/libkmod.so ] || emerge -fp '>=sys-apps/kmod-23[tools]' || exit 2
fi
if [ ! -f $DIR/dracut-initramfs-restore ] ; then
if [ -e $PREFIX/net/Http/$URL ] ; then
ip route|grep -q ^default || { echo "DEBUG: $0 not connected" ; exit 0 ; }
wget -xc -P $PREFIX/net/Http https://$URL
fi
tar xvfJ $PREFIX/net/Http/$URL
fi
cd $DIR || exit 3
true || \
grep -q ^prefix=$PREFIX configure || \
sed -e 's/^KMOD_LIBS.*/KMOD_LIBS ?= -lkmod/' \
-e 's@^ exit 1@# exit 1@' \
-e "s@^prefix=/usr$@prefix=$PREFIX@" -i configure
src_configure() {
local PV=$VER
# tc-export CC PKG_CONFIG
sed -e "s@^prefix=/usr\$@prefix=$PREFIX@" -i configure
./configure \
--disable-documentation \
--prefix="${PREFIX}" \
--sysconfdir="${PREFIX}/etc" \
|| return 1
# --bashcompletiondir="$(get_bashcompdir)"
# --systemdsystemunitdir="$(systemd_get_systemunitdir)"
if [ ! -f dracut-version.sh ] ; then
# Source tarball from github doesn't include this file
echo "DRACUT_VERSION=${PV}" > dracut-version.sh
fi
return 0
}
if [ "$#" -eq 0 ] ; then
if [ ! -f dracut-initramfs-restore.sh.dst ] ; then
false && \
if [ -d /usr/local/patches/$ROLE/usr/local/src/$DIR/files ] ; then
find /usr/local/patches/$ROLE/usr/local/src/$DIR/files -type f -name \*.patch | \
while read file ; do
root=`echo $file | sed -e 's/.patch//' -e "s@$PREFIX/patches/$ROLE/usr/local/src/$DIR/@@"`
[ -f $root.dst ] && continue
patch -b -z.dst $root < $file
done || exit 5
fi
# patches
if [ -d /usr/local/patches/$ROLE/usr/local/src/$DIR/ ] ; then
find /usr/local/patches/$ROLE/usr/local/src/$DIR/ -type f -name \*.diff | \
while read file ; do
root=$( echo $file | sed -e 's/.diff//' \
-e "s@$PREFIX/patches/$ROLE/usr/local/src/$DIR/@@" )
[ -f $root.dst ] && continue
patch -b -z.dst $root < $file
done || exit 5
fi
find * -type f -name \*sh -exec grep -q /usr/lib/dracut {} \; -print | \
while read file ; do
[ -f $file.dst ] || cp -p $file $file.dst
sed -e "s@/usr/lib/dracut@$PREFIX/lib/dracut@" $file
chmod 755 $file
done
fi
[ -f Makefile.inc ] || \
src_configure || exit 6
grep -q systemdsystemunitdir Makefile.inc || \
cat >> Makefile.inc << EOF
systemdsystemunitdir ?= /usr/local/lib/systemd
EOF
grep -v =$ dracut-version.sh && sed -e "s/=/=$VER/" dracut-version.sh
[ -x install/dracut-install ] || make >> make.log 2>&1 || exit 7
[ -x $PREFIX/lib/dracut/dracut-install -a \
$PREFIX/lib/dracut/dracut-install -nt install/dracut-install ] || \
make install >> install.log 2>&1 || exit 8
elif [ "$1" = 'test' ] ; then
$PREFIX/bin/$PKG --help || exit 30
# Has tests
elif [ "$1" = 'refresh' ] ; then # 6*
cd $WD/$DIR || exit 6
find * -name \*.dst | while read file ; do
base=`echo $file |sed -e 's/.dst//'`
[ -f $base.diff -a $base.diff -nt $base ] && continue
diff -c -C 5 $file $base>$base.diff
done
find * -name \*.diff | tar cf - -T - | \
tar xfBv - -C ../../patches/gpgkey/usr/local/src/dracut-050/
fi
exit 0
cp -p install/dracut-install $PREFIX/bin
rm -f -- "lsinitrd.1.xml"
asciidoc -d manpage -b docbook -o "lsinitrd.1.xml" lsinitrd.1.asc
rm -f -- "lsinitrd.1"
xsltproc -o "lsinitrd.1" -nonet http://docbook.sourceforge.net/release/xsl/current/manpages/docbook.xsl lsinitrd.1.xml
rm -f -- "dracut.conf.5.xml"
asciidoc -d manpage -b docbook -o "dracut.conf.5.xml" dracut.conf.5.asc
rm -f -- "dracut.conf.5"
xsltproc -o "dracut.conf.5" -nonet http://docbook.sourceforge.net/release/xsl/current/manpages/docbook.xsl dracut.conf.5.xml
rm -f -- "dracut.cmdline.7.xml"
asciidoc -d manpage -b docbook -o "dracut.cmdline.7.xml" dracut.cmdline.7.asc
rm -f -- "dracut.cmdline.7"
xsltproc -o "dracut.cmdline.7" -nonet http://docbook.sourceforge.net/release/xsl/current/manpages/docbook.xsl dracut.cmdline.7.xml
rm -f -- "dracut.bootup.7.xml"
asciidoc -d manpage -b docbook -o "dracut.bootup.7.xml" dracut.bootup.7.asc
rm -f -- "dracut.bootup.7"
xsltproc -o "dracut.bootup.7" -nonet http://docbook.sourceforge.net/release/xsl/current/manpages/docbook.xsl dracut.bootup.7.xml
rm -f -- "dracut.modules.7.xml"
asciidoc -d manpage -b docbook -o "dracut.modules.7.xml" dracut.modules.7.asc
rm -f -- "dracut.modules.7"
xsltproc -o "dracut.modules.7" -nonet http://docbook.sourceforge.net/release/xsl/current/manpages/docbook.xsl dracut.modules.7.xml
rm -f -- "dracut.8.xml"
asciidoc -d manpage -b docbook -o "dracut.8.xml" dracut.8.asc
rm -f -- "dracut.8"
xsltproc -o "dracut.8" -nonet http://docbook.sourceforge.net/release/xsl/current/manpages/docbook.xsl dracut.8.xml
rm -f -- "dracut-catimages.8.xml"
asciidoc -d manpage -b docbook -o "dracut-catimages.8.xml" dracut-catimages.8.asc
rm -f -- "dracut-catimages.8"
xsltproc -o "dracut-catimages.8" -nonet http://docbook.sourceforge.net/release/xsl/current/manpages/docbook.xsl dracut-catimages.8.xml
rm -f -- "mkinitrd.8.xml"
asciidoc -d manpage -b docbook -o "mkinitrd.8.xml" mkinitrd.8.asc
rm -f -- "mkinitrd.8"
xsltproc -o "mkinitrd.8" -nonet http://docbook.sourceforge.net/release/xsl/current/manpages/docbook.xsl mkinitrd.8.xml
rm -f -- "mkinitrd-suse.8.xml"
asciidoc -d manpage -b docbook -o "mkinitrd-suse.8.xml" mkinitrd-suse.8.asc
rm -f -- "mkinitrd-suse.8"
xsltproc -o "mkinitrd-suse.8" -nonet http://docbook.sourceforge.net/release/xsl/current/manpages/docbook.xsl mkinitrd-suse.8.xml
rm -f -- "modules.d/98dracut-systemd/dracut-cmdline.service.8.xml"
asciidoc -d manpage -b docbook -o "modules.d/98dracut-systemd/dracut-cmdline.service.8.xml" modules.d/98dracut-systemd/dracut-cmdline.service.8.asc
rm -f -- "modules.d/98dracut-systemd/dracut-cmdline.service.8"
xsltproc -o "modules.d/98dracut-systemd/dracut-cmdline.service.8" -nonet http://docbook.sourceforge.net/release/xsl/current/manpages/docbook.xsl modules.d/98dracut-systemd/dracut-cmdline.service.8.xml
rm -f -- "modules.d/98dracut-systemd/dracut-initqueue.service.8.xml"
asciidoc -d manpage -b docbook -o "modules.d/98dracut-systemd/dracut-initqueue.service.8.xml" modules.d/98dracut-systemd/dracut-initqueue.service.8.asc
rm -f -- "modules.d/98dracut-systemd/dracut-initqueue.service.8"
xsltproc -o "modules.d/98dracut-systemd/dracut-initqueue.service.8" -nonet http://docbook.sourceforge.net/release/xsl/current/manpages/docbook.xsl modules.d/98dracut-systemd/dracut-initqueue.service.8.xml
rm -f -- "modules.d/98dracut-systemd/dracut-mount.service.8.xml"
asciidoc -d manpage -b docbook -o "modules.d/98dracut-systemd/dracut-mount.service.8.xml" modules.d/98dracut-systemd/dracut-mount.service.8.asc
rm -f -- "modules.d/98dracut-systemd/dracut-mount.service.8"
xsltproc -o "modules.d/98dracut-systemd/dracut-mount.service.8" -nonet http://docbook.sourceforge.net/release/xsl/current/manpages/docbook.xsl modules.d/98dracut-systemd/dracut-mount.service.8.xml
rm -f -- "modules.d/98dracut-systemd/dracut-shutdown.service.8.xml"
asciidoc -d manpage -b docbook -o "modules.d/98dracut-systemd/dracut-shutdown.service.8.xml" modules.d/98dracut-systemd/dracut-shutdown.service.8.asc
rm -f -- "modules.d/98dracut-systemd/dracut-shutdown.service.8"
xsltproc -o "modules.d/98dracut-systemd/dracut-shutdown.service.8" -nonet http://docbook.sourceforge.net/release/xsl/current/manpages/docbook.xsl modules.d/98dracut-systemd/dracut-shutdown.service.8.xml
rm -f -- "modules.d/98dracut-systemd/dracut-pre-mount.service.8.xml"
asciidoc -d manpage -b docbook -o "modules.d/98dracut-systemd/dracut-pre-mount.service.8.xml" modules.d/98dracut-systemd/dracut-pre-mount.service.8.asc
rm -f -- "modules.d/98dracut-systemd/dracut-pre-mount.service.8"
xsltproc -o "modules.d/98dracut-systemd/dracut-pre-mount.service.8" -nonet http://docbook.sourceforge.net/release/xsl/current/manpages/docbook.xsl modules.d/98dracut-systemd/dracut-pre-mount.service.8.xml
rm -f -- "modules.d/98dracut-systemd/dracut-pre-pivot.service.8.xml"
asciidoc -d manpage -b docbook -o "modules.d/98dracut-systemd/dracut-pre-pivot.service.8.xml" modules.d/98dracut-systemd/dracut-pre-pivot.service.8.asc
rm -f -- "modules.d/98dracut-systemd/dracut-pre-pivot.service.8"
xsltproc -o "modules.d/98dracut-systemd/dracut-pre-pivot.service.8" -nonet http://docbook.sourceforge.net/release/xsl/current/manpages/docbook.xsl modules.d/98dracut-systemd/dracut-pre-pivot.service.8.xml
rm -f -- "modules.d/98dracut-systemd/dracut-pre-trigger.service.8.xml"
asciidoc -d manpage -b docbook -o "modules.d/98dracut-systemd/dracut-pre-trigger.service.8.xml" modules.d/98dracut-systemd/dracut-pre-trigger.service.8.asc
rm -f -- "modules.d/98dracut-systemd/dracut-pre-trigger.service.8"
xsltproc -o "modules.d/98dracut-systemd/dracut-pre-trigger.service.8" -nonet http://docbook.sourceforge.net/release/xsl/current/manpages/docbook.xsl modules.d/98dracut-systemd/dracut-pre-trigger.service.8.xml
rm -f -- "modules.d/98dracut-systemd/dracut-pre-udev.service.8.xml"
asciidoc -d manpage -b docbook -o "modules.d/98dracut-systemd/dracut-pre-udev.service.8.xml" modules.d/98dracut-systemd/dracut-pre-udev.service.8.asc
rm -f -- "modules.d/98dracut-systemd/dracut-pre-udev.service.8"
xsltproc -o "modules.d/98dracut-systemd/dracut-pre-udev.service.8" -nonet http://docbook.sourceforge.net/release/xsl/current/manpages/docbook.xsl modules.d/98dracut-systemd/dracut-pre-udev.service.8.xml
rm -f -- dracut.xml
asciidoc -a numbered -d book -b docbook -o dracut.xml dracut.asc
rm -f -- dracut.html
xsltproc -o dracut.html --xinclude -nonet \
--stringparam custom.css.source dracut.css \
--stringparam generate.css.header 1 \
http://docbook.sourceforge.net/release/xsl/current/xhtml/docbook.xsl dracut.xml
rm -f -- dracut.xml
[ -d /usr/lib/dracut ] || mkdir -p /usr/lib/dracut
mkdir -p /usr/lib/dracut/modules.d
mkdir -p /usr/share/man/man1 /usr/share/man/man5 /usr/share/man/man7 /usr/share/man/man8
install -m 0755 dracut.sh /usr/bin/dracut
install -m 0755 dracut-catimages.sh /usr/bin/dracut-catimages
install -m 0755 mkinitrd-dracut.sh /usr/bin/mkinitrd
install -m 0755 lsinitrd.sh /usr/bin/lsinitrd
install -m 0644 dracut.conf /usr/etc/dracut.conf
mkdir -p /usr/etc/dracut.conf.d
mkdir -p /usr/lib/dracut/dracut.conf.d
install -m 0755 dracut-init.sh /usr/lib/dracut/dracut-init.sh
install -m 0755 dracut-functions.sh /usr/lib/dracut/dracut-functions.sh
install -m 0755 dracut-version.sh /usr/lib/dracut/dracut-version.sh
ln -fs dracut-functions.sh /usr/lib/dracut/dracut-functions
install -m 0755 dracut-logger.sh /usr/lib/dracut/dracut-logger.sh
install -m 0755 dracut-initramfs-restore.sh /usr/lib/dracut/dracut-initramfs-restore
cp -arx modules.d /usr/lib/dracut
for i in lsinitrd.1; do install -m 0644 $i /usr/share/man/man1/${i##*/}; done
for i in dracut.conf.5; do install -m 0644 $i /usr/share/man/man5/${i##*/}; done
for i in dracut.cmdline.7 dracut.bootup.7 dracut.modules.7; do install -m 0644 $i /usr/share/man/man7/${i##*/}; done
for i in dracut.8 dracut-catimages.8 mkinitrd.8 mkinitrd-suse.8 modules.d/98dracut-systemd/dracut-cmdline.service.8 modules.d/98dracut-systemd/dracut-initqueue.service.8 modules.d/98dracut-systemd/dracut-mount.service.8 modules.d/98dracut-systemd/dracut-shutdown.service.8 modules.d/98dracut-systemd/dracut-pre-mount.service.8 modules.d/98dracut-systemd/dracut-pre-pivot.service.8 modules.d/98dracut-systemd/dracut-pre-trigger.service.8 modules.d/98dracut-systemd/dracut-pre-udev.service.8; do install -m 0644 $i /usr/share/man/man8/${i##*/}; done
ln -fs dracut.cmdline.7 /usr/share/man/man7/dracut.kernel.7
if [ -n "" ]; then \
mkdir -p ; \
ln -srf /usr/lib/dracut/modules.d/98dracut-systemd/dracut-shutdown.service /dracut-shutdown.service; \
mkdir -p /sysinit.target.wants; \
ln -s ../dracut-shutdown.service \
/sysinit.target.wants/dracut-shutdown.service; \
mkdir -p /initrd.target.wants; \
for i in \
dracut-cmdline.service \
dracut-initqueue.service \
dracut-mount.service \
dracut-pre-mount.service \
dracut-pre-pivot.service \
dracut-pre-trigger.service \
dracut-pre-udev.service \
; do \
ln -srf /usr/lib/dracut/modules.d/98dracut-systemd/$i ; \
ln -s ../$i \
/initrd.target.wants/$i; \
done \
fi
if [ -f install/dracut-install ]; then \
install -m 0755 install/dracut-install /usr/lib/dracut/dracut-install; \
fi
if [ -f skipcpio/skipcpio ]; then \
install -m 0755 skipcpio/skipcpio /usr/lib/dracut/skipcpio; \
fi
mkdir -p /usr/lib/kernel/install.d
install -m 0755 50-dracut.install /usr/lib/kernel/install.d/50-dracut.install
install -m 0755 51-dracut-rescue.install /usr/lib/kernel/install.d/51-dracut-rescue.install
mkdir -p /usr/share/bash-completion/completions
install -m 0644 dracut-bash-completion.sh /usr/share/bash-completion/completions/dracut
install -m 0644 lsinitrd-bash-completion.sh /usr/share/bash-completion/completions/lsinitrd
mkdir -p /usr/share/pkgconfig
install -m 0644 dracut.pc /usr/share/pkgconfig/dracut.pc
rm dracut.8.xml dracut.cmdline.7.xml modules.d/98dracut-systemd/dracut-mount.service.8.xml dracut.bootup.7.xml modules.d/98dracut-systemd/dracut-pre-mount.service.8.xml modules.d/98dracut-systemd/dracut-initqueue.service.8.xml mkinitrd.8.xml modules.d/98dracut-systemd/dracut-pre-pivot.service.8.xml dracut.modules.7.xml dracut.conf.5.xml lsinitrd.1.xml modules.d/98dracut-systemd/dracut-cmdline.service.8.xml dracut-catimages.8.xml modules.d/98dracut-systemd/dracut-pre-udev.service.8.xml modules.d/98dracut-systemd/dracut-pre-trigger.service.8.xml mkinitrd-suse.8.xml modules.d/98dracut-systemd/dracut-shutdown.service.8.xml

View File

@ -0,0 +1,59 @@
#!/bin/sh
# -*- mode: sh; fill-column: 75; tab-width: 8; coding: utf-8-unix -*-
prog=`basename $0 .bash`
PREFIX=/var/local
[ -f /usr/local/etc/testforge/testforge.bash ] && \
. /usr/local/etc/testforge/testforge.bash
ROLE=toxcore
. /var/local/src/var_local_src.bash || exit 2
PKG="kernel-expect"
GIT_HUB=github.com
GIT_USER="perkint"
GIT_DIR="kernelexpect"
DIR="${GIT_DIR}"
BINS=$PKG
ols_funtoo_requires dev-perl/Expect dev-perl/File-Which
cd $PREFIX/src || exit 2
WD=$PWD
if [ "$#" -eq 0 ] ; then
if [ ! -d "$DIR" ] ; then
if [ ! -d "$PREFIX/net/Git/$GIT_HUB/$GIT_USER/$GIT_DIR" ] ; then
[ -d "$PREFIX/net/Git/$GIT_HUB/$GIT_USER" ] || \
mkdir "$PREFIX/net/Git/$GIT_HUB/$GIT_USER"
ip route | grep -q '^default' || { DEBUG not connected ; exit 0 ; }
cd "$PREFIX/net/Git/$GIT_HUB/$GIT_USER" && \
git clone --depth=1 "https://$GIT_HUB/$GIT_USER/$GIT_DIR" || \
exit 2
fi
cp -rip "$PREFIX/net/Git/$GIT_HUB/$GIT_USER/$GIT_DIR" $DIR || exit 3
fi
if [ ! -f $PREFIX/bin/$PKG ] ; then
cp -p $DIR/$PKG $PREFIX/bin/$PKG
fi
if [ ! -f $PREFIX/bin/$PKG.bash ] ; then
echo -e "#!/bin/sh\n# -*- mode: sh; fill-column: 75; tab-width: 8; coding: utf-8-unix -*-\nROLE=$ROLE\nexec $PREFIX/bin/$PKG \"\$@\"" > $PREFIX/bin/$PKG.bash
chmod 755 $PREFIX/bin/$PKG.bash
fi
[ -d "$HTTP_DIR/$GIT_HUB/$GIT_USER/$GIT_DIR/" ] || \
ols_are_we_connected || exit 0
ols_wget_xc -P $HTTP_DIR/ \
https://$GIT_HUB/$GIT_USER/$GIT_DIR/
elif [ $1 = 'check' ] ; then # 1*
ols_test_bins && exit 0 || exit 1$?
if [ $1 = 'test' ] ; then # 3*
$PREFIX/bin/$PKG.bash --help
fi

View File

@ -79,7 +79,7 @@ elif [ "$1" = 'refresh' ] ; then # 6*
/usr/local/sbin/base_diff_from_dst.bash $ROLE || exit 6$? /usr/local/sbin/base_diff_from_dst.bash $ROLE || exit 6$?
elif [ "$1" = 'update' ] ; then # 7* elif [ "$1" = 'update' ] ; then # 7*
ols_are_we_connected || exit 0 msys_are_we_connected || exit 0
cd $PREFIX/src/$DIR || exit 70 cd $PREFIX/src/$DIR || exit 70
git pull || exit 7$? git pull || exit 7$?
fi fi

View File

@ -28,6 +28,10 @@ cd $PREFIX/src || exit 2
WD=$PWD WD=$PWD
cd $DIR || exit 3 cd $DIR || exit 3
cd $PREFIX/src || exit 2
WD=$PWD
if [ "$#" -eq 0 ] ; then
site_packages=$PREFIX/$LIB/python$PYTHON_MINOR/site-packages site_packages=$PREFIX/$LIB/python$PYTHON_MINOR/site-packages
@ -93,4 +97,14 @@ EOF
chmod 755 $PREFIX/bin/${ROLE}_${PKG}.bash chmod 755 $PREFIX/bin/${ROLE}_${PKG}.bash
fi fi
elif [ $1 = 'check' ] ; then # 1*
"$PYTHON_EXE_MSYS" -c "import $MOD" 2>/dev/null || exit 20
# ols_test_bins
exit $?
elif [ "$1" = 'test' ] ; then # 3*
cd $WD/$DIR
$PYTHON_EXE_MSYS -m unittest discover >>test.log || exit 31$?
fi
exit 0 exit 0

View File

@ -0,0 +1,37 @@
#!/bin/sh
# -*- mode: sh; tab-width: 8; coding: utf-8-unix -*-
# usr/local/src helper for the stem_examples package.
# Usage: stem_examples.bash [lint|test]
#   (no argument) - set up the environment only
#   lint          - run .pylint.sh in the package source directory
#   test          - cd into the package source directory
# NOTE(review): the diff header says this file is 37 lines; the view is
# shorter, so the 'test' branch body may be truncated here - confirm.
PREFIX=/usr/local
# testforge shell helpers (defines the ols_* functions used below)
. /usr/local/etc/testforge/testforge.bash
ROLE=toxcore
PYVER=3
# Indirect lookup: expand e.g. $BASE_PYTHON3_MINOR into PYTHON_MINOR
P="BASE_PYTHON${PYVER}_MINOR"
PYTHON_MINOR="$(eval echo \$$P)"
PYTHON_EXE_MSYS=$PREFIX/bin/python$PYVER.bash
PYTHON_EXE=$PYTHON_EXE_MSYS
. /usr/local/src/usr_local_src.bash || exit 2
PKG=stem_examples
DIR=$PKG
#/var/local/bin/python3.bash setup.py install --prefix=/var/local
# stem comes from the distro package manager, not pip
ols_funtoo_requires dev-libs/stem
# requires: py.test
#? ols_pip${PYVER}_requires readme_renderer
cd $PREFIX/src || exit 2
WD=$PWD
if [ $# -eq 0 ] ; then
:
elif [ "$1" = 'lint' ] ; then # 2*
cd $PREFIX/src/$DIR || exit 20
bash .pylint.sh 2>&1 || exit 2$?
elif [ "$1" = 'test' ] ; then # 3*
cd $PREFIX/src/$DIR || exit 30
fi

View File

@ -0,0 +1,37 @@
# Sync the phantompy and exclude_badExits sources into their git
# checkouts, lint them, and regenerate exclude_badExits.md.
# NOTE(review): recipe indentation was flattened in the diff view and has
# been restored with tabs - confirm against the committed file.
up:: up.phantompy up.badexits

# copy the phantompy sources into its git working tree
up.phantompy::
	cp -up phantompy.md /o/var/local/src/phantompy.git/README.md
	cp -up phantompy.setup /o/var/local/src/phantompy.git/setup.py
	cp -up setup.cfg lookupdns.py qasync_phantompy.py phantompy.py support_phantompy.py \
	/o/var/local/src/phantompy.git/

# copy the exclude_badExits sources; refresh the README first
up.badexits:: refresh
	cp -up exclude_badExits.md /o/var/local/src/exclude_badExits.git/README.md
	cp -up setup.cfg exclude_badExits.py exclude_badExits.bash \
	support_onions.py trustor_poc.py \
	/o/var/local/src/exclude_badExits.git

lint.phantompy::
	/var/local/bin/pydev_flake8.bash lookupdns.py qasync_phantompy.py phantompy.py support_phantompy.py

lint.badexits::
	/var/local/bin/pydev_flake8.bash exclude_badExits.py \
	support_onions.py trustor_poc.py
	isort -c -diff exclude_badExits.py \
	support_onions.py trustor_poc.py

lint:: lint.badexits lint.phantompy
	sh .pylint.sh

# rebuild exclude_badExits.md from the module docstring plus --help output
refresh::
	/var/local/bin/python3.bash -c \
	'import exclude_badExits; print(exclude_badExits.__doc__)' \
	> exclude_badExits.md
	echo "\n## Usage \n\`\`\`\n" \
	>> exclude_badExits.md
	/var/local/bin/python3.bash exclude_badExits.py --help \
	| sed -e '/^[^uo ]/d' \
	>> exclude_badExits.md
	echo "\n\`\`\`\n" \
	>> exclude_badExits.md

View File

@ -0,0 +1,113 @@
This extends nusenu's basic idea of using the stem library to
dynamically exclude nodes that are likely to be bad by putting them
on the ExcludeNodes or ExcludeExitNodes setting of a running Tor.
* https://github.com/nusenu/noContactInfo_Exit_Excluder
* https://github.com/TheSmashy/TorExitRelayExclude
The basic idea is to exclude Exit nodes that do not have ContactInfo:
* https://github.com/nusenu/ContactInfo-Information-Sharing-Specification
That can be extended to relays that do not have an email in the contact,
or to relays that do not have ContactInfo that is verified to include them.
But there's a problem, and your Tor notice.log will tell you about it:
you could exclude the relays needed to access hidden services or mirror
directories. So we need to add to the process the concept of a whitelist.
In addition, we may have our own blacklist of nodes we want to exclude,
or use these lists for other applications like selektor.
So we make two files that are structured in YAML:
```
/etc/tor/yaml/torrc-goodnodes.yaml
{sGOOD_NODES}
By default all sections of the goodnodes.yaml are used as a whitelist.
Use the GoodNodes/Onions list to list onion services you want the
Introduction Points whitelisted - these points may change daily
Look in tor's notice.log for warnings of 'Every introduction point for service'
```--hs_dir``` ```default='/var/lib/tor'``` will make the program
parse the files named ```hostname``` below this dir to find
Hidden Services to whitelist.
The Introduction Points can change during the day, so you may want to
rerun this program to freshen the list of Introduction Points. A full run
that processes all the relays from stem can take 30 minutes, or run with:
```--saved_only``` will run the program with just cached information
on the relays, but will update the Introduction Points from the Services.
/etc/tor/yaml/torrc-badnodes.yaml
{sBAD_NODES}
```
That part requires [PyYAML](https://pyyaml.org/wiki/PyYAML)
https://github.com/yaml/pyyaml/ or ```ruamel```: do
```pip3 install ruamel``` or ```pip3 install PyYAML```;
the advantage of the former is that it preserves comments.
(You may have to run this as the Tor user to get RW access to
/run/tor/control, in which case the directory for the YAML files must
be group Tor writeable, and its parent's directories group Tor RX.)
Because you don't want to exclude the introduction points to any onion
you want to connect to, ```--white_onions``` should whitelist the
introduction points to a comma sep list of onions; we fixed stem to do this:
* https://github.com/torproject/stem/issues/96
* https://gitlab.torproject.org/legacy/trac/-/issues/25417
Use the GoodNodes/Onions list in goodnodes.yaml to list onion services
you want the Introduction Points whitelisted - these points may change daily.
Look in tor's notice.log for 'Every introduction point for service'
```notice_log``` will parse the notice log for warnings about relays and
services that will then be whitelisted.
```--torrc``` will read a file like /etc/tor/torrc and make some
suggestions based on what it finds; it will not edit or change the file.
```--torrc_output``` will write the torrc ExcludeNodes configuration to a file.
```--good_contacts``` will write the contact info as a ciiss dictionary
to a YAML file. If the proof is uri-rsa, the well-known file of fingerprints
is downloaded and the fingerprints are added on a 'fps' field we create
of that fingerprint's entry of the YAML dictionary. This file is read at the
beginning of the program to start with a trust database, and only new
contact info from new relays are added to the dictionary.
Now for the final part: we look up the Contact info of every relay
that is currently in our Tor, and check for the existence of the
well-known file that lists the fingerprints of the relays it runs.
If it fails to provide the well-known URL, we assume it's a bad
relay and add it to a list of nodes that goes on ```ExcludeNodes```
(not just ```ExcludeExitNodes```). If the Contact info is good, we add the
list of fingerprints to ```ExitNodes```, a whitelist of relays to use as exits.
```--bad_on``` We offer the users 3 levels of cleaning:
1. clean relays that have no contact ```=Empty```
2. clean relays that don't have an email in the contact (implies 1)
```=Empty,NoEmail```
3. clean relays that don't have "good" contactinfo. (implies 1)
```=Empty,NoEmail,NotGood```
The default is ```Empty,NoEmail,NotGood``` ; ```NoEmail``` is inherently imperfect
in that many of the contact-as-an-email are obfuscated, but we try anyway.
To be "good" the ContactInfo must:
1. have a url for the well-defined-file to be gotten
2. must have a file that can be gotten at the URL
3. must support getting the file with a valid SSL cert from a recognized authority
4. (not in the spec but added by Python) must use a TLS SSL > v1
5. must have a fingerprint list in the file
6. must have the FP that got us the contactinfo in the fingerprint list in the file.
```--wait_boot``` is the number of seconds to wait for Tor to bootstrap
```--wellknown_output``` will make the program write the well-known files
(```/.well-known/tor-relay/rsa-fingerprint.txt```) to a directory.
```--relays_output``` will write the downloaded relays in json to a file. The relays
are downloaded from https://onionoo.torproject.org/details
For usage, do ```python3 exclude_badExits.py --help```
See [exclude_badExits.txt](./exclude_badExits.txt)

View File

@ -0,0 +1,55 @@
# -*-mode: python; py-indent-offset: 2; indent-tabs-mode: nil; coding: utf-8-unix -*-
# http://vt5hknv6sblkgf22.onion/tutorials/examples/check_digests.html
import sys
import stem.descriptor.remote
import stem.util.tor_tools
def download_descriptors(fingerprint):
  """
  Downloads the descriptors we need to validate this relay.  Downloads are
  parallelized, providing the caller with a tuple of the form...

    (router_status_entry, server_descriptor, extrainfo_descriptor)

  Raises IOError when the fingerprint is not found exactly once in the
  consensus.
  """
  consensus_query = stem.descriptor.remote.get_consensus()
  server_desc_query = stem.descriptor.remote.get_server_descriptors(fingerprint)
  extrainfo_query = stem.descriptor.remote.get_extrainfo_descriptors(fingerprint)

  # BUG FIX: filter() returns a lazy iterator in Python 3, so the len()
  # check below raised TypeError; materialize the matches into a list.
  router_status_entries = [desc for desc in consensus_query.run()
                           if desc.fingerprint == fingerprint]

  if len(router_status_entries) != 1:
    raise IOError("Unable to find relay '%s' in the consensus" % fingerprint)

  # .run() blocks until each parallel download completes
  return (
    router_status_entries[0],
    server_desc_query.run()[0],
    extrainfo_query.run()[0],
  )
if __name__ == '__main__':
  # interactively validate the digests of a single relay
  relay_fp = input("What relay fingerprint would you like to validate?\n")
  print('')  # spacer line

  if not stem.util.tor_tools.is_valid_fingerprint(relay_fp):
    print("'%s' is not a valid relay fingerprint" % relay_fp)
    sys.exit(1)

  try:
    status_entry, server_desc, extrainfo_desc = download_descriptors(relay_fp)
  except Exception as exc:
    print(exc)
    sys.exit(1)

  # the consensus entry carries the expected server descriptor digest
  if status_entry.digest == server_desc.digest():
    print("Server descriptor digest is correct")
  else:
    print("Server descriptor digest invalid, expected %s but is %s" % (status_entry.digest, server_desc.digest()))

  # the server descriptor carries the expected extrainfo digest
  if server_desc.extra_info_digest == extrainfo_desc.digest():
    print("Extrainfo descriptor digest is correct")
  else:
    print("Extrainfo descriptor digest invalid, expected %s but is %s" % (server_desc.extra_info_digest, extrainfo_desc.digest()))

View File

@ -0,0 +1,51 @@
import collections

import stem.descriptor
import stem.descriptor.remote
import stem.directory

# Ask every directory authority for its current vote, asynchronously.
downloader = stem.descriptor.remote.DescriptorDownloader(
  document_handler = stem.descriptor.DocumentHandler.DOCUMENT,
)

# An ordered dictionary keeps the queries in the order they were added.
queries = collections.OrderedDict()

for name, authority in stem.directory.Authority.from_cache().items():
  # an authority does not vote if it lacks a v3ident
  if authority.v3ident is None:
    continue
  queries[name] = downloader.get_vote(authority)

# Block until each download finishes: authority nickname -> vote document.
votes = {name: query.run()[0] for name, query in queries.items()}

# Superset of every fingerprint mentioned in any vote.
all_fingerprints = set()
for vote in votes.values():
  all_fingerprints.update(vote.routers.keys())

# Finally, compare moria1's view of each relay with maatuska's.
for fingerprint in all_fingerprints:
  moria1_vote = votes['moria1'].routers.get(fingerprint)
  maatuska_vote = votes['maatuska'].routers.get(fingerprint)

  if not moria1_vote and not maatuska_vote:
    print("both moria1 and maatuska haven't voted about %s" % fingerprint)
  elif not moria1_vote:
    print("moria1 hasn't voted about %s" % fingerprint)
  elif not maatuska_vote:
    print("maatuska hasn't voted about %s" % fingerprint)
  elif 'Running' in moria1_vote.flags and 'Running' not in maatuska_vote.flags:
    print("moria1 has the Running flag but maatuska doesn't: %s" % fingerprint)
  elif 'Running' in maatuska_vote.flags and 'Running' not in moria1_vote.flags:
    print("maatuska has the Running flag but moria1 doesn't: %s" % fingerprint)

View File

@ -0,0 +1,25 @@
#!/bin/sh
# -*- mode: sh; fill-column: 75; tab-width: 8; coding: utf-8-unix -*-
# Build a single-file console executable of exclude_badExits with
# pyinstaller.  The bundle is rebuilt only when ${dist}/${PROG}.pyi is
# missing or older than the source file.
ROLE=toxcore
PROG=exclude_badExits
build=build
dist=dist

# POSIX sh: two [ ] tests joined with || instead of the deprecated and
# ambiguous -o operator inside a single [ ].
if [ ! -e "${dist}/${PROG}.pyi" ] || [ ! "${dist}/${PROG}.pyi" -nt "./${PROG}.py" ] ; then
    # generate the spec file once; -F = one-file bundle, -c = console app
    [ -f "${PROG}.spec" ] || pyi-makespec "./${PROG}.py" -F -c
    [ -d "${build}" ] || mkdir -p "${build}"
    [ -d "${dist}" ] || mkdir -p "${dist}"
    # The outer test already established the bundle is stale, so run
    # pyinstaller unconditionally (the original re-tested the same
    # condition with the deprecated -a operator).
    # --exclude trims large unused packages to keep the bundle small.
    pyinstaller --distpath "${dist}" --workpath "${build}" \
        --exclude tkinter --exclude matplotlib \
        --exclude twisted --exclude jedi --exclude jaraco \
        --exclude sphinx --exclude coverage --exclude nose \
        --exclude PIL --exclude numpy --exclude OpenGL \
        --exclude PySide2 --exclude PyQt5 --exclude IPython \
        --onefile -c --ascii \
        "$PROG.py"
    # known upstream failure mode:
    # AttributeError: 'NoneType' object has no attribute 'groups'
    # utils.py #400
fi
# alternative: cx_Freeze exclude_badExits.py

View File

@ -0,0 +1,26 @@
#!/bin/sh
# -*- mode: sh; fill-column: 75; tab-width: 8; coding: utf-8-unix -*-
# Build a single-file console executable of exclude_badExits with
# pyinstaller.  NOTE(review): this script appears to be an exact
# duplicate of the sibling pyinstaller build script in this commit -
# confirm which copy is canonical.
ROLE=toxcore
PROG=exclude_badExits
build=build
dist=dist
# pyinstaller
# Rebuild only when the bundle is missing or older than the source.
if [ ! -e ${dist}/${PROG}.pyi -o ! ${dist}/${PROG}.pyi -nt ./${PROG}.py ] ; then
# generate the spec file once; -F = one-file bundle, -c = console app
[ -f ${PROG}.spec ] || pyi-makespec ./${PROG}.py -F -c
[ -d ${build} ] || mkdir -p ${build}
[ -d ${dist} ] || mkdir -p ${dist}
# --exclude trims large unused packages to keep the bundle small
[ -e ${dist}/${PROG}.pyi -a ${dist}/${PROG}.pyi -nt ./${PROG}.py ] || \
pyinstaller --distpath ${dist} --workpath ${build} \
--exclude tkinter --exclude matplotlib \
--exclude twisted --exclude jedi --exclude jaraco \
--exclude sphinx --exclude coverage --exclude nose \
--exclude PIL --exclude numpy --exclude OpenGL \
--exclude PySide2 --exclude PyQt5 --exclude IPython \
--onefile -c --ascii \
$PROG.py
# AttributeError: 'NoneType' object has no attribute 'groups'
# utils.py #400
fi
# cx_Freeze exclude_badExits.py

View File

@ -0,0 +1,42 @@
#!/bin/bash
# -*- mode: sh; fill-column: 75; tab-width: 8; coding: utf-8-unix -*-
# An example of running exclude_badExits with full debugging.
# Expected to take an hour or so.
PROG=exclude_badExits.py
SOCKS_PORT=9050
CAFILE=/etc/ssl/certs/ca-certificates.crt
ROLE=toxcore

declare -a LARGS
LARGS=(
    --log_level 10
)
# you may have a special python for installed packages
# (command -v is the portable replacement for `which`)
EXE=$(command -v python3.bash)
[ -n "$EXE" ] || { echo "error: python3.bash not found in PATH" >&2 ; exit 1 ; }
LARGS+=(
    --strict_nodes 1
    --points_timeout 120
    --proxy-host 127.0.0.1
    --proxy-port "$SOCKS_PORT"
    --https_cafile "$CAFILE"
)
# prefer the unix control socket when tor exposes one, else the control port
if [ -f '/run/tor/control' ] ; then
    LARGS+=(--proxy-ctl '/run/tor/control' )
else
    LARGS+=(--proxy-ctl 9051 )
fi
ddg=duckduckgogg42xjoc72x3sjasowoarfbgcmvfimaftt6twagswzczad
# for example, whitelist the introduction points to DuckDuckGo
LARGS+=( --white_onions "$ddg" )
# you may need to be the tor user to read /run/tor/control
TORU=
grep -q ^debian-tor /etc/group && TORU=debian-tor || {
    grep -q ^tor /etc/group && TORU=tor
}
# BUG FIX: fail early instead of running `sudo -u ""` when neither
# tor group exists in /etc/group
[ -n "$TORU" ] || { echo "error: no tor group found in /etc/group" >&2 ; exit 2 ; }
sudo -u "$TORU" "$EXE" exclude_badExits.py "${LARGS[@]}" \
    2>&1 | tee exclude_badExits6.log
# The DEBUG statements contain the detail of why the relay was considered bad.

View File

@ -0,0 +1,151 @@
This extends nusenu's basic idea of using the stem library to
dynamically exclude nodes that are likely to be bad by putting them
on the ExcludeNodes or ExcludeExitNodes setting of a running Tor.
* https://github.com/nusenu/noContactInfo_Exit_Excluder
* https://github.com/TheSmashy/TorExitRelayExclude
The basic idea is to exclude Exit nodes that do not have ContactInfo:
* https://github.com/nusenu/ContactInfo-Information-Sharing-Specification
That can be extended to relays that do not have an email in the contact,
or to relays that do not have ContactInfo that is verified to include them.
But there's a problem, and your Tor notice.log will tell you about it:
you could exclude the relays needed to access hidden services or mirror
directories. So we need to add to the process the concept of a whitelist.
In addition, we may have our own blacklist of nodes we want to exclude,
or use these lists for other applications like selektor.
So we make two files that are structured in YAML:
```
/etc/tor/yaml/torrc-goodnodes.yaml
GoodNodes:
Relays:
IntroductionPoints:
- NODEFINGERPRINT
...
By default all sections of the goodnodes.yaml are used as a whitelist.
/etc/tor/yaml/torrc-badnodes.yaml
BadNodes:
ExcludeExitNodes:
BadExit:
# $0000000000000000000000000000000000000007
```
That part requires [PyYAML](https://pyyaml.org/wiki/PyYAML)
https://github.com/yaml/pyyaml/ or ```ruamel```: do
```pip3 install ruamel``` or ```pip3 install PyYAML```;
the advantage of the former is that it preserves comments.
(You may have to run this as the Tor user to get RW access to
/run/tor/control, in which case the directory for the YAML files must
be group Tor writeable, and its parents group Tor RX.)
Because you don't want to exclude the introduction points to any onion
you want to connect to, ```--white_onions``` should whitelist the
introduction points to a comma sep list of onions; we fixed stem to do this:
* https://github.com/torproject/stem/issues/96
* https://gitlab.torproject.org/legacy/trac/-/issues/25417
```--torrc_output``` will write the torrc ExcludeNodes configuration to a file.
```--good_contacts``` will write the contact info as a ciiss dictionary
to a YAML file. If the proof is uri-rsa, the well-known file of fingerprints
is downloaded and the fingerprints are added on a 'fps' field we create
of that fingerprint's entry of the YAML dictionary. This file is read at the
beginning of the program to start with a trust database, and only new
contact info from new relays are added to the dictionary.
Now for the final part: we look up the Contact info of every relay
that is currently in our Tor, and check for the existence of the
well-known file that lists the fingerprints of the relays it runs.
If it fails to provide the well-known URL, we assume it's a bad
relay and add it to a list of nodes that goes on ```ExcludeNodes```
(not just ```ExcludeExitNodes```). If the Contact info is good, we add the
list of fingerprints to ```ExitNodes```, a whitelist of relays to use as exits.
```--bad_on``` We offer the users 3 levels of cleaning:
1. clean relays that have no contact ```=Empty```
2. clean relays that don't have an email in the contact (implies 1)
```=Empty,NoEmail```
3. clean relays that don't have "good" contactinfo. (implies 1)
```=Empty,NoEmail,NotGood```
The default is ```=Empty,NotGood``` ; ```NoEmail``` is inherently imperfect
in that many of the contact-as-an-email are obfuscated, but we try anyway.
To be "good" the ContactInfo must:
1. have a url for the well-defined-file to be gotten
2. must have a file that can be gotten at the URL
3. must support getting the file with a valid SSL cert from a recognized authority
4. (not in the spec but added by Python) must use a TLS SSL > v1
5. must have a fingerprint list in the file
6. must have the FP that got us the contactinfo in the fingerprint list in the file.
For usage, do ```python3 exclude_badExits.py --help```
## Usage
```
usage: exclude_badExits.py [-h] [--https_cafile HTTPS_CAFILE]
[--proxy_host PROXY_HOST] [--proxy_port PROXY_PORT]
[--proxy_ctl PROXY_CTL] [--torrc TORRC]
[--timeout TIMEOUT] [--good_nodes GOOD_NODES]
[--bad_nodes BAD_NODES] [--bad_on BAD_ON]
[--bad_contacts BAD_CONTACTS]
[--strict_nodes {0,1}] [--wait_boot WAIT_BOOT]
[--points_timeout POINTS_TIMEOUT]
[--log_level LOG_LEVEL]
[--bad_sections BAD_SECTIONS]
[--white_onions WHITE_ONIONS]
[--torrc_output TORRC_OUTPUT]
[--relays_output RELAYS_OUTPUT]
[--good_contacts GOOD_CONTACTS]
optional arguments:
-h, --help show this help message and exit
--https_cafile HTTPS_CAFILE
Certificate Authority file (in PEM)
--proxy_host PROXY_HOST, --proxy-host PROXY_HOST
proxy host
--proxy_port PROXY_PORT, --proxy-port PROXY_PORT
proxy control port
--proxy_ctl PROXY_CTL, --proxy-ctl PROXY_CTL
control socket - or port
--torrc TORRC torrc to check for suggestions
--timeout TIMEOUT proxy download connect timeout
--good_nodes GOOD_NODES
Yaml file of good info that should not be excluded
--bad_nodes BAD_NODES
Yaml file of bad nodes that should also be excluded
--bad_on BAD_ON comma sep list of conditions - Empty,NoEmail,NotGood
--bad_contacts BAD_CONTACTS
Yaml file of bad contacts that bad FPs are using
--strict_nodes {0,1} Set StrictNodes: 1 is less anonymous but more secure,
although some sites may be unreachable
--wait_boot WAIT_BOOT
Seconds to wait for Tor to booststrap
--points_timeout POINTS_TIMEOUT
Timeout for getting introduction points - must be long
>120sec. 0 means disabled looking for IPs
--log_level LOG_LEVEL
10=debug 20=info 30=warn 40=error
--bad_sections BAD_SECTIONS
sections of the badnodes.yaml to use, comma separated,
'' BROKEN
--white_onions WHITE_ONIONS
comma sep. list of onions to whitelist their
introduction points - BROKEN
--torrc_output TORRC_OUTPUT
Write the torrc configuration to a file
--relays_output RELAYS_OUTPUT
Write the download relays in json to a file
--good_contacts GOOD_CONTACTS
Write the proof data of the included nodes to a YAML
file
```

File diff suppressed because it is too large Load Diff

View File

@ -0,0 +1,52 @@
# -*-mode: python; py-indent-offset: 2; indent-tabs-mode: nil; coding: utf-8-unix -*-
# https://stem.torproject.org/tutorials/examples/exit_used.html
import functools
import sys
import os
import getpass
from stem import StreamStatus
from stem.control import EventType, Controller
def main():
  """Attach a STREAM listener to the local tor controller and report the
  exit relay of each successful stream until the user presses enter."""
  print("Tracking requests for tor exits. Press 'enter' to end.")
  print("")

  # prefer the unix control socket when it exists, else the control port
  if os.path.exists('/var/run/tor/control'):
    ctl = Controller.from_socket_file(path='/var/run/tor/control')
  else:
    ctl = Controller.from_port(port=9051)

  try:
    sys.stdout.flush()
    password = getpass.unix_getpass(prompt='Controller Password: ', stream=sys.stderr)
    ctl.authenticate(password)

    # bind the controller as the first argument of the event callback
    listener = functools.partial(stream_event, ctl)
    ctl.add_event_listener(listener, EventType.STREAM)

    print('Press Enter')
    input()  # block until the user presses enter
  except Exception as e:
    print(e)
  finally:
    del ctl
def stream_event(controller, event):
  """STREAM event callback: print details of the exit relay used."""
  # only report streams that succeeded and are attached to a circuit
  if event.status == StreamStatus.SUCCEEDED and event.circ_id:
    circuit = controller.get_circuit(event.circ_id)

    # the last hop of the circuit path is the exit relay
    exit_fp = circuit.path[-1][0]
    relay = controller.get_network_status(exit_fp)

    print("Exit relay for our connection to %s" % (event.target))
    print(" address: %s:%i" % (relay.address, relay.or_port))
    print(" fingerprint: %s" % relay.fingerprint)
    print(" nickname: %s" % relay.nickname)
    print(" locale: %s" % controller.get_info("ip-to-country/%s" % relay.address, 'unknown'))
    print("")

View File

@ -0,0 +1,42 @@
# -*- mode: python; indent-tabs-mode: nil; py-indent-offset: 4; coding: utf-8 -*-
# http://vt5hknv6sblkgf22.onion/tutorials/over_the_river.html
# Fetch the hidden-service descriptor for each onion address given on the
# command line and print its introduction points.
import sys
import os
import getpass

from stem.control import Controller
from stem.connection import MissingPassword

if len(sys.argv) <= 1:
    sys.argv += ['']

# prefer the unix control socket when present, else the control port
if os.path.exists('/run/tor/control'):
    controller = Controller.from_socket_file(path='/run/tor/control')
else:
    controller = Controller.from_port(port=9051)
try:
    # try cookie/no-auth first; fall back to prompting for a password
    controller.authenticate()
except (Exception, MissingPassword):
    sys.stdout.flush()
    p = getpass.unix_getpass(prompt='Controller Password: ', stream=sys.stderr)
    controller.authenticate(p)
try:
    for elt in sys.argv[1:]:
        desc = controller.get_hidden_service_descriptor(elt, await_result=True, timeout=None)
        print(f"{desc} get_hidden_service_descriptor\n")
        l = desc.introduction_points()
        # BUG FIX: the original tested `if l:` here, which printed
        # "NO introduction points" exactly when points WERE found and then
        # skipped listing them; the condition must be inverted.
        if not l:
            print(f"{elt} NO introduction points\n")
            continue
        print(f"{elt} introduction points are...\n")
        for introduction_point in l:
            print(' %s:%s => %s' % (introduction_point.address,
                                    introduction_point.port,
                                    introduction_point.identifier))
except Exception as e:
    print(e)
finally:
    del controller

View File

@ -0,0 +1,41 @@
# -*-mode: python; py-indent-offset: 2; indent-tabs-mode: nil; coding: utf-8-unix -*-
# http://vt5hknv6sblkgf22.onion/tutorials/examples/list_circuits.html
# Print every BUILT circuit of the local tor, one hop per line.
import sys
import os
import getpass

from stem import CircStatus
from stem.control import Controller

# prefer the unix control socket when present, else the control port 9051
if os.path.exists('/var/run/tor/control'):
  controller = Controller.from_socket_file(path='/var/run/tor/control')
else:
  controller = Controller.from_port(port=9051)

try:
  sys.stdout.flush()
  password = getpass.unix_getpass(prompt='Controller Password: ', stream=sys.stderr)
  controller.authenticate(password)

  for circ in sorted(controller.get_circuits()):
    if circ.status != CircStatus.BUILT:
      continue

    print("")
    print("Circuit %s (%s)" % (circ.id, circ.purpose))

    for idx, (fingerprint, nickname) in enumerate(circ.path):
      # mark the final hop with '+', intermediate hops with '|'
      marker = '+' if (idx == len(circ.path) - 1) else '|'
      desc = controller.get_network_status(fingerprint, None)
      address = desc.address if desc else 'unknown'
      print(" %s- %s (%s, %s)" % (marker, fingerprint, nickname, address))
except Exception as e:
  print(e)
finally:
  del controller

View File

@ -0,0 +1,84 @@
#!/usr/local/bin/python3.sh
# -*-mode: python; indent-tabs-mode: nil; py-indent-offset: 4; coding: utf-8 -*
"""
Looks for urls https://dns.google/resolve?
https://dns.google/resolve?name=domain.name&type=TXT&cd=true&do=true
and parses them to extract a magic field.
A good example of how you can parse json embedded in HTML with phantomjs.
"""
import sys
import os
from phantompy import Render
global LOG
import logging
import warnings
warnings.filterwarnings('ignore')
LOG = logging.getLogger()
class LookFor(Render):
    """Render subclass that loads a dns.google resolve page and checks the
    JSON answer for the 'we-run-this-tor-relay' TXT proof.

    Finished fingerprints are appended to the shared ``app.lfps`` list.
    NOTE(review): indentation was flattened in the diff view and has been
    reconstructed here - confirm against the committed file.
    """

    def __init__(self, app, do_print=True, do_save=False):
        # shared list of fingerprints already processed (read by the caller)
        app.lfps = []
        self._app = app
        self.do_print = do_print
        self.do_save = do_save
        self.progress = 0
        # tri-state: None = not parsed yet, True/False once decided
        self.we_run_this_tor_relay = None
        Render.__init__(self, app, do_print, do_save)

    def _exit(self, val):
        # chain to the base class, then record the fingerprint this URL was
        # querying (the 'name=<fp>.<domain>' query parameter of the URI)
        Render._exit(self, val)
        self.percent = 100
        LOG.debug(f"phantom.py: Exiting with val {val}")
        i = self.uri.find('name=')
        fp = self.uri[i+5:]
        i = fp.find('.')
        fp = fp[:i]
        # threadsafe? -- TODO confirm appending to the shared list is safe here
        self._app.lfps.append(fp)

    def _html_callback(self, *args):
        """print(self, QPrinter, Callable[[bool], None])"""
        if type(args[0]) is str:
            self._save(args[0])
            i = self.ilookfor(args[0])
            # signal the base class that the page has been saved
            self._onConsoleMessage(i, "__PHANTOM_PY_SAVED__", 0 , '')

    def ilookfor(self, html):
        """Parse the dns.google HTML answer embedded in ``html``.

        Returns 0 if the TXT proof was found, 1 if the JSON has no usable
        'Answer' list, 2 if the proof is absent, -1 if the page marker is
        missing entirely.
        """
        import json
        # dns.google wraps the raw JSON reply in a <pre> element
        marker = '<pre style="word-wrap: break-word; white-space: pre-wrap;">'
        if marker not in html: return -1
        i = html.find(marker) + len(marker)
        html = html[i:]
        assert html[0] == '{', html
        i = html.find('</pre')
        html = html[:i]
        assert html[-1] == '}', html
        LOG.debug(f"Found {len(html)} json")
        o = json.loads(html)
        if "Answer" not in o.keys() or type(o["Answer"]) != list:
            LOG.warn(f"FAIL {self.uri}")
            return 1
        for elt in o["Answer"]:
            assert type(elt) == dict, elt
            assert 'type' in elt, elt
            # DNS record type 16 == TXT
            if elt['type'] != 16: continue
            assert 'data' in elt, elt
            if elt['data'] == 'we-run-this-tor-relay':
                LOG.info(f"OK {self.uri}")
                self.we_run_this_tor_relay = True
                return 0
        self.we_run_this_tor_relay = False
        LOG.warn(f"BAD {self.uri}")
        return 2

    def _loadFinished(self, result):
        # page load is complete: extract the HTML and hand it to
        # _html_callback via toHtml
        LOG.debug(f"phantom.py: Loading finished {self.uri}")
        self.toHtml(self._html_callback)

View File

@ -0,0 +1,42 @@
# -*-mode: python; py-indent-offset: 2; indent-tabs-mode: nil; coding: utf-8-unix -*-
# https://stem.torproject.org/tutorials/examples/exit_used.html
import functools
import sys
import getpass
import os
from stem import StreamStatus
from stem.control import EventType, Controller
global LOG
import logging
LOG = logging.getLogger('app.'+'tox_factory')
def sMapaddressResolv(target, iPort=9051):
  """Ask the local tor controller to MAPADDRESS 0.0.0.0 onto ``target``.

  Returns the controller's mapping reply, or None if an error was logged.
  """
  # prefer the unix control socket when present, else the given control port
  if os.path.exists('/var/run/tor/control'):
    ctl = Controller.from_socket_file(path='/var/run/tor/control')
  else:
    ctl = Controller.from_port(port=iPort)
  try:
    sys.stdout.flush()
    password = getpass.unix_getpass(prompt='Controller Password: ', stream=sys.stderr)
    ctl.authenticate(password)
    return ctl.map_address({"0.0.0.0": target})
  except Exception as e:
    LOG.exception(e)
  finally:
    del ctl
if __name__ == '__main__':
  # default to a known onion address when none is given on the command line
  if len(sys.argv) < 2:
    onion = "l2ct3xnuaiwwtoybtn46qp2av4ndxcguwupzyv6xrsmnwi647vvmwtqd"
  else:
    onion = sys.argv[1]
  print(sMapaddressResolv(onion))

View File

@ -0,0 +1,21 @@
# http://vt5hknv6sblkgf22.onion/tutorials/examples/outdated_relays.html
# List relays running a tor older than 0.2.3.0 and count how many of them
# published contact information.
from stem.descriptor.remote import DescriptorDownloader
from stem.version import Version

downloader = DescriptorDownloader()
outdated_count = 0
contact_count = 0

print("Checking for outdated relays...")
print("")

for desc in downloader.get_server_descriptors():
  if desc.tor_version < Version('0.2.3.0'):
    outdated_count += 1
    if desc.contact:
      print(' %-15s %s' % (desc.tor_version, desc.contact.decode("utf-8", "replace")))
      contact_count += 1

print("")
print("%i outdated relays found, %i had contact information" % (outdated_count, contact_count))

View File

@ -0,0 +1,97 @@
# phantompy
A simple replacement for phantomjs using PyQt.
This code is based on a brilliant idea of
[Michael Franzl](https://gist.github.com/michaelfranzl/91f0cc13c56120391b949f885643e974/raw/a0601515e7a575bc4c7d4d2a20973b29b6c6f2df/phantom.py)
that he wrote up in his blog:
* https://blog.michael.franzl.name/2017/10/16/phantomjs-alternative-write-short-pyqt-scripts-instead-phantom-py/
* https://blog.michael.franzl.name/2017/10/16/phantom-py/
## Features
* Generate a PDF screenshot of the web page after it is completely loaded.
* Optionally execute a local JavaScript file specified by the argument
```javascript-file``` after the web page is completely loaded, and before the
PDF is generated. (YMMV - it segfaults for me. )
* Generate a HTML save file screenshot of the web page after it is
completely loaded and the javascript has run.
* console.log's will be printed to stdout.
* Easily add new features by changing the source code of this script,
without compiling C++ code. For more advanced applications, consider
attaching PyQt objects/methods to WebKit's JavaScript space by using
QWebFrame::addToJavaScriptWindowObject().
If you execute an external ```javascript-file```, phantompy has no
way of knowing when that script has finished doing its work. For this
reason, the external script should execute at the end
```console.log("__PHANTOM_PY_DONE__");``` when done. This will trigger
the PDF generation or the file saving, after which phantompy will exit.
If you do not want to run any JavaScript file, this trigger is provided
in the code by default.
It is important to remember that since you're just running WebKit, you can
use everything that WebKit supports, including the usual JS client
libraries, CSS, CSS @media types, etc.
Qt picks up proxies from the environment, so this will respect
```https_proxy``` or ```http_proxy``` if set.
## Dependencies
* Python3
* PyQt5 (this should work with PySide2 and PyQt6 - let us know.)
* [qasync](https://github.com/CabbageDevelopment/qasync) for the
standalone program ```qasync_phantompy.py```
## Standalone
A standalone program is a little tricky as PyQt PyQt5.QtWebEngineWidgets'
QWebEnginePage uses callbacks at each step of the way:
1) loading the page = ```Render.run```
2) running javascript in and on the page = ```Render._loadFinished```
3) saving the page = ```Render.toHtml and _html_callback```
4) printing the page = ```Render._print```
The steps get chained by printing special messages to the Python
renderer of the JavaScript console: ```Render. _onConsoleMessage```
So it makes it hard if you want the standalone program to work without
a GUI, or in combination with another Qt program that is responsible
for the PyQt ```app.exec``` and the exiting of the program.
We've decided to use the best of the shims that merge the Python
```asyncio``` and Qt event loops:
[qasync](https://github.com/CabbageDevelopment/qasync). This is seen as
the successor to the largely abandoned [quamash](https://github.com/harvimt/quamash).
The code is based on a
[comment](https://github.com/CabbageDevelopment/qasync/issues/35#issuecomment-1315060043)
by [Alex March](https://github.com/hosaka) who's excellent code helped me.
As this is my first use of ```asyncio``` and ```qasync``` I may have
introduced some errors and it may be improved on, but it works, and
it is not a monolithic Qt program, so it can be used as a library.
## Usage
The standalone program is ```qasync_phantompy.py```
### Arguments
```
--js_input (optional) Path and name of a JavaScript file to execute on the HTML
--html_output <html-file> (optional) Path a HTML output file to generate after JS is applied
--pdf_output <pdf-file> (optional) Path and name of PDF file to generate after JS is applied
--log_level 10=debug 20=info 30=warn 40=error
html_or_url - required argument, a http(s) URL or a path to a local file.
```
Setting ```DEBUG=1``` in the environment will give debugging messages
on ```stderr```.
## Postscript
When I think of all the trouble people went to compiling and
maintaining the tonnes of C++ code that went into
[phantomjs](https://github.com/ariya/phantomjs), I am amazed that it
can be replaced with a couple of hundred lines of Python!

View File

@ -0,0 +1,275 @@
#!/usr/local/bin/python3.sh
# -*-mode: python; indent-tabs-mode: nil; py-indent-offset: 2; coding: utf-8 -*-
# https://gist.github.com/michaelfranzl/91f0cc13c56120391b949f885643e974/raw/a0601515e7a575bc4c7d4d2a20973b29b6c6f2df/phantom.py
# https://blog.michael.franzl.name/2017/10/16/phantomjs-alternative-write-short-pyqt-scripts-instead-phantom-py/
# https://blog.michael.franzl.name/2017/10/16/phantom-py/
 """
# phantom.py
Simple but fully scriptable headless QtWebKit browser using PyQt5 in Python3,
specialized in executing external JavaScript and generating PDF files. A lean
replacement for other bulky headless browser frameworks.
## Usage
If you have a display attached:
./phantom.py [--pdf_output <pdf-file>] [--js_input <javascript-file>] <url-or-html-file>
If you don't have a display attached (i.e. on a remote server), you can use
xvfb-run, or don't add --show_gui - it should work without a display.
Arguments:
[--pdf_output <pdf-file>] (optional) Path and name of PDF file to generate
[--html_output <html-file>] (optional) Path and name of HTML file to generate
[--js_input <javascript-file>] (optional) Path and name of a JavaScript file to execute
--log_level 10=debug 20=info 30=warn 40=error
<url> Can be a http(s) URL or a path to a local file
## Features
* Generate a PDF screenshot of the web page after it is completely loaded.
* Optionally execute a local JavaScript file specified by the argument
<javascript-file> after the web page is completely loaded, and before
the PDF is generated.
* console.log's will be printed to stdout.
* Easily add new features by changing the source code of this script, without
compiling C++ code. For more advanced applications, consider attaching
PyQt objects/methods to WebKit's JavaScript space by using
`QWebFrame::addToJavaScriptWindowObject()`.
If you execute an external <javascript-file>, phantom.py has no way of knowing
when that script has finished doing its work. For this reason, the external
script should execute `console.log("__PHANTOM_PY_DONE__");` when done. This will
trigger the PDF generation, after which phantom.py will exit. If no
`__PHANTOM_PY_DONE__` string is seen on the console for 10 seconds, phantom.py
will exit without doing anything. This behavior could be implemented more
elegantly without console.log's but it is the simplest solution.
It is important to remember that since you're just running WebKit, you can use
everything that WebKit supports, including the usual JS client libraries, CSS,
CSS @media types, etc.
## Dependencies
* Python3
* PyQt5
* [qasync](https://github.com/CabbageDevelopment/qasync) for the
standalone program ```qasnyc_phantompy.py```
* xvfb (optional for display-less machines)
Installation of dependencies in Debian Stretch is easy:
apt-get install xvfb python3-pyqt5 python3-pyqt5.qtwebkit
Finding the equivalent for other OSes is an exercise that I leave to you.
## Examples
Given the following file /tmp/test.html
<html>
<body>
<p>foo <span id="id1">foo</span> <span id="id2">foo</span></p>
</body>
<script>
document.getElementById('id1').innerHTML = "bar";
</script>
</html>
... and the following file /tmp/test.js:
document.getElementById('id2').innerHTML = "baz";
console.log("__PHANTOM_PY_DONE__");
... and running this script (without attached display) ...
xvfb-run python3 phantom.py /tmp/test.html /tmp/out.pdf /tmp/test.js
... you will get a PDF file /tmp/out.pdf with the contents "foo bar baz".
Note that the second occurrence of "foo" has been replaced by the web page's own
script, and the third occurrence of "foo" by the external JS file.
## License
Copyright 2017 Michael Karl Franzl
Permission is hereby granted, free of charge, to any person obtaining a copy of
this software and associated documentation files (the "Software"), to deal in
the Software without restriction, including without limitation the rights to
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
of the Software, and to permit persons to whom the Software is furnished to do
so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
"""
import importlib
import os
import sys # noqa
from qasync import QtModuleName
from qasync.QtCore import QUrl
QPrinter = importlib.import_module(QtModuleName + ".QtPrintSupport.QPrinter", package=QtModuleName)
QWebEnginePage = importlib.import_module(QtModuleName + ".QtWebEngineWidgets.QWebEnginePage", package=QtModuleName)
global LOG
import logging
import warnings
warnings.filterwarnings('ignore')
LOG = logging.getLogger()
def prepare(sdir='/tmp'):
    """Create the sample test.js / test.html fixtures in `sdir` if absent.

    The pair reproduces the README example: the HTML page rewrites span
    id1 itself, and test.js rewrites span id2 and then logs the
    __PHANTOM_PY_DONE__ sentinel that the renderer waits for.
    """
    js_path = os.path.join(sdir, 'test.js')
    if not os.path.exists(js_path):
        with open(js_path, 'wt') as handle:
            handle.write("""
document.getElementById('id2').innerHTML = "baz";
console.log("__PHANTOM_PY_DONE__");
""")
        LOG.debug(f"wrote {js_path} ")
    html_path = os.path.join(sdir, 'test.html')
    if not os.path.exists(html_path):
        with open(html_path, 'wt') as handle:
            handle.write("""
<html>
<body>
<p>foo <span id="id1">foo</span> <span id="id2">foo</span></p>
</body>
<script>
document.getElementById('id1').innerHTML = "bar";
</script>
</html>
""")
        LOG.debug(f"wrote {html_path} ")
class Render(QWebEnginePage):
    """Headless QWebEnginePage that loads a URL, optionally runs user JS,
    then saves the HTML and/or prints a PDF.

    Stage chaining is driven by sentinel strings sent to the JS console
    (__PHANTOM_PY_DONE__ -> save, __PHANTOM_PY_SAVED__ -> print,
    __PHANTOM_PY_PRINTED__ -> exit) and observed by _onConsoleMessage.
    `percent` is a rough progress value a caller can poll; finished URIs
    are appended to app.ldone.
    """
    def __init__(self, app, do_print=False, do_save=True):
        # NOTE(review): this resets app.ldone for every new Render —
        # confirm that is intended when several instances share one app.
        app.ldone = []
        self._app = app
        self.do_print = do_print
        self.do_save = do_save
        self.percent = 0  # rough progress, 0..100
        self.uri = None
        self.jsfile = None
        self.htmlfile = None
        self.pdffile = None
        QWebEnginePage.__init__(self)
    def run(self, url, pdffile, htmlfile, jsfile):
        """Start loading `url`; `jsfile` (optional) runs after the load."""
        self._app.lstart.append(id(self))
        self.percent = 10
        self.uri = url
        self.jsfile = jsfile
        self.htmlfile = htmlfile
        self.pdffile = pdffile
        self.outfile = pdffile or htmlfile
        LOG.debug(f"phantom.py: URL={url} htmlfile={htmlfile} pdffile={pdffile} JSFILE={jsfile}")
        qurl = QUrl.fromUserInput(url)
        # The PDF generation only happens when the special string __PHANTOM_PY_DONE__
        # is sent to console.log(). The following JS string will be executed by
        # default, when no external JavaScript file is specified.
        self.js_contents = "setTimeout(function() { console.log('__PHANTOM_PY_DONE__') }, 5000);"
        if jsfile:
            try:
                with open(self.jsfile, 'rt') as f:
                    self.js_contents = f.read()
            except Exception as e: # noqa
                LOG.exception(f"error reading jsfile {self.jsfile}")
        self.loadFinished.connect(self._loadFinished)
        self.percent = 20
        self.load(qurl)
        # Route console messages into our chaining handler.
        self.javaScriptConsoleMessage = self._onConsoleMessage
        LOG.debug(f"phantom.py: loading 10")
    def _onConsoleMessage(self, *args):
        """Console hook: advances the save -> print -> exit state machine."""
        # Qt may call with (level, txt, lineno, filename) or without level.
        if len(args) > 3:
            level, txt, lineno, filename = args
        else:
            level = 1
            txt, lineno, filename = args
        LOG.debug(f"CONSOLE {lineno} {txt} (unknown)")
        if "__PHANTOM_PY_DONE__" in txt:
            self.percent = 40
            # If we get this magic string, it means that the external JS is done
            if self.do_save:
                self.toHtml(self._html_callback)
                return
            # drop through
            txt = "__PHANTOM_PY_SAVED__"
        if "__PHANTOM_PY_SAVED__" in txt:
            self.percent = 50
            if self.do_print:
                self._print()
                return
            txt = "__PHANTOM_PY_PRINTED__"
        if "__PHANTOM_PY_PRINTED__" in txt:
            self.percent = 60
            self._exit(level)
    def _loadFinished(self, result):
        """Load completed: make the DOM editable and run the queued JS."""
        # RenderProcessTerminationStatus ?
        self.percent = 30
        LOG.info(f"phantom.py: _loadFinished {result} {self.percent}")
        LOG.debug(f"phantom.py: Evaluating JS from {self.jsfile}")
        self.runJavaScript("document.documentElement.contentEditable=true")
        self.runJavaScript(self.js_contents)
    def _html_callback(self, *args):
        """toHtml callback: persist the HTML then signal the saved stage."""
        if type(args[0]) is str:
            self._save(args[0])
        self._onConsoleMessage(0, "__PHANTOM_PY_SAVED__", 0, '')
    def _save(self, html):
        """Write the rendered HTML string to self.htmlfile."""
        sfile = self.htmlfile
        # CompleteHtmlSaveFormat SingleHtmlSaveFormat MimeHtmlSaveFormat
        with open(sfile, 'wt') as ofd:
            ofd.write(html)
        LOG.debug(f"Saved {sfile}")
    def _printer_callback(self, *args):
        """print() callback: args[0] is the success flag; signal printed."""
        if args[0] is False:
            i = 1
        else:
            i = 0
        self._onConsoleMessage(i, "__PHANTOM_PY_PRINTED__", 0, '')
    def _print(self):
        """Render the page to self.pdffile as an A4 PDF."""
        sfile = self.pdffile
        printer = QPrinter()
        printer.setPageMargins(10, 10, 10, 10, QPrinter.Millimeter)
        printer.setPaperSize(QPrinter.A4)
        printer.setCreator("phantom.py by Michael Karl Franzl")
        printer.setOutputFormat(QPrinter.PdfFormat)
        printer.setOutputFileName(sfile)
        self.print(printer, self._printer_callback)
        LOG.debug("phantom.py: Printed")
    def _exit(self, val):
        """Mark this URI finished; pollers notice via app.ldone."""
        self.percent = 100
        LOG.debug(f"phantom.py: Exiting with val {val}")
        # threadsafe?
        self._app.ldone.append(self.uri)

View File

@ -0,0 +1,128 @@
#!/usr/local/bin/python3.sh
# -*-mode: python; indent-tabs-mode: nil; py-indent-offset: 4; coding: utf-8 -*
import asyncio
import os
import sys
# let qasync figure out what Qt we are using - we dont care
from qasync import QApplication, QEventLoop, QtWidgets
from phantompy import Render
# if you want an example of looking for things in downloaded HTML:
# from lookupdns import LookFor as Render
from support_phantompy import omain_argparser, vsetup_logging
global LOG
import logging
import warnings
warnings.filterwarnings('ignore')
LOG = logging.getLogger()
try:
import shtab
except:
shtab = None
class Widget(QtWidgets.QWidget):
    """Minimal progress window: a task-count label plus a 0-99 progress bar."""
    def __init__(self):
        QtWidgets.QWidget.__init__(self)
        self._label = QtWidgets.QLabel()
        box = QtWidgets.QHBoxLayout()
        self.setLayout(box)
        box.addWidget(self._label)
        self.progress = QtWidgets.QProgressBar()
        self.progress.setRange(0, 99)
        box.addWidget(self.progress)
    def update(self, text):
        # Label shows how many asyncio tasks are alive; the bar shows `text`
        # (a stringified integer). NB: this shadows QWidget.update().
        i = len(asyncio.all_tasks())
        self._label.setText(str(i))
        self.progress.setValue(int(text))
class ContextManager:
    """Async context manager that counts completed one-second ticks."""
    def __init__(self) -> None:
        # number of completed tick() calls
        self._seconds = 0
    async def __aenter__(self):
        LOG.debug("ContextManager enter")
        return self
    async def __aexit__(self, *args):
        LOG.debug("ContextManager exit")
    async def tick(self):
        """Sleep one second, then return the updated tick count."""
        await asyncio.sleep(1)
        self._seconds = self._seconds + 1
        return self._seconds
async def main(widget, app, ilen):
    """Poll once per second (up to ~120 s) until `ilen` URIs appear in
    app.ldone, updating the optional progress `widget`, then print the
    finished list and exit the Qt app."""
    LOG.debug("Task started")
    try:
        async with ContextManager() as ctx:
            for i in range(1, 120):
                seconds = await ctx.tick()
                if widget:
                    widget.update(str(i))
                if len(app.ldone) == ilen:
                    LOG.info(f"Finished with {app.ldone}")
                    print('\n'.join(app.ldone))
                    app.exit()
                    # raise asyncio.CancelledError
                    return
                LOG.debug(f"{app.ldone} {seconds}")
    except asyncio.CancelledError as ex: # noqa
        LOG.debug("Task cancelled")
def iMain(largs):
    """Entry point: parse argv-style `largs`, queue one URL in a Render,
    and drive the qasync event loop until the page pipeline finishes."""
    parser = omain_argparser()
    if shtab:
        shtab.add_argument_to(parser, ["-s", "--print-completion"]) # magic!
    oargs = parser.parse_args(largs)
    bgui = oargs.show_gui
    try:
        # DEBUG=1 in the environment forces debug-level logging.
        d = int(os.environ.get('DEBUG', 0))
        if d > 0:
            oargs.log_level = 10
        vsetup_logging(oargs.log_level, logfile='', stream=sys.stderr)
    except: pass
    app = QApplication([])
    app.lstart = []
    if bgui:
        widget = Widget()
        widget._app = app
        widget.show()
    else:
        widget = None
    loop = QEventLoop(app)
    asyncio.set_event_loop(loop)
    url = oargs.html_url
    htmlfile = oargs.html_output
    # BUG FIX: this previously read oargs.html_output, so the PDF path
    # duplicated the HTML path and --pdf_output was silently ignored.
    pdffile = oargs.pdf_output
    jsfile = oargs.js_input
    # run only starts the url loading
    r = Render(app,
               do_print=True if pdffile else False,
               do_save=True if htmlfile else False)
    uri = url.strip()
    r.run(uri, pdffile, htmlfile, jsfile)
    LOG.debug(f"{r.percent} {app.lstart}")
    LOG.info(f"queued {len(app.lstart)} urls")
    task = loop.create_task(main(widget, app, 1))
    loop.run_forever()
    # cancel remaining tasks and wait for them to complete
    task.cancel()
    tasks = asyncio.all_tasks()
    loop.run_until_complete(asyncio.gather(*tasks))
if __name__ == '__main__':
    iMain(sys.argv[1:])

View File

@ -0,0 +1,140 @@
#!/usr/local/bin/python3.sh
# -*-mode: python; indent-tabs-mode: nil; py-indent-offset: 4; coding: utf-8 -*
import sys
import os
import atexit
import traceback
import functools
import asyncio
import time
import qasync
import threading
from PyQt5.QtWidgets import (QProgressBar, QWidget, QVBoxLayout)
# from PySide2.QtWidgets import QApplication, QProgressBar
from qasync import QEventLoop, QThreadExecutor
from qasync import asyncSlot, asyncClose, QApplication
from phantompy import Render
from lookupdns import LookFor
global LOG
import logging
import warnings
warnings.filterwarnings('ignore')
LOG = logging.getLogger()
class MainWindow(QWidget):
    """Main window: a single 0-99 progress bar in a vertical layout."""
    def __init__(self):
        super().__init__()
        self.setLayout(QVBoxLayout())
        self.progress = QProgressBar()
        self.progress.setRange(0, 99)
        self.layout().addWidget(self.progress)
async def main(app):
    """qasync entry coroutine: show a progress window and run last_50 in
    a QThreadExecutor until the app's aboutToQuit future resolves."""
    def close_future(future, loop):
        # NOTE(review): schedules a cancel in 10 s AND cancels immediately;
        # the immediate cancel makes call_later redundant — confirm intent.
        loop.call_later(10, future.cancel)
        future.cancel()
    loop = asyncio.get_running_loop()
    future = asyncio.Future()
    app.ldone = []
    getattr(app, "aboutToQuit").connect(
        functools.partial(close_future, future, loop)
    )
    if False:
        progress = QProgressBar()
        progress.setRange(0, 99)
        progress.show()
    else:
        mw = MainWindow()
        progress = mw.progress
        mw.show()
    # LOG.info(f"calling first_50 {r}")
    # await first_50(progress, r)
    # NOTE(review): `r` is a module global set in the __main__ block below;
    # this coroutine raises NameError if imported and run elsewhere.
    LOG.info(f"calling last_50 {r}")
    o = QThreadExecutor(max_workers=1)
    app.o = o
    with o as executor:
        await loop.run_in_executor(executor, functools.partial(last_50, progress, sys.argv[1:], app), loop)
    LOG.info(f" {dir(o)}")
    LOG.info(f"awaiting {future}")
    await future
    return True
async def first_50(progress, r=None):
    """Animate the bar through 0..49 (0.1 s per step) when `r` is given.

    Currently unused — the call site in main() is commented out.
    """
    progress.setValue(5)
    LOG.info(f"first_50 {r}")
    if r is not None:
        # loop = asyncio.get_running_loop()
        # LOG.info(f"first_50.r.run {r}")
        # loop.call_soon_threadsafe(r.run, r.url, r.outfile, r.jsfile)
        # r.run( r.url, r.outfile, r.jsfile)
        for i in range(50):
            # LOG.info(f"first_50 {r.progress} {i}")
            # if r.progress >= 100: break
            # progress.setValue(max(r.progress,i))
            progress.setValue(i)
            await asyncio.sleep(.1)
        return
    # NOTE(review): legacy fall-through — `loop` is not defined in this
    # scope and time.sleep(1) would block the event loop; dead code?
    for i in range(50):
        LOG.info(f"first_50 {r} {i}")
        loop.call_soon_threadsafe(progress.setValue, i)
        time.sleep(1)
def last_50(progress, largs, app, loop):
    """Worker-thread half: schedule Render.run on the Qt loop, then tick
    the progress bar from 50 to 99 at one step per second.

    largs: [url, outfile, (optional) jsfile].
    """
    url = largs[0]
    outfile = largs[1]
    jsfile = largs[2] if len(largs) > 2 else None
    r = Render(app, do_print=False, do_save=True)
    uri = url.strip()
    # NOTE(review): phantompy.Render.run takes (url, pdffile, htmlfile,
    # jsfile) — only three args are passed here; confirm which Render
    # implementation this is meant to drive.
    loop.call_soon_threadsafe(r.run, uri, outfile, jsfile)
    time.sleep(1)
    for i in range(50, 100):
        j = len(app.ldone) # r.progress
        if j == 100:
            LOG.info(f"last_50 None {i} {j}")
        else:
            LOG.debug(f"last_50 None {i} {j}")
        loop.call_soon_threadsafe(progress.setValue, i)
        time.sleep(1)
if __name__ == '__main__':
    # Demo: fetch a DNS-over-HTTPS lookup through the headless renderer.
    url = 'https://dns.google/resolve?name=6D6EC2A2E2ED8BFF2D4834F8D669D82FC2A9FA8D.for-privacy.net&type=TXT&cd=true&do=true'
    outfile = '/tmp/test1.pdf'
    jsfile = '/tmp/test1.js'
    # NOTE(review): other entry points import vsetup_logging from
    # support_phantompy — confirm exclude_badExits is intended here.
    from exclude_badExits import vsetup_logging
    vsetup_logging(10)
    app = QApplication([])
    #?
    loop = qasync.QEventLoop(app)
    #NOT loop = asyncio.get_event_loop()
    # NOTE(review): _set_running_loop is private asyncio API, invoked via
    # two different names — fragile across Python versions.
    asyncio._set_running_loop(loop)
    asyncio.events._set_running_loop(loop)
    r = Render(app, do_print=False, do_save=True)
    #loop.call_soon_threadsafe(r.run, url, outfile, jsfile)
    # NOTE(review): phantompy.Render.run expects (url, pdffile, htmlfile,
    # jsfile); only three args are supplied — likely TypeError at run time.
    r.run(url, outfile, jsfile)
    app.rs = [r]
    # NOTE(review): this polling loop blocks ~40 s before the event loop
    # even starts; percent cannot advance until qasync.run() below.
    for i in range(20):
        for elt in app.rs:
            print (elt.percent)
        time.sleep(2)
    try:
        qasync.run(main(app))
    except asyncio.exceptions.CancelledError:
        sys.exit(0)
    except RuntimeError as e:
        LOG.debug('Fixme')
        sys.exit(0)
    except KeyboardInterrupt:
        sys.exit(0)
    else:
        val = 0
        sys.exit(val)

View File

@ -0,0 +1,49 @@
#!/usr/local/bin/python3.sh
# -*-mode: python; indent-tabs-mode: nil; py-indent-offset: 4; coding: utf-8 -*
import sys
import os
import traceback
from phantompy import Render
global LOG
import logging
import warnings
warnings.filterwarnings('ignore')
LOG = logging.getLogger()
import sys
import asyncio
import time
from PyQt5.QtWidgets import QApplication, QProgressBar
from quamash import QEventLoop, QThreadExecutor
# quamash demo bootstrap: bind the asyncio event loop to the Qt app and
# show a bare 0-99 progress bar at module scope.
app = QApplication(sys.argv)
loop = QEventLoop(app)
asyncio.set_event_loop(loop) # NEW must set the event loop
# NOTE(review): _set_running_loop is a private asyncio API — fragile
# across Python versions.
asyncio.events._set_running_loop(loop)
progress = QProgressBar()
progress.setRange(0, 99)
progress.show()
async def master():
    """Drive the demo: animate 0-49 on the event loop, then 50-99 from a
    single worker thread."""
    await first_50()
    with QThreadExecutor(1) as executor:
        # BUG FIX: this previously passed the builtin `exec` instead of the
        # QThreadExecutor instance bound by the `with` statement, which
        # raised at run time.
        await loop.run_in_executor(executor, last_50)
    # TODO announce completion?
async def first_50():
    """Advance the shared progress bar through 0..49, ~0.1 s per step."""
    step = 0
    while step < 50:
        progress.setValue(step)
        await asyncio.sleep(.1)
        step += 1
def last_50():
    """Advance the bar 50..99 from a worker thread via call_soon_threadsafe."""
    value = 50
    while value < 100:
        loop.call_soon_threadsafe(progress.setValue, value)
        time.sleep(.1)
        value += 1
# Drive master() to completion; leaving the `with` block closes the loop.
with loop: ## context manager calls .close() when loop completes, and releases all resources
    loop.run_until_complete(master())

View File

@ -0,0 +1,137 @@
# -*-mode: python; py-indent-offset: 2; indent-tabs-mode: nil; coding: utf-8-unix -*-
# http://vt5hknv6sblkgf22.onion/tutorials/examples/relay_connections.html
import argparse
import collections
import time
import stem.connection
import stem.util.system
import stem.util.str_tools
from stem.control import Listener
from stem.control import Controller
from stem.util.connection import get_connections, port_usage, is_valid_ipv4_address
HEADER_LINE = " {version} uptime: {uptime} flags: {flags}\n"
DIV = '+%s+%s+%s+' % ('-' * 30, '-' * 6, '-' * 6)
COLUMN = '| %-28s | %4s | %4s |'
INBOUND_ORPORT = 'Inbound to our ORPort'
INBOUND_DIRPORT = 'Inbound to our DirPort'
INBOUND_CONTROLPORT = 'Inbound to our ControlPort'
OUTBOUND_ORPORT = 'Outbound to a relay'
OUTBOUND_EXIT = 'Outbound exit traffic'
OUTBOUND_UNKNOWN = 'Outbound uncategorized'
def main(controller):
    """Print a connection summary for a local tor relay.

    Reconnects on 127.0.0.1 (port from --ctrlport, else stem's default),
    prints the relay's version/uptime/flags header, then a table counting
    connections per category (inbound OR/Dir/Control, outbound
    relay/exit/unknown) split IPv4/IPv6, and a per-port exit breakdown.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument("--ctrlport", help="default: 9051 or 9151")
    parser.add_argument("--resolver", help="default: autodetected")
    args = parser.parse_args()
    control_port = int(args.ctrlport) if args.ctrlport else 'default'
    # NOTE(review): the passed-in controller is replaced by a fresh
    # connection here; the parameter is effectively unused beyond this.
    controller = stem.connection.connect(control_port = ('127.0.0.1', control_port))
    if not controller:
        return
    desc = controller.get_network_status(default = None)
    pid = controller.get_pid()
    # BUG FIX: a trailing comma previously made `version` a 1-tuple, so
    # the header printed as "('0.4.x',)" instead of the version string.
    version = str(controller.get_version()).split()[0]
    uptime = stem.util.str_tools.short_time_label(time.time() - stem.util.system.start_time(pid))
    print(HEADER_LINE.format(
        version=version,
        uptime=uptime,
        flags = ', '.join(desc.flags if desc else ['none']),
    ))
    policy = controller.get_exit_policy()
    relays = {}  # address => [orports...]
    for desc in controller.get_network_statuses():
        relays.setdefault(desc.address, []).append(desc.or_port)
    # categorize our connections
    categories = collections.OrderedDict((
        (INBOUND_ORPORT, []),
        (INBOUND_DIRPORT, []),
        (INBOUND_CONTROLPORT, []),
        (OUTBOUND_ORPORT, []),
        (OUTBOUND_EXIT, []),
        (OUTBOUND_UNKNOWN, []),
    ))
    exit_connections = {}  # port => [connections]
    for conn in get_connections(resolver = args.resolver, process_pid = pid):
        if conn.protocol == 'udp':
            continue
        if conn.local_port in controller.get_ports(Listener.OR, []):
            categories[INBOUND_ORPORT].append(conn)
        elif conn.local_port in controller.get_ports(Listener.DIR, []):
            categories[INBOUND_DIRPORT].append(conn)
        elif conn.local_port in controller.get_ports(Listener.CONTROL, []):
            categories[INBOUND_CONTROLPORT].append(conn)
        elif conn.remote_port in relays.get(conn.remote_address, []):
            categories[OUTBOUND_ORPORT].append(conn)
        elif policy.can_exit_to(conn.remote_address, conn.remote_port):
            categories[OUTBOUND_EXIT].append(conn)
            exit_connections.setdefault(conn.remote_port, []).append(conn)
        else:
            categories[OUTBOUND_UNKNOWN].append(conn)
    print(DIV)
    print(COLUMN % ('Type', 'IPv4', 'IPv6'))
    print(DIV)
    total_ipv4, total_ipv6 = 0, 0
    for label, connections in categories.items():
        if len(connections) == 0:
            continue
        ipv4_count = len([conn for conn in connections if is_valid_ipv4_address(conn.remote_address)])
        ipv6_count = len(connections) - ipv4_count
        total_ipv4, total_ipv6 = total_ipv4 + ipv4_count, total_ipv6 + ipv6_count
        print(COLUMN % (label, ipv4_count, ipv6_count))
    print(DIV)
    print(COLUMN % ('Total', total_ipv4, total_ipv6))
    print(DIV)
    print('')
    if exit_connections:
        print(DIV)
        print(COLUMN % ('Exit Port', 'IPv4', 'IPv6'))
        print(DIV)
        total_ipv4, total_ipv6 = 0, 0
        for port in sorted(exit_connections):
            connections = exit_connections[port]
            ipv4_count = len([conn for conn in connections if is_valid_ipv4_address(conn.remote_address)])
            ipv6_count = len(connections) - ipv4_count
            total_ipv4, total_ipv6 = total_ipv4 + ipv4_count, total_ipv6 + ipv6_count
            usage = port_usage(port)
            label = '%s (%s)' % (port, usage) if usage else port
            print(COLUMN % (label, ipv4_count, ipv6_count))
        print(DIV)
        print(COLUMN % ('Total', total_ipv4, total_ipv6))
        print(DIV)
        print('')
if __name__ == '__main__':
    # Connect over the control socket; main() may reconnect via --ctrlport.
    with Controller.from_socket_file(path='/var/run/tor/control') as controller:
        main(controller)

View File

@ -0,0 +1,58 @@
[metadata]
classifiers =
License :: OSI Approved
License :: OSI Approved :: BSD 1-clause
Intended Audience :: Web Developers
Operating System :: Microsoft :: Windows
Operating System :: POSIX :: BSD :: FreeBSD
Operating System :: POSIX :: Linux
Programming Language :: Python :: 3 :: Only
Programming Language :: Python :: 3.6
Programming Language :: Python :: 3.7
Programming Language :: Python :: 3.8
Programming Language :: Python :: 3.9
Programming Language :: Python :: Implementation :: CPython
Framework :: AsyncIO
[options]
zip_safe = false
python_requires = ~=3.6
packages = find:
include_package_data = false
install_requires =
qasync
cryptography
rsa
stem
[options.entry_points]
console_scripts =
phantompy = phantompy.__main__:iMain
[easy_install]
zip_ok = false
[flake8]
jobs = 1
max-line-length = 88
ignore =
E111
E114
E128
  E225
E261
E302
E305
E402
E501
E502
E541
E701
E704
E722
E741
F508
F541
W503

View File

@ -0,0 +1,89 @@
# -*- mode: python; indent-tabs-mode: nil; py-indent-offset: 4; coding: utf-8 -*-
# https://stackoverflow.com/questions/5239797/python-smtplib-proxy-support
# https://stackoverflow.com/questions/19642726/testing-python-smtp-email-service
import socket
import smtplib
import socks
class ProxySMTP(smtplib.SMTP):
    """smtplib.SMTP subclass that tunnels its TCP connection through a
    SOCKS5 proxy (e.g. tor on 127.0.0.1:9050) via PySocks.

    Only socket creation differs from smtplib.SMTP: _get_socket dials
    through socks.create_connection using proxy_addr / proxy_port.
    """
    def __init__(self, host='', port=0, local_hostname=None,
                 timeout=socket._GLOBAL_DEFAULT_TIMEOUT,
                 source_address=None, proxy_addr=None, proxy_port=None):
        """Initialize a new instance.
        If specified, `host' is the name of the remote host to which to
        connect. If specified, `port' specifies the port to which to connect.
        By default, smtplib.SMTP_PORT is used. If a host is specified the
        connect method is called, and if it returns anything other than a
        success code an SMTPConnectError is raised. If specified,
        `local_hostname` is used as the FQDN of the local host in the HELO/EHLO
        command. Otherwise, the local hostname is found using
        socket.getfqdn(). The `source_address` parameter takes a 2-tuple (host,
        port) for the socket to bind to as its source address before
        connecting. If the host is '' and port is 0, the OS default behavior
        will be used.
        `proxy_addr` and `proxy_port` locate the SOCKS5 proxy used by
        _get_socket().
        """
        self._host = host
        self.timeout = timeout
        self.esmtp_features = {}
        self.command_encoding = 'ascii'
        self.source_address = source_address
        self.proxy_addr = proxy_addr
        self.proxy_port = proxy_port
        # Mirror smtplib.SMTP.__init__: connect eagerly when a host is given.
        if host:
            (code, msg) = self.connect(host, port)
            if code != 220:
                self.close()
                raise smtplib.SMTPConnectError(code, msg)
        if local_hostname is not None:
            self.local_hostname = local_hostname
        else:
            # RFC 2821 says we should use the fqdn in the EHLO/HELO verb, and
            # if that can't be calculated, that we should use a domain literal
            # instead (essentially an encoded IP address like [A.B.C.D]).
            fqdn = socket.getfqdn()
            if '.' in fqdn:
                self.local_hostname = fqdn
            else:
                # We can't find an fqdn hostname, so use a domain literal
                addr = '127.0.0.1'
                try:
                    addr = socket.gethostbyname(socket.gethostname())
                except socket.gaierror:
                    pass
                self.local_hostname = '[%s]' % addr
    def _get_socket(self, host, port, timeout):
        # This makes it simpler for SMTP_SSL to use the SMTP connect code
        # and just alter the socket connection bit.
        if self.debuglevel > 0:
            self._print_debug('connect: to', (host, port), self.source_address)
        # Dial through the SOCKS5 proxy instead of a direct TCP connect.
        return socks.create_connection((host, port),
                                       proxy_type=socks.PROXY_TYPE_SOCKS5,
                                       timeout=timeout,
                                       proxy_addr=self.proxy_addr,
                                       proxy_port=self.proxy_port)
# And to use:
if __name__ == '__main__':
    # Demo: send mail via gmail through a local tor SOCKS proxy.
    user_email, user_pass = 'foo', 'bar'
    email_server = ProxySMTP('smtp.gmail.com', 587,
                             proxy_addr='127.0.0.1',
                             proxy_port=9050,
                             timeout=20)
    email_server.starttls()
    try:
        email_server.login(user_email, user_pass)
    except smtplib.SMTPAuthenticationError as e:
        if len(e.args) > 1:
            code = e.args[0]
            # BUG FIX: was `if code = 535:` — an assignment where a
            # comparison belongs, i.e. a SyntaxError.
            if code == 535:
                # 5.7.8 Username and Password not accepted
                pass
        raise
    # NOTE(review): recipient_list and msg are undefined in this demo —
    # they must be supplied before sendmail() can actually run.
    email_server.sendmail(user_email, recipient_list, msg.as_string())
    email_server.quit()

View File

@ -0,0 +1,445 @@
# -*- mode: python; indent-tabs-mode: nil; py-indent-offset: 4; coding: utf-8 -*-
import getpass
import os
import re
import select
import shutil
import socket
import sys
import time
if False:
import cepa as stem
from cepa.connection import MissingPassword
from cepa.control import Controller
from cepa.util.tor_tools import is_valid_fingerprint
else:
import stem
from stem.connection import MissingPassword
from stem.control import Controller
from stem.util.tor_tools import is_valid_fingerprint
global LOG
import logging
import warnings
warnings.filterwarnings('ignore')
LOG = logging.getLogger()
bHAVE_TORR = shutil.which('tor-resolve')
# we check these each time but we got them by sorting bad relays
# in the wild we'll keep a copy here so we can avoid restesting
yKNOWN_NODNS = """
---
- for-privacy.net
- backup.spekadyon.org
- verification-for-nusenu.net
- prsv.ch
- ezyn.de
- dfri.se
- dtf.contact
- galtland.network
- dotsrc.org
- nicdex.com
- unzane.com
- a9.wtf
- tor.skankhunt42.pw
- tor-exit-3.aa78i2efsewr0neeknk.xyz
- privacysvcs.net
- apt96.com
- mkg20001.io
- kryptonit.org
- sebastian-elisa-pfeifer.eu
- nx42.de
- www.defcon.org
- 0x0.is
- transliberation.today
- tor-exit-2.aa78i2efsewr0neeknk.xyz
- interfesse.net
- axims.net
- a9.wtf
- heraldonion.org
- linkspartei.org
- pineapple.cx
- privacylayer.xyz
- prsv.ch
- thingtohide.nl
- tor-exit-2.aa78i2efsewr0neeknk.xyz
- tor-exit-3.aa78i2efsewr0neeknk.xyz
- tor.dlecan.com
- tuxli.org
- verification-for-nusenu.net
"""
# - 0x0.is
# - aklad5.com
# - artikel5ev.de
# - arvanode.net
# - dodo.pm
# - erjan.net
# - galtland.network
# - lonet.sh
# - moneneis.de
# - olonet.sh
# - or-exit-2.aa78i2efsewr0neeknk.xyz
# - or.wowplanet.de
# - ormycloud.org
# - plied-privacy.net
# - rivacysvcs.net
# - redacted.org
# - rofl.cat
# - sv.ch
# - tikel10.org
# - tor.wowplanet.de
# - torix-relays.org
# - tse.com
# - w.digidow.eu
# - w.cccs.de
def oMakeController(sSock='', port=9051):
    """Create a stem Controller from a unix socket path (when it exists)
    or else from a control port, then authenticate with a prompted
    password read from the terminal."""
    import getpass
    if sSock and os.path.exists(sSock):
        controller = Controller.from_socket_file(path=sSock)
    else:
        controller = Controller.from_port(port=port)
    # NOTE(review): original indentation ambiguous — assuming the password
    # prompt and authenticate() run for both connection styles; confirm.
    sys.stdout.flush()
    p = getpass.unix_getpass(prompt='Controller Password: ', stream=sys.stderr)
    controller.authenticate(p)
    return controller
oSTEM_CONTROLER = None
def oGetStemController(log_level=10, sock_or_pair='/run/tor/control'):
    """Return a cached, authenticated stem Controller.

    sock_or_pair may be a control-socket path, a port number, or a
    "host:port" string (host part is ignored; 127.0.0.1 assumed by stem).
    The controller is memoized in the module global oSTEM_CONTROLER.
    """
    global oSTEM_CONTROLER
    if oSTEM_CONTROLER: return oSTEM_CONTROLER
    import stem.util.log
    # stem.util.log.Runlevel = 'DEBUG' = 20 # log_level
    if os.path.exists(sock_or_pair):
        LOG.info(f"controller from socket {sock_or_pair}")
        controller = Controller.from_socket_file(path=sock_or_pair)
    else:
        if type(sock_or_pair) == int:
            port = sock_or_pair
        elif ':' in sock_or_pair:
            port = sock_or_pair.split(':')[1]
        else:
            port = sock_or_pair
        try:
            port = int(port)
        except: port = 9051
        LOG.info(f"controller from port {port}")
        # stem.SocketError
        controller = Controller.from_port(port=port)
    try:
        # Try cookie / password-less authentication first.
        controller.authenticate()
    except (Exception, MissingPassword):
        sys.stdout.flush()
        p = getpass.unix_getpass(prompt='Controller Password: ', stream=sys.stderr)
        controller.authenticate(p)
    oSTEM_CONTROLER = controller
    LOG.debug(f"{controller}")
    return oSTEM_CONTROLER
def bAreWeConnected():
    """Return True if any non-loopback route exists, False when none do,
    or None when the /proc routing table is unavailable.

    FixMe: Linux only — relies on /proc/<pid>/net/route.
    """
    sFile = f"/proc/{os.getpid()}/net/route"
    if not os.path.isfile(sFile): return None
    i = 0
    # BUG FIX: use a context manager so the file handle is closed
    # deterministically (the original leaked it via open(...).readlines()).
    with open(sFile, "r") as ifd:
        for elt in ifd:
            if elt.startswith('Iface'): continue
            if elt.startswith('lo'): continue
            i += 1
    return i > 0
def sMapaddressResolv(target, iPort=9051, log_level=10):
    """Issue a MAPADDRESS of 0.0.0.0 -> `target` via the tor controller;
    returns stem's mapping result, or '' on any failure."""
    if not stem:
        LOG.warn('please install the stem Python package')
        return ''
    try:
        controller = oGetStemController(log_level=log_level)
        # NOTE(review): iPort is accepted but unused — the memoized
        # controller's own connection wins; confirm intent.
        map_dict = {"0.0.0.0": target}
        map_ret = controller.map_address(map_dict)
        return map_ret
    except Exception as e:
        LOG.exception(e)
        return ''
def vwait_for_controller(controller, wait_boot=10):
    """Block until tor reports 100% bootstrap, or ~wait_boot seconds pass.

    Raises SystemExit when no non-loopback route exists at all.
    """
    if bAreWeConnected() is False:
        raise SystemExit("we are not connected")
    percent = i = 0
    # You can call this while boostrapping
    while percent < 100 and i < wait_boot:
        bootstrap_status = controller.get_info("status/bootstrap-phase")
        progress_percent = re.match('.* PROGRESS=([0-9]+).*', bootstrap_status)
        percent = int(progress_percent.group(1))
        LOG.info(f"Bootstrapping {percent}%")
        time.sleep(5)
        i += 5
def bin_to_hex(raw_id, length=None):
    """Return the first `length` bytes of `raw_id` as upper-case hex;
    the whole value when `length` is None."""
    count = len(raw_id) if length is None else length
    hex_pairs = ['{:02x}'.format(raw_id[idx]) for idx in range(count)]
    return ''.join(hex_pairs).upper()
def lIntroductionPoints(controller=None, lOnions=[], itimeout=120, log_level=10):
    """Return the introduction-point relay fingerprints (upper-case hex)
    for each v3 onion address in lOnions.

    now working !!! stem 1.8.x timeout must be huge >120
    'Provides the descriptor for a hidden service. The **address** is the
    '.onion' address of the hidden service '
    What about Services?
    """
    try:
        from cryptography.utils import int_from_bytes
    except ImportError:
        import cryptography.utils
        # Newer cryptography releases dropped int_from_bytes but stem
        # still expects it, so re-create it on the module.
        # BUG FIX: the original shim was
        #   def int_from_bytes(**args): return int.to_bytes(*args)
        # which unpacked keyword args as positionals AND converted in the
        # wrong direction; it raised whenever stem actually called it.
        def int_from_bytes(data, byteorder='big', signed=False):
            return int.from_bytes(data, byteorder, signed=signed)
        cryptography.utils.int_from_bytes = int_from_bytes
    # this will fail if the trick above didnt work
    from stem.prereq import is_crypto_available
    is_crypto_available(ed25519=True)
    from queue import Empty
    from stem import Timeout
    from stem.client.datatype import LinkByFingerprint
    from stem.descriptor.hidden_service import HiddenServiceDescriptorV3
    if type(lOnions) not in [set, tuple, list]:
        lOnions = list(lOnions)
    if controller is None:
        controller = oGetStemController(log_level=log_level)
    l = []
    for elt in lOnions:
        LOG.info(f"controller.get_hidden_service_descriptor {elt}")
        try:
            desc = controller.get_hidden_service_descriptor(elt,
                                                            await_result=True,
                                                            timeout=itimeout)
            # LOG.log(40, f"{dir(desc)} get_hidden_service_descriptor")
            # timeouts 20 sec
            # mistakenly a HSv2 descriptor
            hs_address = HiddenServiceDescriptorV3.from_str(str(desc)) # reparse as HSv3
            oInnerLayer = hs_address.decrypt(elt)
            # LOG.log(40, f"{dir(oInnerLayer)}")
            # IntroductionPointV3
            n = oInnerLayer.introduction_points
            if not n:
                LOG.warn(f"NO introduction points for {elt}")
                continue
            LOG.info(f"{elt} {len(n)} introduction points")
            lp = []
            for introduction_point in n:
                for linkspecifier in introduction_point.link_specifiers:
                    if isinstance(linkspecifier, LinkByFingerprint):
                        # LOG.log(40, f"Getting fingerprint for {linkspecifier}")
                        if hasattr(linkspecifier, 'fingerprint'):
                            # raw relay fingerprints are 20 bytes
                            assert len(linkspecifier.value) == 20
                            lp += [bin_to_hex(linkspecifier.value)]
            LOG.info(f"{len(lp)} introduction points for {elt}")
            l += lp
        except (Empty, Timeout,) as e: # noqa
            LOG.warn(f"Timed out getting introduction points for {elt}")
            continue
        except Exception as e:
            LOG.exception(e)
    return l
def zResolveDomain(domain):
    """Resolve `domain` through tor's SOCKS resolver, falling back to
    getaddrinfo(); returns an IPv4 string, or '' when both fail."""
    try:
        ip = sTorResolve(domain)
    except Exception as e: # noqa
        ip = ''
    if ip == '':
        try:
            lpair = getaddrinfo(domain, 443)
        except Exception as e:
            LOG.warn(f"{e}")
            lpair = None
        if lpair is None:
            LOG.warn(f"TorResolv and getaddrinfo failed for {domain}")
            return ''
        ip = lpair[0]
    return ip
def sTorResolve(target,
                verbose=False,
                sHost='127.0.0.1',
                iPort=9050,
                SOCK_TIMEOUT_SECONDS=10.0,
                SOCK_TIMEOUT_TRIES=3,
                ):
    """Resolve a hostname through Tor's SOCKS4a RESOLVE extension.

    Sends a raw SOCKS4 request with Tor's private command 0xF0 (RESOLVE)
    to the local SOCKS port and reads the 8-byte reply; on success the
    resolved IPv4 address is in reply bytes 4-7.

    :param target: hostname to resolve (no '@' allowed; '/' is stripped)
    :param verbose: unused
    :param sHost: SOCKS proxy host (default local tor)
    :param iPort: SOCKS proxy port (default 9050)
    :param SOCK_TIMEOUT_SECONDS: per-wait select()/recv timeout
    :param SOCK_TIMEOUT_TRIES: number of recv attempts before giving up
    :returns: dotted-quad IPv4 string, or '' on any failure
    """
    MAX_INFO_RESPONSE_PACKET_LENGTH = 8
    if '@' in target:
        LOG.warn(f"sTorResolve failed invalid hostname {target}")
        return ''
    target = target.strip('/')
    # SOCKS4 header: VN=4, CD=0xF0 (tor RESOLVE), dst port 0, dst addr
    # 0.0.0.1 (SOCKS4a marker), empty userid; then hostname + NUL.
    seb = b"\x04\xf0\x00\x00\x00\x00\x00\x01\x00"
    seb += bytes(target, 'US-ASCII') + b"\x00"
    # 9 header bytes + hostname + trailing NUL
    assert len(seb) == 10 + len(target), str(len(seb)) + repr(seb)
    # LOG.debug(f"0 Sending {len(seb)} to The TOR proxy {seb}")

    sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    sock.connect((sHost, iPort))
    sock.settimeout(SOCK_TIMEOUT_SECONDS)
    oRet = sock.sendall(seb)  # noqa

    i = 0
    data = ''
    while i < SOCK_TIMEOUT_TRIES:
        i += 1
        time.sleep(3)
        # wait until the proxy has something for us (or we time out)
        lReady = select.select([sock.fileno()], [], [],
                               SOCK_TIMEOUT_SECONDS)
        if not lReady[0]: continue
        try:
            flags=socket.MSG_WAITALL
            data = sock.recv(MAX_INFO_RESPONSE_PACKET_LENGTH, flags)
        except socket.timeout:
            LOG.warn(f"4 The TOR proxy {(sHost, iPort)}" \
                     +" didnt reply in " + str(SOCK_TIMEOUT_SECONDS) + " sec."
                     +" #" +str(i))
        except Exception as e:
            LOG.error("4 The TOR proxy " \
                      +repr((sHost, iPort)) \
                      +" errored with " + str(e)
                      +" #" +str(i))
            sock.close()
            return ''
        else:
            if len(data) > 0: break

    if len(data) == 0:
        # NOTE(review): the loop exits with i == SOCK_TIMEOUT_TRIES, so
        # the 'i > SOCK_TIMEOUT_TRIES' branch looks unreachable - confirm.
        if i > SOCK_TIMEOUT_TRIES:
            sLabel = "5 No reply #"
        else:
            sLabel = "5 No data #"
        LOG.warn(f"sTorResolve: {sLabel} {i} on {sHost}:{iPort}")
        sock.close()
        return ''

    assert len(data) >= 8
    # byte 1 is the SOCKS4 reply code: 90 = request granted, 91 = failed
    packet_sf = data[1]
    if packet_sf == 90:
        # , "%d" % packet_sf
        assert f"{packet_sf}" == "90", f"packet_sf = {packet_sf}"
        # bytes 4-7 carry the resolved IPv4 address
        return f"{data[4]}.{data[5]}.{data[6]}.{data[7]}"
    else:
        # 91
        LOG.warn(f"tor-resolve failed for {target} on {sHost}:{iPort}")
        # best-effort fallback to the external tor-resolve tool (output unused)
        os.system(f"tor-resolve -4 {target} > /tmp/e 2>/dev/null")
        # os.system("strace tor-resolve -4 "+target+" 2>&1|grep '^sen\|^rec'")
    return ''
def getaddrinfo(sHost, sPort):
    """Resolve (host, port) to a single IPv4 (address, port) pair.

    Wraps socket.getaddrinfo() restricted to AF_INET, keeps only the
    SOCK_DGRAM entry (avoiding duplicate per-socktype results) and
    returns its sockaddr tuple, or None on any resolution error.
    """
    # do this the explicit way = Ive seen the compact connect fail
    # >>> sHost, sPort = 'l27.0.0.1', 33446
    # >>> sock.connect((sHost, sPort))
    # socket.gaierror: [Errno -2] Name or service not known
    try:
        candidates = socket.getaddrinfo(sHost, int(sPort), socket.AF_INET)
        udp_only = [elt for elt in candidates if elt[1] == socket.SOCK_DGRAM]
        assert len(udp_only) == 1, repr(udp_only)
        pair = udp_only[0][-1]
        assert len(pair) == 2, repr(pair)
        assert type(pair[1]) == int, repr(pair)
    except (socket.gaierror, OSError, BaseException) as e:
        LOG.error(e)
        return None
    return pair
def icheck_torrc(sFile, oArgs):
    """Sanity-check a torrc file and advise settings this tool needs.

    Parses ``sFile`` as ``Keyword value`` lines and logs/prints advice:
    control-port auth, node lists that belong in the YAML config instead,
    a ControlSocket, ``UseMicrodescriptors 0`` (full descriptors are
    required to read ContactInfo), and onion auto-mapping options.

    :param sFile: path of the torrc file to read
    :param oArgs: argparse namespace; only ``bad_nodes``/``good_nodes``
        are referenced, and only inside advisory log messages
    :returns: 0 always (this function only advises, it never fails)
    """
    # 'Keyword value' per line; empty lines and lines without a space are
    # ignored (close the file deterministically with a context manager).
    keys = {}
    with open(sFile, 'rt') as oFd:
        for elt in oFd.readlines():
            elt = elt.strip()
            if not elt or ' ' not in elt: continue
            (k, v,) = elt.split(' ', 1)
            keys[k] = v
    if 'HashedControlPassword' not in keys:
        LOG.info('Add HashedControlPassword for security')
        print('run: tor --hashcontrolpassword <TopSecretWord>')
    if 'ExcludeExitNodes' in keys:
        elt = 'BadNodes.ExcludeExitNodes.BadExit'
        LOG.warn(f"Remove ExcludeNodes and move then to {oArgs.bad_nodes}")
        print(f"move to the {elt} section as a list")
    if 'GuardNodes' in keys:
        elt = 'GoodNodes.GuardNodes'
        LOG.warn(f"Remove GuardNodes and move then to {oArgs.good_nodes}")
        print(f"move to the {elt} section as a list")
    if 'ExcludeNodes' in keys:
        elt = 'BadNodes.ExcludeNodes.BadExit'
        LOG.warn(f"Remove ExcludeNodes and move then to {oArgs.bad_nodes}")
        print(f"move to the {elt} section as a list")
    if 'ControlSocket' not in keys and os.path.exists('/run/tor/control'):
        LOG.info('Add ControlSocket /run/tor/control for us')
        print('ControlSocket /run/tor/control GroupWritable RelaxDirModeCheck')
    # BUGFIX: the original tested != '1', which warned when the value was
    # already the required '0' and stayed silent on the undesired '1'.
    if 'UseMicrodescriptors' not in keys or keys['UseMicrodescriptors'] != '0':
        LOG.info('Add UseMicrodescriptors 0 for us')
        print('UseMicrodescriptors 0')
    if 'AutomapHostsSuffixes' not in keys:
        LOG.info('Add AutomapHostsSuffixes for onions')
        print('AutomapHostsSuffixes .exit,.onion')
    if 'AutoMapHostsOnResolve' not in keys:
        LOG.info('Add AutoMapHostsOnResolve for onions')
        print('AutoMapHostsOnResolve 1')
    if 'VirtualAddrNetworkIPv4' not in keys:
        LOG.info('Add VirtualAddrNetworkIPv4 for onions')
        print('VirtualAddrNetworkIPv4 172.16.0.0/12')
    return 0
def lExitExcluder(oArgs, iPort=9051, log_level=10):
    """
    https://raw.githubusercontent.com/nusenu/noContactInfo_Exit_Excluder/main/exclude_noContactInfo_Exits.py

    Excludes every exit relay that publishes no ContactInfo by setting
    ExcludeExitNodes on the running tor via the control port.

    :param oArgs: unused here (kept for a uniform call signature)
    :param iPort: unused here; the controller comes from oGetStemController
    :param log_level: forwarded to oGetStemController
    :returns: list of excluded fingerprints; '' when stem is missing;
        None on error or when ExcludeExitNodes is already set
    """
    if not stem:
        LOG.warn('please install the stem Python package')
        return ''
    LOG.debug('lExcludeExitNodes')
    try:
        controller = oGetStemController(log_level=log_level)
        # generator
        relays = controller.get_server_descriptors()
    except Exception as e:
        LOG.error(f'Failed to get relay descriptors {e}')
        return None
    # do not clobber an operator-managed exclusion list
    if controller.is_set('ExcludeExitNodes'):
        LOG.info('ExcludeExitNodes is in use already.')
        return None
    exit_excludelist=[]
    LOG.debug("Excluded exit relays:")
    for relay in relays:
        # exiting allowed but no ContactInfo published -> candidate
        if relay.exit_policy.is_exiting_allowed() and not relay.contact:
            if is_valid_fingerprint(relay.fingerprint):
                exit_excludelist.append(relay.fingerprint)
                LOG.debug("https://metrics.torproject.org/rs.html#details/%s" % relay.fingerprint)
            else:
                LOG.warn('Invalid Fingerprint: %s' % relay.fingerprint)
    try:
        # push the list to the live tor instance (not persisted to torrc)
        controller.set_conf('ExcludeExitNodes', exit_excludelist)
        LOG.info('Excluded a total of %s exit relays without ContactInfo from the exit position.' % len(exit_excludelist))
    except Exception as e:
        LOG.exception('ExcludeExitNodes ' +str(e))
    return exit_excludelist
if __name__ == '__main__':
    # smoke test: fetch the introduction points of the DuckDuckGo v3 onion
    target = 'duckduckgogg42xjoc72x3sjasowoarfbgcmvfimaftt6twagswzczad'
    controller = oGetStemController(log_level=10)
    lIntroductionPoints(controller, [target], itimeout=120)

View File

@ -0,0 +1,48 @@
#!/usr/local/bin/python3.sh
# -*-mode: python; indent-tabs-mode: nil; py-indent-offset: 4; coding: utf-8 -*
import argparse
import os
import sys
global LOG
import logging
import warnings
warnings.filterwarnings('ignore')
LOG = logging.getLogger()
def omain_argparser(_=None):
    """Build the ArgumentParser for this script.

    The default CA file is taken from the first existing path among
    pyOpenSSL's known certificate file locations, when available.

    :param _: ignored (kept for call-signature compatibility)
    :returns: a configured argparse.ArgumentParser
    """
    try:
        from OpenSSL import SSL
        lCAfs = SSL._CERTIFICATE_FILE_LOCATIONS
    except Exception:  # pyOpenSSL missing, or the private attribute moved
        lCAfs = []

    CAfs = []
    for elt in lCAfs:
        if os.path.exists(elt):
            CAfs.append(elt)
    if not CAfs:
        CAfs = ['']

    parser = argparse.ArgumentParser(add_help=True,
                                     epilog=__doc__)
    parser.add_argument('--https_cafile', type=str,
                        help="Certificate Authority file (in PEM) (unused)",
                        default=CAfs[0])
    parser.add_argument('--log_level', type=int, default=20,
                        help="10=debug 20=info 30=warn 40=error")
    parser.add_argument('--js_input', type=str, default='',
                        help="Operate on the HTML file with javascript")
    parser.add_argument('--html_output', type=str, default='',
                        help="Write loaded and javascripted result to a HTML file")
    parser.add_argument('--pdf_output', type=str, default='',
                        help="Write loaded and javascripted result to a PDF file")
    # BUGFIX: 'store_action' is not an argparse keyword (it raised
    # TypeError at parser construction) and 'type=' is invalid on a
    # boolean flag; a flag is spelled action='store_true'.
    parser.add_argument('--show_gui', action='store_true', default=False,
                        help="show a progress meter that doesn't work")
    # BUGFIX: 'required=' is not allowed on positional arguments
    # (TypeError); nargs='?' already makes the positional optional.
    parser.add_argument('html_url', type=str, nargs='?',
                        help='html file or url')
    return parser

View File

@ -0,0 +1,37 @@
#!/usr/bin/python3 -u
## Copyright (C) 2012 - 2020 ENCRYPTED SUPPORT LP <adrelanos@riseup.net>
## See the file COPYING for copying conditions.
import sys

from stem.connection import connect

import re

# Connect to the local tor control port; stem's connect() handles
# auto-detection/authentication and returns None on failure.
controller = connect()
if not controller:
    # 255 = could not reach/authenticate to the control port at all
    sys.exit(255)

bootstrap_status = controller.get_info("status/bootstrap-phase")

## Possible answer, if network cable has been removed:
## 250-status/bootstrap-phase=WARN BOOTSTRAP PROGRESS=80 TAG=conn_or SUMMARY="Connecting to the Tor network" WARNING="No route to host" REASON=NOROUTE COUNT=26 RECOMMENDATION=warn

## Possible answer:
## 250-status/bootstrap-phase=NOTICE BOOTSTRAP PROGRESS=85 TAG=handshake_or SUMMARY="Finishing handshake with first hop"

## Possible answer, when done:
## 250-status/bootstrap-phase=NOTICE BOOTSTRAP PROGRESS=100 TAG=done SUMMARY="Done"

## TODO: parse the messages above.
## 0

print(format(bootstrap_status))

# The exit code encodes the bootstrap progress percentage (100 == done).
# NOTE(review): if the reply ever lacks 'PROGRESS=', re.match returns
# None and the next line raises AttributeError - confirm that is intended.
progress_percent = re.match('.* PROGRESS=([0-9]+).*', bootstrap_status)
exit_code = int(progress_percent.group(1))
controller.close()
sys.exit(exit_code)

View File

@ -0,0 +1,613 @@
#!/usr/bin/env python3
"""
Tor Contact Info Parser - A tool/Python Class for parsing Tor ContactInfo Information Sharing v2 specification contacts
Written by Eran Sandler (https://twitter.com/erans) (C) 2018
Turned into a proper command-line tool with sub-commands and flags by @Someguy123 at Privex Inc. (C) 2021
(https://www.privex.io) (https://github.com/PrivexInc)
This is a parser for the Tor ContactInfo Information Sharing Specification v2 (https://nusenu.github.io/ContactInfo-Information-Sharing-Specification/).
The parser can parse the ContactInfo field of Tor relays based on the specification.
Official Repo: https://github.com/erans/torcontactinfoparser
Privex Fork: https://github.com/Privex/torcontactinfoparser
Released under the MIT License.
"""
import argparse
import os
import re
import sys
import json
import requests
import textwrap
try:
    from rich import print as rprint
    HAS_RICH = True
except ImportError:
    # Fallback when 'rich' is not installed: pretty-print containers as
    # indented JSON through the builtin print(), same call signature.
    def rprint(value='', *args, **kwargs):
        if value not in [None, False, True] and isinstance(value, (dict, list, set, tuple)):
            value = json.dumps(value, indent=4)
        return print(value, *args, **kwargs)
    # rprint = print
    HAS_RICH = False
global LOG
import logging
import warnings
warnings.filterwarnings('ignore')
LOG = logging.getLogger()
class TorContactInfoParser(object):
    """Parser for the Tor ContactInfo Information Sharing Specification v2
    (https://nusenu.github.io/ContactInfo-Information-Sharing-Specification/).

    ``parse`` splits a relay's ContactInfo string into ``field:value``
    tokens and validates each known field against the per-field rules
    listed in ``_supported_fields_parsers``.
    """

    # regex used to validate (de-obfuscated) email-style field values
    email_regex = "^[a-zA-Z0-9.!#$%&*+/=?^_`{|}~-]+@[a-zA-Z0-9-]+(?:\\.[a-zA-Z0-9-]+)*$"

    def _parse_string_value(self, value, min_length, max_length, valid_chars, raise_exception=False, field_name=None, deobfuscate_email=False):
        """Validate a plain string field against length and charset rules.

        Returns the value unchanged when it passes, otherwise None (or
        raises ValueError when ``raise_exception`` is True).  A
        ``valid_chars`` of "*" disables the charset check;
        ``deobfuscate_email`` is accepted only for signature uniformity.
        """
        value_length = len(value)
        if value_length < min_length:
            if raise_exception:
                raise ValueError("value of field '{0}' is too short".format(field_name))
            return None
        if value_length > max_length:
            if raise_exception:
                raise ValueError("value of field '{0}' is too long".format(field_name))
            return None
        if valid_chars != "*":
            # NOTE(review): re.search matches anywhere in the string, so a
            # single allowed character suffices to pass - confirm intent.
            m = re.search(valid_chars, value)
            if not m:
                if raise_exception:
                    raise ValueError("value of field '{0}' doesn't match valid chars restrictions".format(field_name))
                else:
                    return None
        return value

    def _parse_email_value(self, value, field_name, raise_exception, deobfuscate_email):
        """Validate an obfuscated email field ('[]' stands in for '@').

        Returns the de-obfuscated address when ``deobfuscate_email`` is
        true, the obfuscated form otherwise, or None when empty/invalid.
        """
        if value:
            v = value.replace("[]", "@")
            if re.search(self.email_regex, v):
                if not deobfuscate_email:
                    return v.replace("@", "[]")
                return v
        return None

    # Per-field validation table: "fn" is the unbound parser method above,
    # "args" its extra keyword arguments; parse() injects value /
    # field_name / raise_exception / deobfuscate_email at call time.
    _supported_fields_parsers = {
        "email": {"fn": _parse_email_value, "args": {}},
        "url": {"fn": _parse_string_value, "args": {"min_length": 4, "max_length": 399, "valid_chars": "[_%/:a-zA-Z0-9.-]+"}},
        "proof": {"fn": _parse_string_value, "args": {"min_length": 7, "max_length": 7, "valid_chars": "[adinrsu-]+"}},
        "ciissversion": {"fn": _parse_string_value, "args": {"min_length": 1, "max_length": 1, "valid_chars": "[12]+"}},
        "pgp": {"fn": _parse_string_value, "args": {"min_length": 40, "max_length": 40, "valid_chars": "[a-zA-Z0-9]+"}},
        "abuse": {"fn": _parse_email_value, "args": {}},
        "keybase": {"fn": _parse_string_value, "args": {"min_length": 0, "max_length": 50, "valid_chars": "[a-zA-Z0-9]+"}},
        "twitter": {"fn": _parse_string_value, "args": {"min_length": 1, "max_length": 15, "valid_chars": "[a-zA-Z0-9_]+"}},
        "mastodon": {"fn": _parse_string_value, "args": {"min_length": 0, "max_length": 254, "valid_chars": "*"}},
        "matrix": {"fn": _parse_string_value, "args": {"min_length": 0, "max_length": 254, "valid_chars": "*"}},
        "xmpp": {"fn": _parse_email_value, "args": {}},
        "otr3": {"fn": _parse_string_value, "args": {"min_length": 40, "max_length": 40, "valid_chars": "[a-z0-9]+"}},
        "hoster": {"fn": _parse_string_value, "args": {"min_length": 0, "max_length": 254, "valid_chars": "[a-zA-Z0-9.-]+"}},
        "cost": {"fn": _parse_string_value, "args": {"min_length": 0, "max_length": 13, "valid_chars": "[A-Z0-9.]+"}},
        "uplinkbw": {"fn": _parse_string_value, "args": {"min_length": 0, "max_length": 7, "valid_chars": "[0-9]+"}},
        "trafficacct": {"fn": _parse_string_value, "args": {"min_length": 0, "max_length": 9, "valid_chars": "[unmetrd0-9]+"}},
        "memory": {"fn": _parse_string_value, "args": {"min_length": 0, "max_length": 10, "valid_chars": "[0-9]+"}},
        "cpu": {"fn": _parse_string_value, "args": {"min_length": 0, "max_length": 50, "valid_chars": "[a-zA-Z0-9_-]+"}},
        "virtualization": {"fn": _parse_string_value, "args": {"min_length": 0, "max_length": 15, "valid_chars": "[a-z-]+"}},
        "donationurl": {"fn": _parse_string_value, "args": {"min_length": 0, "max_length": 254, "valid_chars": "*"}},
        "btc": {"fn": _parse_string_value, "args": {"min_length": 26, "max_length": 99, "valid_chars": "[a-zA-Z0-9]+"}},
        "zec": {"fn": _parse_string_value, "args": {"min_length": 0, "max_length": 95, "valid_chars": "[a-zA-Z0-9]+"}},
        "xmr": {"fn": _parse_string_value, "args": {"min_length": 0, "max_length": 99, "valid_chars": "[a-zA-Z0-9]+"}},
        "offlinemasterkey": {"fn": _parse_string_value, "args": {"min_length": 1, "max_length": 1, "valid_chars": "[yn]"}},
        "signingkeylifetime": {"fn": _parse_string_value, "args": {"min_length": 0, "max_length": 6, "valid_chars": "[0-9]+"}},
        "sandbox": {"fn": _parse_string_value, "args": {"min_length": 1, "max_length": 2, "valid_chars": "[yn]"}},
        "os": {"fn": _parse_string_value, "args": {"min_length": 0, "max_length": 20, "valid_chars": "[A-Za-z0-9/.]+"}},
        "tls": {"fn": _parse_string_value, "args": {"min_length": 0, "max_length": 14, "valid_chars": "[a-z]+"}},
        "aesni": {"fn": _parse_string_value, "args": {"min_length": 1, "max_length": 1, "valid_chars": "[yn]"}},
        "autoupdate": {"fn": _parse_string_value, "args": {"min_length": 1, "max_length": 1, "valid_chars": "[yn]"}},
        "confmgmt": {"fn": _parse_string_value, "args": {"min_length": 1, "max_length": 15, "valid_chars": "[a-zA-Z-]"}},
        "dnslocation": {"fn": _parse_string_value, "args": {"min_length": 5, "max_length": 100, "valid_chars": "[a-z,]"}},
        "dnsqname": {"fn": _parse_string_value, "args": {"min_length": 1, "max_length": 1, "valid_chars": "[yn]"}},
        "dnssec": {"fn": _parse_string_value, "args": {"min_length": 1, "max_length": 1, "valid_chars": "[yn]"}},
        "dnslocalrootzone": {"fn": _parse_string_value, "args": {"min_length": 1, "max_length": 1, "valid_chars": "[yn]"}}
    }

    def __init__(self):
        pass

    def parse(self, value: str, raise_exception_on_invalid_value=False, deobfuscate_email=True) -> dict:
        """Parse one ContactInfo string into a dict of validated fields.

        Unknown fields are ignored; invalid known fields map to None
        (or raise when ``raise_exception_on_invalid_value`` is True).
        Returns None when the mandatory 'ciissversion:' field is absent.
        """
        # the ciissversion field is mandatory
        if not 'ciissversion:' in value:
            return None
        result = {}
        parts = value.split(" ")
        for p in parts:
            # split only on the first ':' so URLs keep their colons
            field_parts = p.split(":", 1)
            if len(field_parts) <= 1:
                continue
            name, data = field_parts
            if name in self._supported_fields_parsers:
                field_parser = self._supported_fields_parsers[name]
                if field_parser is None:
                    result[name] = data
                    continue
                if callable(field_parser):
                    value = field_parser(self, data)
                else:
                    # fill in the dynamic arguments, then call the unbound
                    # parser method with self explicitly
                    field_parser["args"]["field_name"] = name
                    field_parser["args"]["value"] = data
                    field_parser["args"]["raise_exception"] = raise_exception_on_invalid_value
                    field_parser["args"]["deobfuscate_email"] = deobfuscate_email
                    value = field_parser["fn"](self, **field_parser["args"])
                # first occurrence of a field wins (unless it was falsy)
                if not result.get(name, None):
                    result[name] = value
        return result
def cmd_parse(opts: argparse.Namespace):
    """
    ArgParser function for parsing a single ContactInfo string, and outputting it as JSON (or python-style dict's)
    """
    # contact comes from argv (joined back into one string), or from
    # stdin when no argument / a literal '-' is given
    if opts.contact is None or len(opts.contact) == 0 or opts.contact[0] == '-':
        contact = sys.stdin.read()
    else:
        contact = ' '.join(opts.contact).strip()

    tparser = TorContactInfoParser()
    res = tparser.parse(contact)

    # non-pretty mode prints flat real JSON regardless of --json
    if not opts.pretty:
        return print(json.dumps(res))
    if opts.json:
        res = json.dumps(res, indent=4) if opts.pretty else json.dumps(res)
    # if not HAS_RICH: res = json.dumps(res, indent=4)
    rprint(res)
def vsetup_logging(log_level, logfile='', stream=sys.stderr):
    """Configure the root logger, optionally colored and/or to a file.

    :param log_level: numeric logging level (10=debug ... 40=error)
    :param logfile: optional file target; a leading '+' also keeps a
        console stream handler, a leading '-' means file-only
    :param stream: stream for console output (default sys.stderr)
    """
    global LOG
    add = True
    try:
        if 'COLOREDLOGS_LEVEL_STYLES' not in os.environ:
            os.environ['COLOREDLOGS_LEVEL_STYLES'] = 'spam=22;debug=28;verbose=34;notice=220;warning=202;success=118,bold;error=124;critical=background=red'
        # https://pypi.org/project/coloredlogs/
        import coloredlogs
    except ImportError:
        coloredlogs = False

    # normalize the default formatter's timestamp format globally
    logging._defaultFormatter = logging.Formatter(datefmt='%m-%d %H:%M:%S')
    logging._defaultFormatter.default_time_format = '%m-%d %H:%M:%S'
    logging._defaultFormatter.default_msec_format = ''

    kwargs = dict(level=log_level,
                  force=True,
                  format='%(levelname)s %(message)s')

    if logfile:
        add = logfile.startswith('+')
        sub = logfile.startswith('-')
        if add or sub:
            logfile = logfile[1:]
        kwargs['filename'] = logfile

    if coloredlogs:
        # https://pypi.org/project/coloredlogs/
        aKw = dict(level=log_level,
                   logger=LOG,
                   stream=stream,
                   fmt='%(levelname)s %(message)s'
                   )
        coloredlogs.install(**aKw)
        if logfile:
            oHandler = logging.FileHandler(logfile)
            LOG.addHandler(oHandler)
        LOG.info(f"CSetting log_level to {log_level} {stream}")
    else:
        logging.basicConfig(**kwargs)
        if add and logfile:
            # '+file': log to the file AND keep console output
            oHandler = logging.StreamHandler(stream)
            LOG.addHandler(oHandler)
        LOG.info(f"SSetting log_level to {log_level!s}")
def cmd_scan(opts: argparse.Namespace, adata=None):
    """
    ArgParser function for scanning all ContactInfo strings from ``https://onionoo.torproject.org/details`` ,
    and outputting each one as a Python-style Dict, or JSON.

    :param adata: optional previously-downloaded onionoo details document
        (JSON text); when falsy the document is fetched over the network
    """
    parser = TorContactInfoParser()
    surl = "https://onionoo.torproject.org/details"

    if not adata:
        LOG.info(f"Getting relays from {surl}")
        jdata = requests.get(surl)
        try:
            adata = jdata.json()
        except Exception as e:
            # simplejson.errors.JSONDecodeError
            LOG.exception(f"JSON error {e}")
            return
        elts = adata["relays"]
    else:
        elts = json.loads(adata)['relays']

    if not elts:
        LOG.warn(f"NO relays - are we connected?")
        return
    LOG.info(f"{len(elts)} relays")
    for relay in elts:
        if 'fingerprint' not in relay.keys():
            LOG.warn(f"fingerprint not in relay for {relay}")
            continue
        fp = relay['fingerprint']
        verified_host_names = relay.get('verified_host_names', [])
        contact = relay.get("contact", None)
        if not contact:
            LOG.warn(f"No contact for {fp} {verified_host_names}")
            continue
        # cheap pre-filter: parse() requires a ciissversion field anyway
        if 'ciissversion' not in contact:
            LOG.debug(f"No ciissversion in contact in {fp}")
            continue
        LOG.debug(f"parsing {fp}")
        result = parser.parse(contact, False)
        if not result:
            LOG.warn(f"No result for {contact} in {fp}")
            continue
        if len(result) > 0:
            if opts.json: result = json.dumps(result, indent=4) if opts.pretty else json.dumps(result)
            if opts.pretty:
                rprint(result)
            else:
                print(result)
ETC_DIR = '/etc/tor/yaml'
def oparser():
    """Build the ArgumentParser: a 'scan' default action plus 'parse' and
    'scan' subcommands; the epilog carries worked usage examples."""
    cparser = argparse.ArgumentParser(
        formatter_class=argparse.RawDescriptionHelpFormatter,
        description=textwrap.dedent(f"""
    Examples:

        # 'scan' is the original behaviour of this script. It iterates over the data
        # from https://onionoo.torproject.org/details , parses each contact, and prints it as Python dict-style JSON.
        {sys.argv[0]} scan

        # Same as previous. With no arguments, it's equivalent to running 'scan'.
        {sys.argv[0]}

        # If you pass '-p' after scan, it will enable pretty printing. For best pretty printing,
        # make sure you have 'rich' installed from pypi.
        {sys.argv[0]} scan -p

        # If you need real JSON with double quotes, rather than Python dict-style JSON, you can
        # use the '-j' flag to enable "real JSON" mode (you can combine with '-p' if you want pretty printed real json)
        {sys.argv[0]} scan -j

        # Using 'parse', you can parse an arbitrary ContactInfo string, and it will output the parsed result
        # with pretty printing by default.

        {sys.argv[0]} parse "contact Privex Inc. email:noc[]privex.io url:https://www.privex.io " \\
            "proof:uri-rsa pgp:288DD1632F6E8951 keybase:privexinc twitter:PrivexInc hoster:www.privex.io " \\
            "uplinkbw:500 memory:4096 virtualization:kvm btc:bc1qpst9uscvd8rpjjhzz9rau3trylh6e0wh76qrlhw3q9nj89ua728sn3t6a2 " \\
            "xmr:89tukP3wfpH4FZAmC1D2GfArWwfPTz8Ap46NZc54Vyhy9YxEUYoFQ7HGQ74LrCMQTD3zxvwM1ewmGjH9WVmeffwR72m1Pps"

        {{
            'email': 'noc@privex.io',
            'url': 'https://www.privex.io',
            'proof': 'uri-rsa',
            'pgp': None,
            'keybase': 'privexinc',
            'twitter': 'PrivexInc',
            'hoster': 'www.privex.io',
            'uplinkbw': '500',
            'memory': '4096',
            'virtualization': 'kvm',
            'btc': 'bc1qpst9uscvd8rpjjhzz9rau3trylh6e0wh76qrlhw3q9nj89ua728sn3t6a2',
            'xmr': '89tukP3wfpH4FZAmC1D2GfArWwfPTz8Ap46NZc54Vyhy9YxEUYoFQ7HGQ74LrCMQTD3zxvwM1ewmGjH9WVmeffwR72m1Pps'
        }}

        # You can also pipe a contact string into 'parse', and it will work just the same.

        echo "Privex Inc. email:noc[]privex.io url:https://www.privex.io proof:uri-rsa pgp:288DD1632F6E8951 keybase:privexinc twitter:PrivexInc" | {sys.argv[0]} parse
        {{'email': 'noc@privex.io', 'url': 'https://www.privex.io', 'proof': 'uri-rsa', 'pgp': None, 'keybase': 'privexinc', 'twitter': 'PrivexInc\n'}}

        # If you need real JSON outputted, rather than Python dict-style output, you can pass -j to either 'parse' or 'scan'

        {sys.argv[0]} parse -j "Privex Inc. email:noc[]privex.io url:https://www.privex.io proof:uri-rsa pgp:288DD1632F6E8951 keybase:privexinc twitter:PrivexInc"
        {{
            "email": "noc@privex.io",
            "url": "https://www.privex.io",
            "proof": "uri-rsa",
            "pgp": null,
            "keybase": "privexinc",
            "twitter": "PrivexInc"
        }}

        # You can use '-np' to disable pretty printing for 'parse' - you can combine it with '-j' to get flat, plain JSON.

        {sys.argv[0]} parse -np -j "Privex Inc. email:noc[]privex.io url:https://www.privex.io proof:uri-rsa pgp:288DD1632F6E8951 keybase:privexinc twitter:PrivexInc"
        {{"email": "noc@privex.io", "url": "https://www.privex.io", "proof": "uri-rsa", "pgp": null, "keybase": "privexinc", "twitter": "PrivexInc"}}
    """))

    # with no subcommand, behave like 'scan'
    cparser.set_defaults(func=cmd_scan, json=False, pretty=False)
    subparse = cparser.add_subparsers()
    subparse.required = False

    sp_parse = subparse.add_parser('parse',
                                   help="Parse a single contact string, either as an argument, or piped into stdin")
    sp_parse.add_argument('contact', nargs='*')
    # NOTE(review): store_false with default=False means 'pretty' can
    # never become True via this subcommand - confirm the intended default.
    sp_parse.add_argument('-np', '--no-pretty',
                          action='store_false', default=False, dest='pretty',
                          help="Disable pretty printing JSON")
    sp_parse.add_argument('--relays_output', type=str,
                          dest='relays_output',
                          default=os.path.join(ETC_DIR, 'relays.json'),
                          help="Write the download relays in json to a file")
    sp_parse.add_argument('-j', '--json', action='store_true',
                          default=False, dest='json',
                          help="Output real JSON, not Python dict format.")
    sp_parse.set_defaults(func=cmd_parse)

    sp_scan = subparse.add_parser('scan', help="Parse all contacts from https://onionoo.torproject.org/details")
    sp_scan.add_argument('-p', action='store_true', default=False, dest='pretty', help="Enable pretty printing JSON")
    sp_scan.add_argument('-j', '--json', action='store_true', default=False, dest='json', help="Output real JSON, not Python dict format.")
    # sp_scan.set_defaults(func=cmd_scan)
    return cparser
if __name__ == "__main__":
    # DEBUG=1 in the environment switches on debug logging
    if os.environ.get('DEBUG', ''):
        log_level = 10
    else:
        log_level = 20
    vsetup_logging(log_level)
    # BUGFIX: 'i' was only assigned inside the except handler, so a
    # successful run crashed with NameError at sys.exit(i); initialize
    # the exit status before the try block.
    i = 0
    try:
        cparser = oparser()
        opts = cparser.parse_args(sys.argv[1:])
        data = None
        # reuse a previously downloaded relay dump when one exists
        if opts.relays_output and os.path.exists(opts.relays_output):
            data = open(opts.relays_output, 'rt').read()
        cmd_scan(opts, data)
    except (requests.exceptions.ProxyError, Exception,) as e:
        LOG.error(f"{e}")
        i = 0
    # else:
    #     args = cparser.parse_args(sys.argv[1:])
    #     i = args.func(args)
    sys.exit(i)

View File

@ -0,0 +1,627 @@
# -*- mode: python; indent-tabs-mode: nil; py-indent-offset: 4; coding: utf-8 -
# from https://github.com/nusenu/trustor-poc
# with minor refactoring to make the code more Pythonic.
import datetime
import os
import re
import sys
import ipaddress
import warnings
import urllib3.util
from urllib3.util import parse_url as urlparse
from stem.control import Controller
# from stem.util.tor_tools import *
try:
# unbound is not on pypi
from unbound import RR_CLASS_IN, RR_TYPE_TXT, ub_ctx
except:
ub_ctx = RR_TYPE_TXT = RR_CLASS_IN = None
global LOG
import logging
warnings.filterwarnings('ignore')
LOG = logging.getLogger()
logging.getLogger("urllib3").setLevel(logging.INFO)
# import urllib3.contrib.pyopenssl
# urllib3.contrib.pyopenssl.inject_into_urllib3()
# download this python library from
# https://github.com/erans/torcontactinfoparser
# sys.path.append('/home/....')
try:
from torcontactinfo import TorContactInfoParser
except:
TorContactInfoParser = None
# Domain-specific error raised by the proof-download/validation helpers below.
class TrustorError(Exception): pass
# https://stackoverflow.com/questions/2532053/validate-a-hostname-string
# FIXME this check allows non-fqdn names
# https://stackoverflow.com/questions/2532053/validate-a-hostname-string
# FIXME this check allows non-fqdn names
def is_valid_hostname(hostname):
    """Return True when every dot-separated label of ``hostname`` is a
    valid DNS label (1-63 chars, alphanumeric/hyphen, not starting or
    ending with a hyphen) and the whole name is at most 255 chars.
    One trailing dot (DNS root) is tolerated."""
    if len(hostname) > 255:
        return False
    if hostname[-1] == ".":
        hostname = hostname[:-1]  # strip exactly one dot from the right, if present
    label_re = re.compile("(?!-)[A-Z0-9-]{1,63}(?<!-)$", re.IGNORECASE)
    return all(label_re.match(label) for label in hostname.split("."))
def read_local_trust_config(trust_config):
    '''
    reads a local configuration file containing trusted domains
    and returns them in an array

    Each non-comment line has the form ``domain:max_depth``.  Any
    malformed, duplicate or unsupported entry aborts the whole program
    via sys.exit (codes 8-11), matching the upstream trustor PoC.

    :param trust_config: path of the trust config file
    :returns: list of trusted domain strings
    '''
    result = []
    # for now we support max_depth = 0 only
    # this PoC version has no support for recursion
    # https://github.com/nusenu/tor-relay-operator-ids-trust-information#trust-information-consumers
    supported_max_depths = ['0']
    if (os.path.isfile(trust_config)):
        # BUGFIX: use a context manager (the handle used to leak) and
        # skip blank lines ('line[0]' raised IndexError on empty lines).
        with open(trust_config) as f:
            for line in f:
                line = line.strip()
                if not line or line[0] == '#':
                    continue
                try:
                    domain, max_depth = line.split(':')
                except:
                    LOG.error('invalid trust config line detected: %s aborting!' % line)
                    sys.exit(8)
                if max_depth in supported_max_depths:
                    if is_valid_hostname(domain) and domain not in result:
                        result.append(domain)
                    else:
                        LOG.error('invalid duplicate domain in trust config file: %s: %s aborting!' % (trust_config, domain))
                        sys.exit(9)
                else:
                    LOG.error('unsupported max_depth value (%s) used, aborting!' % line)
                    sys.exit(10)
        return result
    else:
        LOG.error("trust config file %s missing, aborting!" % trust_config)
        sys.exit(11)
def read_local_validation_cache(validation_cache_file, trusted_domains=[]):
    '''
    reads the local validation cache and returns all fingerprints in the cache
    for trusted domains

    format of each entry in the cache:
    domain:fingerprint:prooftype:date

    :param validation_cache_file: path of the cache file; a missing file
        is not an error (it will be created later)
    :param trusted_domains: only fingerprints of these domains are kept;
        an empty list short-circuits to an empty result
    :returns: list of fingerprint strings
    '''
    result = []
    if trusted_domains == []:
        return result
    if os.path.isfile(validation_cache_file):
        with open(validation_cache_file, 'rt') as f:
            for line in f:
                line = line.strip()
                # BUGFIX: skip blank lines too - 'line[0]' raised
                # IndexError on an empty line.
                if not line or line[0] == '#':
                    continue
                try:
                    domain, fingerprint, prooftype, dt = line.split(':')
                except:
                    LOG.error('invalid trust cache entry detected: %s aborting!' % line)
                    sys.exit(12)
                if domain in trusted_domains:
                    result.append(fingerprint)
                else:
                    LOG.warn('ignoring cached entry for untrusted domain %s' % domain)
    else:
        LOG.info("Validation cache file not present. It will be created.")
    return result
def get_controller(address='127.0.0.1', port=9151, password=''):
    '''
    connects to a local tor client via the tor ControlPort
    and returns a controller that allows us to easily set specific tor
    configuration options or read tor relay ContactInfo strings for validation

    Exits the process (codes 1/2) on connection failure or when the torrc
    does not explicitly set UseMicrodescriptors.
    '''
    try:
        # controller = Controller.from_socket_file(path=torsocketpath)
        controller = Controller.from_port(address=address, port=port)
        controller.authenticate(password=password)
    except Exception as e:
        LOG.error(f"Failed to connect to the tor process, {e}")
        sys.exit(1)
    # full server descriptors are needed to read ContactInfo strings, so
    # microdescriptors must be explicitly configured off in the torrc
    if not controller.is_set('UseMicrodescriptors'):
        LOG.error('"UseMicrodescriptors 0" is required in your torrc configuration. Exiting.')
        sys.exit(2)
    return controller
def find_validation_candidates(controller,
                               trusted_domains=[],
                               validation_cache=[],
                               CAfile='/etc/ssl/certs/ca-certificates.crt',
                               accept_all=False):
    '''
    connect to a tor client via controlport and return a dict of all
    not yet validated fingerprints per trusted operators
    format:
    { trusted_domain: { prooftype: [fingerprint, fingerprint, ...]} }
    example content:
    { 'emeraldonion.org' : { 'uri-rsa': ['044600FD968728A6F220D5347AD897F421B757C0', '09DCA3360179C6C8A5A20DDDE1C54662965EF1BA']}}

    :param controller: stem Controller (see get_controller)
    :param trusted_domains: operators to consider (unless accept_all)
    :param validation_cache: fingerprints already validated, skipped here
    :param CAfile: unused in this function
    :param accept_all: when True, collect candidates for every domain
    '''
    # https://github.com/nusenu/ContactInfo-Information-Sharing-Specification#proof
    accepted_proof_types = ['uri-rsa', 'dns-rsa']
    # https://github.com/nusenu/ContactInfo-Information-Sharing-Specification#ciissversion
    accepted_ciissversions = ['2']
    result = {}
    try:
        relays = controller.get_server_descriptors()
    except:
        LOG.error('Failed to get relay descriptors via tor\'s ControlPort. Exiting.')
        sys.exit(3)
    ci = TorContactInfoParser()
    for relay in relays:
        if relay.contact:
            fingerprint = relay.fingerprint
            # skip fingerprints we have already successfully validated in the past
            # a future version would check the cache age as well
            if fingerprint in validation_cache:
                continue
            contactstring = relay.contact.decode('utf-8')
            # NOTE(review): parse() returns None when 'ciissversion:' is
            # absent, which would make len() below raise - confirm inputs.
            parsed_ci = ci.parse(contactstring)
            if len(parsed_ci) > 0:
                if 'ciissversion' in parsed_ci and 'proof' in parsed_ci and 'url' in parsed_ci:
                    prooftype = parsed_ci['proof']
                    ciurl = parsed_ci['url']
                    if parsed_ci['ciissversion'] in accepted_ciissversions and prooftype in accepted_proof_types:
                        if ciurl.startswith('http://') or ciurl.startswith('https://'):
                            try:
                                domain = urlparse(ciurl).netloc
                            except:
                                LOG.warning('failed to parse domain %s' % ciurl)
                                domain = 'error'
                                continue
                        else:
                            domain = ciurl
                        if not is_valid_hostname(domain):
                            domain = 'error'
                            continue
                        # we can ignore relays that do not claim to be operated by a trusted operator
                        # if we do not accept all
                        if domain not in trusted_domains and not accept_all:
                            continue
                        if domain in result.keys():
                            if prooftype in result[domain].keys():
                                result[domain][prooftype].append(fingerprint)
                            else:
                                # NOTE(review): this replaces the whole
                                # per-domain dict, dropping fingerprints
                                # already collected under other prooftypes;
                                # the upstream PoC appends instead - confirm.
                                result[domain] = {prooftype: [fingerprint]}
                                # mixed proof types are not allowd as per spec but we are not strict here
                                LOG.warning('%s is using mixed prooftypes %s' % (domain, prooftype))
                        else:
                            result[domain] = {prooftype: [fingerprint]}
    return result
def oDownloadUrlRequests(uri, sCAfile, timeout=30, host='127.0.0.1', port=9050, content_type='text/plain', session=None):
    """Fetch ``uri`` through a Tor SOCKS proxy using requests, HEAD first.

    Raises TrustorError on any protocol violation required by the spec
    (non-2xx status, wrong Content-Type, redirect) and returns the
    requests Response on success.

    :param sCAfile: CA bundle path; must exist (sanity check only)
    :param host/port: SOCKS proxy endpoint (local tor by default)
    :param session: optional requests Session to reuse connections
    """
    import requests
    # socks proxy used for outbound web requests (for validation of proofs)
    # BUGFIX: this was a plain string literal, so "{host}:{port}" was
    # never interpolated and the proxy URL was invalid; it needs the
    # f-prefix ('socks5h' resolves hostnames via the proxy).
    proxy = {'https': f"socks5h://{host}:{port}"}
    # we use this UA string when connecting to webservers to fetch rsa-fingerprint.txt proof files
    # https://nusenu.github.io/ContactInfo-Information-Sharing-Specification/#uri-rsa
    headers = {'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; rv:91.0) Gecko/20100101 Firefox/91.0'}

    LOG.debug("fetching %s...." % uri)
    try:
        # grr. fix urllib3
        # urllib3.connection WARNING Certificate did not match expected hostname:
        head = requests.head(uri, timeout=timeout, proxies=proxy, headers=headers)
    except Exception as e:
        LOG.exception(f"{e}")
        raise TrustorError(f"HTTP HEAD request failed for {uri} {e}")

    if head.status_code >= 300:
        raise TrustorError(f"HTTP Errorcode {head.status_code}")
    if not head.headers['Content-Type'].startswith('text/plain'):
        raise TrustorError(f"HTTP Content-Type != text/plain")
    if not os.path.exists(sCAfile):
        raise TrustorError(f"File not found CAfile {sCAfile}")

    if session is None: session = requests.sessions.Session()
    try:
        oReqResp = session.request(method="get", url=uri,
                                   proxies=proxy,
                                   timeout=timeout,
                                   headers=headers,
                                   allow_redirects=False,
                                   verify=True
                                   )
    except:
        LOG.warn("HTTP GET request failed for %s" % uri)
        raise
    if oReqResp.status_code != 200:
        # BUGFIX: report the GET status, not the earlier HEAD status
        raise TrustorError(f"HTTP Errorcode {oReqResp.status_code}")
    if not oReqResp.headers['Content-Type'].startswith('text/plain'):
        raise TrustorError(f"HTTP Content-Type != text/plain")

    # check for redirects (not allowed as per spec)
    if oReqResp.url != uri:
        LOG.error(f'Redirect detected {uri} vs %s (final)' % (oReqResp.url))
        raise TrustorError(f'Redirect detected {uri} vs %s (final)' % (oReqResp.url))
    return oReqResp
# There's no point in using asyncio because of duplicate urls in the tasks
async def oDownloadUrlHttpx(uri, sCAfile, timeout=30, host='127.0.0.1', port=9050, content_type='text/plain'):
    """Async variant of oDownloadUrlRequests using httpx.

    HEAD first, then GET with redirects forbidden; raises TrustorError on
    non-2xx status, wrong Content-Type or a detected redirect, and
    returns the httpx Response on success.

    :param host/port: SOCKS proxy endpoint; both falsy disables the proxy
    """
    import httpcore
    import asyncio
    import httpx
    # socks proxy used for outbound web requests (for validation of proofs)
    # BUGFIX: this was a plain string literal, so "{host}:{port}" was
    # never interpolated; it needs the f-prefix.
    if host and port:
        proxy = f"socks5://{host}:{port}"
    else:
        proxy = ''
    # we use this UA string when connecting to webservers to fetch rsa-fingerprint.txt proof files
    # https://nusenu.github.io/ContactInfo-Information-Sharing-Specification/#uri-rsa
    headers = {'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; rv:91.0) Gecko/20100101 Firefox/91.0'}

    LOG.debug("fetching %s...." % uri)
    async with httpx.AsyncClient(proxies=proxy) as client:
        try:
            # https://www.python-httpx.org/advanced/
            head = await client.head(uri, timeout=timeout, headers=headers)
        except Exception as e:
            LOG.exception(f"{e}")
            raise TrustorError(f"HTTP HEAD request failed for {uri} {e}")

        if head.status_code >= 300:
            raise TrustorError(f"HTTP Errorcode {head.status_code}")
        if content_type and not head.headers['Content-Type'].startswith(content_type):
            raise TrustorError(f"HTTP Content-Type != {content_type}" )
        if not os.path.exists(sCAfile):
            raise TrustorError(f"File not found CAfile {sCAfile}")

        try:
            oReqResp = await client.get(url=uri,
                                        timeout=timeout,
                                        headers=headers,
                                        max_redirects=0,
                                        verify=sCAfile,
                                        )
        except (asyncio.exceptions.CancelledError,
                httpcore.PoolTimeout,
                Exception,) as e:
            LOG.warn(f"HTTP GET request failed for %s {e}" % uri)
            raise
        if oReqResp.status_code != 200:
            # BUGFIX: report the GET status, not the earlier HEAD status
            LOG.warn(f"HTTP Errorcode {oReqResp.status_code}")
            raise TrustorError(f"HTTP Errorcode {oReqResp.status_code}")
        if not oReqResp.headers['Content-Type'].startswith('text/plain'):
            LOG.warn(f"HTTP Content-Type != text/plain")
            raise TrustorError(f"HTTP Content-Type != text/plain")

        # check for redirects (not allowed as per spec)
        if oReqResp.url != uri:
            LOG.error(f'Redirect detected {uri} vs %s (final)' % (oReqResp.url))
            raise TrustorError(f'Redirect detected {uri} vs %s (final)' % (oReqResp.url))
    return oReqResp
def ballow_subdomain_matching(hostname, dnsnames):
    """Return True if `hostname` is a strict subdomain of any name in
    `dnsnames`, requiring a dot boundary.

    BUGFIX: the original used a bare `hostname.endswith(elt)`, so
    'a.evilexample.com' matched 'example.com'.  Requiring the parent to
    be preceded by '.' closes that hole.
    """
    for parent in dnsnames:
        # hostname must have more labels AND sit directly under `parent`
        if len(hostname.split('.')) > len(parent.split('.')) and \
           hostname.endswith('.' + parent):
            # parent domain of hostname
            return True
    return False
from urllib3.util.ssl_match_hostname import (CertificateError, _dnsname_match,
_ipaddress_match)
def my_match_hostname(cert, hostname):
    """Verify that *cert* (in decoded format as returned by
    SSLSocket.getpeercert()) matches the *hostname*. RFC 2818 and RFC 6125
    rules are followed, but IP addresses are not accepted for *hostname*.
    CertificateError is raised on failure. On success, the function
    returns nothing.

    Local divergence from urllib3's match_hostname: when multiple dNSName
    entries exist and none matches exactly, a subdomain relationship
    (see ballow_subdomain_matching) is accepted with a warning instead
    of raising CertificateError.
    """
    if not cert:
        raise ValueError(
            "empty or no certificate, match_hostname needs a "
            "SSL socket or SSL context with either "
            "CERT_OPTIONAL or CERT_REQUIRED"
        )
    try:
        # Divergence from upstream: ipaddress can't handle byte str
        host_ip = ipaddress.ip_address(hostname)
    except (UnicodeError, ValueError):
        # ValueError: Not an IP address (common case)
        # UnicodeError: Divergence from upstream: Have to deal with ipaddress not taking
        # byte strings. addresses should be all ascii, so we consider it not
        # an ipaddress in this case
        host_ip = None
    except AttributeError:
        # Divergence from upstream: Make ipaddress library optional
        # NOTE(review): relies on a module-level `ipaddress` name that may
        # be None — the import is not visible in this chunk; confirm.
        if ipaddress is None:
            host_ip = None
        else:  # Defensive
            raise
    dnsnames = []
    # Walk subjectAltName first; a single positive match returns early.
    san = cert.get("subjectAltName", ())
    for key, value in san:
        if key == "DNS":
            if host_ip is None and _dnsname_match(value, hostname):
                return
            dnsnames.append(value)
        elif key == "IP Address":
            if host_ip is not None and _ipaddress_match(value, host_ip):
                return
            dnsnames.append(value)
    if not dnsnames:
        # The subject is only checked when there is no dNSName entry
        # in subjectAltName
        for sub in cert.get("subject", ()):
            for key, value in sub:
                # XXX according to RFC 2818, the most specific Common Name
                # must be used.
                if key == "commonName":
                    if _dnsname_match(value, hostname):
                        return
                    dnsnames.append(value)
    if len(dnsnames) > 1:
        # soften this to allow subdomain matching
        if ballow_subdomain_matching(hostname, dnsnames):
            LOG.warn(f"Allowing {hostname} in {dnsnames}")
            return
        raise CertificateError(
            "hostname %r "
            "doesn't match any of %s" % (hostname, ", ".join(map(repr, dnsnames)))
        )
    elif len(dnsnames) == 1:
        raise CertificateError("hostname %r doesn't match %r" % (hostname, dnsnames[0]))
    else:
        raise CertificateError(
            "no appropriate commonName or subjectAltName fields were found"
        )
urllib3.util.ssl_match_hostname.match_hostname = my_match_hostname
from urllib3.util.ssl_ import is_ipaddress
def _my_match_hostname(cert, asserted_hostname):
    """Drop-in replacement for urllib3.connection._match_hostname.

    Delegates to my_match_hostname (subdomain-tolerant), logs a warning
    on mismatch, and attaches the peer cert to the exception before
    re-raising so callers can inspect it.
    """
    # Our upstream implementation of ssl.match_hostname()
    # only applies this normalization to IP addresses so it doesn't
    # match DNS SANs so we do the same thing!
    # (strips IPv6 brackets and a stray 'u', mirroring upstream urllib3)
    stripped_hostname = asserted_hostname.strip("u[]")
    if is_ipaddress(stripped_hostname):
        asserted_hostname = stripped_hostname
    try:
        my_match_hostname(cert, asserted_hostname)
    except CertificateError as e:
        LOG.warning(
            "Certificate did not match hostname: %s. Certificate: %s",
            asserted_hostname,
            cert,
        )
        # Add cert to exception and reraise so client code can inspect
        # the cert when catching the exception, if they want to
        e._peer_cert = cert
        raise
# Monkey-patch urllib3 so every connection uses the softened matching above.
urllib3.connection._match_hostname = _my_match_hostname
from urllib3.contrib.socks import SOCKSProxyManager
# from urllib3 import Retry
def oDownloadUrlUrllib3Socks(uri,
                             sCAfile,
                             timeout=30,
                             host='127.0.0.1',
                             port=9050,
                             session=None,
                             content_type='text/plain'):
    """There's no need to use requests here and it
    adds too many layers on the SSL to be able to get at things.

    HEAD-checks uri through a socks5h proxy, then GETs it, enforcing:
    status 200, a Content-Type starting with `content_type`, and no
    redirects (as required by the uri-rsa proof spec).  Raises
    TrustorError on any violation.  `session` is accepted only for
    signature compatibility with the requests variant and is unused.
    """
    # fail early, before opening any connection, if the CA bundle is absent
    if not os.path.exists(sCAfile):
        raise TrustorError(f"File not found CAfile {sCAfile}")
    domain = urlparse(uri).netloc
    # socks proxy used for outbound web requests (for validation of proofs)
    proxy = SOCKSProxyManager(f'socks5h://{host}:{port}/',
                              num_pools=1,
                              timeout=timeout,
                              cert_reqs='CERT_REQUIRED',
                              assert_hostname=domain,
                              ca_certs=sCAfile)
    # we use this UA string when connecting to webservers to fetch rsa-fingerprint.txt proof files
    # https://nusenu.github.io/ContactInfo-Information-Sharing-Specification/#uri-rsa
    headers = {'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; rv:91.0) Gecko/20100101 Firefox/91.0'}
    LOG.debug("fetching %s...." % uri)
    try:
        # Errors will be wrapped in :class:`~urllib3.exceptions.MaxRetryError` unless
        # retries are disabled, in which case the causing exception will be raised.
        head = proxy.request('HEAD', uri,
                             headers=headers,
                             redirect=False,
                             retries=False)
    except Exception as e:
        LOG.error(f"HTTP HEAD request failed for {uri} {e}")
        raise
    if head.status >= 300:
        raise TrustorError(f"HTTP Errorcode {head.status}")
    if content_type and not head.headers['Content-Type'].startswith(content_type):
        raise TrustorError(f"HTTP Content-Type != {content_type}")
    try:
        oReqResp = proxy.request("GET", uri,
                                 headers=headers,
                                 redirect=False,
                                 )
    except Exception as e:
        LOG.warning(f"HTTP GET request failed for {uri} {e}")
        raise
    if oReqResp.status != 200:
        # BUGFIX: previously reported head.status (the HEAD code),
        # masking the actual GET status.
        raise TrustorError(f"HTTP Errorcode {oReqResp.status}")
    if content_type and not oReqResp.headers['Content-Type'].startswith(content_type):
        raise TrustorError(f"HTTP Content-Type != {content_type}")
    # check for redirects (not allowed as per spec)
    if oReqResp.geturl() != uri:
        LOG.error('Redirect detected %s vs %s (final)' % (uri, oReqResp.geturl()))
        raise TrustorError('Redirect detected %s vs %s (final)' % (uri, oReqResp.geturl()))
    oReqResp.decode_content = True
    return oReqResp
import urllib3.connectionpool
from urllib3.connection import HTTPSConnection
urllib3.connectionpool.VerifiedHTTPSConnection = HTTPSConnection
def lDownloadUrlFps(domain, sCAfile, timeout=30, host='127.0.0.1', port=9050):
    """Download the well-known rsa-fingerprint.txt proof file for `domain`
    and return its 40-character fingerprint lines, uppercased."""
    uri = "https://%s/.well-known/tor-relay/rsa-fingerprint.txt" % domain
    resp = oDownloadUrlRequests(uri, sCAfile, timeout=timeout, host=host, port=port)
    lines = resp.text.upper().strip().split('\n')
    # keep only plausible relay fingerprints (non-empty, exactly 40 chars)
    return [fp for fp in lines if fp and len(fp) == 40]
def validate_proofs(candidates, validation_cache_file, timeout=20, host='127.0.0.1', port=9050,
                    sCAfile='/etc/ssl/certs/ca-certificates.crt'):
    '''
    This function takes the return value of find_validation_candidates()
    and validates them according to their proof type (uri-rsa, dns-rsa)
    and writes properly validated relay fingerprints to the local validation cache.

    sCAfile (new, backward-compatible default): CA bundle handed to
    lDownloadUrlFps().  BUGFIX: the original call omitted this required
    positional argument, a guaranteed TypeError on the uri-rsa path.
    '''
    dt_utc = datetime.datetime.now(datetime.timezone.utc).date()
    count = 0
    # 'with' guarantees the cache file is closed even if a download raises
    with open(validation_cache_file, mode='a') as f:
        for domain in candidates.keys():
            for prooftype in candidates[domain].keys():
                if prooftype == 'uri-rsa':
                    well_known_content = lDownloadUrlFps(domain, sCAfile,
                                                         timeout=timeout, host=host, port=port)
                    for fingerprint in candidates[domain][prooftype]:
                        if fingerprint in well_known_content:
                            # write cache entry
                            count += 1
                            f.write('%s:%s:%s:%s\n' % (domain, fingerprint, prooftype, dt_utc))
                        else:
                            LOG.error('%s:%s:%s' % (fingerprint, domain, prooftype))
                elif prooftype == 'dns-rsa' and ub_ctx:
                    for fingerprint in candidates[domain][prooftype]:
                        fp_domain = fingerprint + '.' + domain
                        if idns_validate(fp_domain,
                                         libunbound_resolv_file='resolv.conf',
                                         dnssec_DS_file='dnssec-root-trust',
                                         ) == 0:
                            count += 1
                            f.write('%s:%s:%s:%s\n' % (domain, fingerprint, prooftype, dt_utc))
                        else:
                            LOG.error('%s:%s:%s' % (fingerprint, domain, prooftype))
    LOG.info('successfully validated %s new (not yet validated before) relays' % count)
def idns_validate(domain,
                  libunbound_resolv_file='resolv.conf',
                  dnssec_DS_file='dnssec-root-trust',
                  ):
    '''
    performs DNS TXT lookups and verifies the reply
    - is DNSSEC valid and
    - contains only a single TXT record
    - the DNS record contains a hardcoded string as per specification
    https://nusenu.github.io/ContactInfo-Information-Sharing-Specification/#dns-rsa

    Return codes: 0 = validated, 1 = lookup failed or record invalid,
    5 = resolv config missing, 6 = DNSSEC trust anchor missing,
    -1 = unbound bindings unavailable.
    '''
    # NOTE(review): ub_ctx / RR_TYPE_TXT / RR_CLASS_IN are assumed to come
    # from an `unbound` import earlier in the file — not visible here; confirm.
    if not ub_ctx: return -1
    # this is not the system wide /etc/resolv.conf
    # use dnscrypt-proxy to encrypt your DNS and route it via tor's SOCKSPort
    ctx = ub_ctx()
    if (os.path.isfile(libunbound_resolv_file)):
        ctx.resolvconf(libunbound_resolv_file)
    else:
        LOG.error('libunbound resolv config file: "%s" is missing, aborting!' % libunbound_resolv_file)
        return 5
    if (os.path.isfile(dnssec_DS_file)):
        ctx.add_ta_file(dnssec_DS_file)
    else:
        LOG.error('DNSSEC trust anchor file "%s" is missing, aborting!' % dnssec_DS_file)
        return 6
    status, result = ctx.resolve(domain, RR_TYPE_TXT, RR_CLASS_IN)
    if status == 0 and result.havedata:
        # require exactly one TXT record AND a secure (DNSSEC) answer
        if len(result.rawdata) == 1 and result.secure:
            # ignore the first byte, it is the TXT length
            if result.data.as_raw_data()[0][1:] == b'we-run-this-tor-relay':
                return 0
    return 1
def configure_tor(controller, trusted_fingerprints, exitonly=True):
    '''
    takes the list of trusted fingerprints and configures a tor client
    to only use trusted relays in a certain position
    for now we only set exits.
    we refuse to set the configuration if there are less than 40 trusted relays

    NOTE(review): `exitonly` is currently unused — only ExitNodes is set.
    Exits the process (15 / 20) on failure rather than raising.
    '''
    relay_count = len(trusted_fingerprints)
    if relay_count < 41:
        LOG.error('Too few trusted relays (%s), aborting!' % relay_count)
        sys.exit(15)
    try:
        controller.set_conf('ExitNodes', trusted_fingerprints)
        # BUGFIX: success message was logged at ERROR level
        LOG.info('limited exits to %s relays' % relay_count)
    except Exception as e:  # noqa
        LOG.exception('Failed to set ExitNodes tor config to trusted relays')
        sys.exit(20)
if __name__ == '__main__':
    # System CA bundle used to verify the HTTPS proof downloads.
    CAfile = '/etc/ssl/certs/ca-certificates.crt'
    # local file listing the operator domains we are willing to trust
    trust_config = 'trust_config'
    assert os.path.exists(trust_config)
    trusted_domains = read_local_trust_config(trust_config)
    validation_cache_file = 'validation_cache'
    trusted_fingerprints = read_local_validation_cache(validation_cache_file,
                                                       trusted_domains=trusted_domains)
    # tor ControlPort password
    controller_password = ''
    # tor ControlPort IP
    controller_address = '127.0.0.1'
    timeout = 20
    port = 9050
    # NOTE(review): read_local_trust_config / read_local_validation_cache /
    # get_controller / find_validation_candidates are defined earlier in
    # the file and are not visible in this chunk.
    controller = get_controller(address=controller_address, password=controller_password)
    r = find_validation_candidates(controller,
                                   validation_cache=trusted_fingerprints,
                                   trusted_domains=trusted_domains,
                                   CAfile=CAfile)
    validate_proofs(r, validation_cache_file,
                    timeout=timeout,
                    host=controller_address,
                    port=port)
    # refresh list with newly validated fingerprints
    trusted_fingerprints = read_local_validation_cache(validation_cache_file,
                                                       trusted_domains=trusted_domains)
    configure_tor(controller, trusted_fingerprints)

View File

@ -0,0 +1,222 @@
#!/usr/bin/env python
# -*-mode: python; indent-tabs-mode: nil; py-indent-offset: 4; coding: utf-8 -*-
"""
Runs doctests locally
doctest files are in the tests/ directory.
Note that when writing new test files, it will be convenient to use the command-line flags to avoid time-consuming reprovisioning or to target particular boxes or tests.
"""
from __future__ import print_function
from sys import stderr
import argparse
import doctest
import glob
import re
import subprocess
import sys
import os
OPTIONS = doctest.ELLIPSIS | doctest.NORMALIZE_WHITESPACE
# Convenience items for testing.
# We'll pass these as globals to the doctests.
if os.path.exists('/dev/null'):
DEV_NULL = open('/dev/null', 'w')
EXE='vagrant'
else:
DEV_NULL = open('NUL:', 'w')
EXE='sh /i/bin/vagrant.msys'
# find all our available boxes
#with open('Vagrantfile', 'r') as f:
# avail_boxes = re.findall(r'^\s+config.vm.define "(.+?)"', f.read(), re.MULTILINE)
# unused because it could be a Ruby variable
parser = argparse.ArgumentParser(description='Run playbook tests.')
parser.add_argument(
'-f', '--force',
action='store_true',
help="Force tests to proceed if box already exists. Do not destroy box at end of tests."
)
parser.add_argument(
'-n', '--no-provision',
action='store_true',
help="Skip provisioning."
)
parser.add_argument(
'-F', '--fail-fast',
action='store_true',
help="REPORT_ONLY_FIRST_FAILURE."
)
parser.add_argument(
'-o', '--options',
help=""
)
parser.add_argument(
'--haltonfail',
action='store_true',
help="Stop multibox tests after a fail; leave box running."
)
parser.add_argument(
'--file',
help="Specify a single doctest file (default tests/*.txt).",
)
parser.add_argument(
'--box',
help="Specify a particular target box",
action="append",
)
args = parser.parse_args()
if args.box:
    lBoxes = args.box
else:
    # find all our available running boxes
    # BUGFIX: os.system() returns the exit status (an int), not the
    # command output, so the split() below always crashed; also
    # StandardError is Python-2-only and raised NameError on Python 3.
    try:
        s = subprocess.check_output(
            "vagrant global-status 2>&1 | grep running | cut -f 1 -d ' '",
            shell=True).decode('utf-8', 'replace').strip()
    except Exception:
        print("ERROR: Unable to find any running boxes. Rerun with the --box argument.", file=sys.stderr)
        raise
    assert s, "ERROR: Unable to find a running box. Rerun with the --box argument."
    # the grep|cut pipeline emits one box name per line
    lBoxes = s.splitlines()
# mplatform = None
# def get_mplatform():
# global mplatform
# # Linux-4.14.80-gentoo-x86_64-Intel-R-_Pentium-R-_CPU_N3700_@_1.60GHz-with-gentoo-2.2.1
# if mplatform is None:
# mplatform = subprocess.check_output(
# """vagrant ssh %s -c 'python -mplatform'""" % box,
# shell=True,
# stderr=DEV_NULL
# )
# return mplatform
print (repr(args))
def ssh_run(cmd):
    """
    Run a command line in a vagrant box via vagrant ssh.
    Return the output.
    """
    # NOTE(review): on Python 3, check_output returns bytes, so the
    # str-typed .replace('^@', '') would raise TypeError — confirm the
    # intended interpreter or decode first.  `box` and `EXE` are
    # module-level globals set by the driver loop below.
    return subprocess.check_output(
        """%s ssh %s -c '%s'""" % (EXE, box, cmd),
        shell=True,
        stderr=DEV_NULL
    ).replace('^@', '')
def run(cmd):
    """
    Run a command in the host.
    Stop the tests with a useful message if it fails.
    """
    # Build Popen keyword arguments once; close_fds is only added on
    # non-Windows hosts (it was unsupported with pipes on old Windows).
    popen_kwargs = {
        'shell': True,
        'stdout': subprocess.PIPE,
        'stderr': subprocess.PIPE,
    }
    if not sys.platform.startswith('win'):
        popen_kwargs['close_fds'] = True
    proc = subprocess.Popen(cmd, **popen_kwargs)
    out, err = proc.communicate()
    if proc.returncode != 0:
        print(out, file=sys.stderr)
        # Stop the doctest
        raise KeyboardInterrupt(err)
    return out
def cut(y, column_nums, sort=False):
    """
    returns a list of lines reduced to the chosen column_nums

    Accepts str or bytes input; columns within a row are joined by tabs.
    Columns whose index is out of range for a row are silently skipped.
    """
    assert y and len(y) > 0, "Empty string passed to cut"
    # Normalize to text.  BUGFIX: the original did y.encode('utf-8'),
    # which on Python 3 produced bytes and broke the str "\t".join below;
    # bytes input must be *decoded* instead.
    if isinstance(y, bytes):
        s = y.decode('utf-8')
    else:
        s = y
    lines = s.splitlines()
    line_lists = [l.split() for l in lines if l]
    rez = ["\t".join([col[col_num]
                      for col_num in column_nums if col_num < len(col)])
           for col in line_lists]
    if sort:
        return sorted(rez)
    else:
        return rez
def joined_cut(s, column_nums, sort=False):
    """Like cut(), but returns the rows as one newline-separated string."""
    rows = cut(s, column_nums, sort)
    return "\n".join(rows)
for box in lBoxes:
    # Globals exposed to every doctest file for this box.
    globs = {
        'ssh_run': ssh_run,
        'run': run,
        'cut': cut,
        'joined_cut': joined_cut,
        'skip_provisioning': args.no_provision,
        'no_provisioning': args.no_provision,
        'forcing': args.force,
        'box': box,
    }
    if args.fail_fast:
        OPTIONS = doctest.REPORT_ONLY_FIRST_FAILURE | OPTIONS
    if box and not args.force:
        # BUGFIX: check_output returns bytes on Python 3; decode before
        # matching, since re.search(str_pattern, bytes) raises TypeError.
        output = subprocess.check_output(
            "%s status %s" % (EXE, box,), shell=True).decode('utf-8', 'replace')
        if re.search(r"%s\s+not created" % box, output) is None:
            print("Vagrant box already exists. Destroy it or use '-f' to skip this test.", file=sys.stderr)
            print("Use '-f' in combination with '-n' to skip provisioning.", file=sys.stderr)
            exit(1)
    if args.file is None:
        files = glob.glob('tests/*.txt')
    else:
        files = [args.file]
    for fn in files:
        # banner so interleaved doctest output is attributable to box/file
        print("%s / %s" % (box, fn), file=sys.stderr)
        print('*' * 50)
        print(box)
        print('*' * 50)
        print(fn)
        print('*' * 50)
        try:
            failure_count, test_count = doctest.testfile(fn,
                                                         module_relative=False,
                                                         optionflags=OPTIONS,
                                                         globs=globs)
        except Exception:
            # dump sys.path to help diagnose doctest import failures
            sys.stderr.write('\n'.join(sys.path) + '\n')
            raise
        if args.haltonfail and failure_count > 0:
            print("Test failures occurred. Stopping tests and leaving vagrant box %s running." % box, file=sys.stderr)
            exit(1)
    # Clean up our vagrant box — unless -f was given, then leave it running.
    if box and not args.force:
        print("Destroying %s" % box, file=sys.stderr)
        run("%s destroy %s -f" % (EXE, box,))
    elif box:
        print("Vagrant box %s left running." % box, file=sys.stderr)

View File

@ -0,0 +1,84 @@
#!/bin/sh
# -*- mode: sh; tab-width: 8; encoding: utf-8-unix -*-
# BUGFIX: the shebang was '#/bin/sh' (missing '!'), so the kernel could
# not select an interpreter when the script was executed directly.
#
# Fetches/copies toxygen_wrapper into $PREFIX/src and provides the usual
# subcommands: (no args) = install, check, lint, test, refresh.
PREFIX=/usr/local
. /usr/local/etc/testforge/testforge.bash
ROLE=toxcore

PYVER=3
P="BASE_PYTHON${PYVER}_MINOR"
[ -z "$PYTHON_MINOR" ] && PYTHON_MINOR="$(eval echo \$$P)"

PYTHON_EXE_MSYS=$PREFIX/bin/python$PYVER.bash
PYTHON_EXE=$PYTHON_EXE_MSYS

DESC=""
. /var/local/src/var_local_src.bash || exit 1

SITE_PACKAGES_MSYS=$PREFIX/$LIB/python$PYTHON_MINOR/site-packages
HTTP_DIR=$PREFIX/net/Http

DIR=toxygen_wrapper
MOD=$DIR
GIT_HUB=git.plastiras.org
GIT_USER=emdee
GIT_DIR=$DIR

# tox_profile
cd $PREFIX/src || exit 2
WD=$PWD

if [ "$#" -eq 0 ] ; then
    if [ ! -d "$DIR" ] ; then
	if [ ! -d "$PREFIX/net/Git/$GIT_HUB/$GIT_USER/$GIT_DIR" ] ; then
	    ols_are_we_connected || exit 0
	    [ -d "$PREFIX/net/Git/$GIT_HUB/$GIT_USER" ] || \
		mkdir "$PREFIX/net/Git/$GIT_HUB/$GIT_USER"
	    ( cd "$PREFIX/net/Git/$GIT_HUB/$GIT_USER" && \
		  git clone "https://$GIT_HUB/$GIT_USER/$GIT_DIR" ) || \
		exit 2
	    ( cd "$PREFIX/net/Git/$GIT_HUB/$GIT_USER" && \
		  git config user emdee && \
		  git config email emdee@ )
	fi
	cp -rip "$PREFIX/net/Git/$GIT_HUB/$GIT_USER/$GIT_DIR" . || exit 3
    fi
    cd $DIR || exit 4
    [ -f __init__.py ] || touch __init__.py
    [ -d libs ] || mkdir libs
    cd libs
    # NOTE(review): all three names are symlinked to libtoxcore.so —
    # confirm that av/encryptsave are bundled into that single library.
    for file in libtoxav.so libtoxcore.so libtoxencryptsave.so ; do
	[ -e $file ] && continue
	ln -s $PREFIX//src/c-toxcore/_build/libtoxcore.so $file
    done
    cd ..
    # ols_apply_testforge_patches
    # "$PYTHON_EXE_MSYS" -c "import $MOD" 2>/dev/null || exit 10
    exit 0
elif [ $1 = 'check' ] ; then # 1*
    # "$PYTHON_EXE_MSYS" -c "import $MOD" 2>/dev/null || exit 10
    :
elif [ "$1" = 'lint' ] ; then # 2*
    # BUGFIX: was 'return 20' — 'return' is only valid in a function or a
    # sourced script; every other branch here uses 'exit'.
    [ -n "$PYVER" ] || exit 20
    ols_run_lint_pylint -E --recursive y || exit 2$?
elif [ "$1" = 'test' ] ; then # 3*
    cd $PREFIX/src/$DIR/$DIR/wrapper_tests || exit 32
    $PYTHON_EXE_MSYS tests_wrapper.py \
	>> $WD/$DIR/test.log 2>&1 || \
	{ ERROR "$MOD code $?" ; cat $WD/$DIR/test.log ; exit 35 ; }
elif [ "$1" = 'refresh' ] ; then # 6*
    cd $PREFIX/src/$DIR || exit 60
    /usr/local/sbin/base_diff_from_dst.bash $ROLE || exit 6$?
fi

View File

@ -4,6 +4,8 @@
prog=`basename $0 .bash` prog=`basename $0 .bash`
ROLE=toxcore ROLE=toxcore
PREFIX=/usr/local PREFIX=/usr/local
LOCAL_DOCTEST=/usr/local/bin/testforge_run_doctest3.bash
DOCTEST=${LOCAL_DOCTEST}
. /usr/local/bin/usr_local_tput.bash . /usr/local/bin/usr_local_tput.bash
# we install into /usr/local/bin and it takes precedence # we install into /usr/local/bin and it takes precedence
@ -16,21 +18,24 @@ if [ "$#" -eq 0 ] ; then
cd $PREFIX/src || exit 2 cd $PREFIX/src || exit 2
WD=$PWD WD=$PWD
bash c-toxcore.bash # || exit 3$? bash c-toxcore.bash # || exit 13$?
bash tox_profile.bash # || 4$? bash tox_profile.bash # || 14$?
# sh mitogen.bash # sh mitogen.bash
# sh toxcore_docker.bash || exit 4$? # sh toxcore_docker.bash || exit 14$?
# which sdwdate >/dev/null 2>/dev/null || \ # which sdwdate >/dev/null 2>/dev/null || \
# [ -f $PREFIX/bin/sdwdate.bash ] || \ # [ -f $PREFIX/bin/sdwdate.bash ] || \
# sh sdwdate.bash # sh sdwdate.bash
sh gridfire.bash # || exit 6$? bash gridfire.bash # || exit 16$?
sh pyassuan.bash #|| exit 7$? bash pyassuan.bash #|| exit 17$?
sh tinfoilhat.shmoo.com.bash bash tinfoilhat.shmoo.com.bash
# sh negotiator.bash # sh negotiator.bash
bash kernelexpect.bash
# bash dracut-055.bash
bash toxygen_wrapper.bash
[ -d testssl.sh ] || \ [ -d testssl.sh ] || \
sh testssl.bash || exit 9$? sh testssl.bash || exit 19$?
exit 0 exit 0
@ -43,7 +48,7 @@ elif [ "$1" = 'check' ] ; then
elif [ "$1" = 'lint' ] ; then elif [ "$1" = 'lint' ] ; then
# ols_run_tests_shellcheck $ROLE || exit 2$? # ols_run_tests_shellcheck $ROLE || exit 2$?
msys_var_local_src_prog_key $1 || exit 21$? msys_var_local_src_prog_key $1 || exit 21$?
# ols_run_tests_pylint || exit 22$? msys_run_tests_pylint || exit 22$?
exit 0 exit 0
elif [ "$1" = 'test' ] ; then elif [ "$1" = 'test' ] ; then
@ -53,4 +58,8 @@ elif [ "$1" = 'test' ] ; then
msys_gentoo_test_imports || exit 32$? msys_gentoo_test_imports || exit 32$?
#hangs /usr/bin/expect gpgkey_test_gpg.exp foobar || exit 31$? #hangs /usr/bin/expect gpgkey_test_gpg.exp foobar || exit 31$?
elif [ "$1" = 'doctest' ] ; then # 8*
msys_var_local_src_prog_key $1 || exit 8$?
${DOCTEST} /usr/local/share/doc/txt/${ROLE}3.txt
fi fi

View File

@ -7,12 +7,13 @@
verbosity: 1 verbosity: 1
msg: "toxcore main.yml BOX_OS_FAMILY={{BOX_OS_FAMILY}} {{BOX_GENTOO_FROM_MP}} {{ansible_virtualization_role|replace('NA', 'host')}}" msg: "toxcore main.yml BOX_OS_FAMILY={{BOX_OS_FAMILY}} {{BOX_GENTOO_FROM_MP}} {{ansible_virtualization_role|replace('NA', 'host')}}"
- name: toxcore include_vars - name: include toxcore by-platform vars
include_vars: "{{item}}.yml" include_vars: "{{item}}.yml"
with_items: with_items:
- Linux - Linux
- "{{ ansible_distribution }}{{ ansible_distribution_major_version }}" - "{{ ansible_distribution }}{{ ansible_distribution_major_version }}"
tags: always tags: always
check_mode: false
- name: "rsync toxcore root_overlay" - name: "rsync toxcore root_overlay"
synchronize: synchronize:
@ -113,7 +114,7 @@
groups: "{{ item.1 }}" groups: "{{ item.1 }}"
when: when:
- item != '' - item != ''
- "len(toxcore_system_users) > 0" - "toxcore_system_users != []"
# some groups may not be there # some groups may not be there
ignore_errors: true ignore_errors: true
with_nested: with_nested:
@ -183,8 +184,9 @@
- name: install toxcore pips HOST - name: install toxcore pips HOST
environment: "{{ portage_proxy_env }}" environment: "{{ portage_proxy_env }}"
become_user: "{{ BOX_USER_NAME }}"
shell: | shell: |
sudo -u "{{ BOX_USER_NAME }}" \ # sudo -u "{{ BOX_USER_NAME }}"
pip3.sh install {{' '.join(toxcore_pips3_inst_host if ansible_virtualization_role|replace('NA', 'host') == 'host' else toxcore_pips3_inst_guest)}} pip3.sh install {{' '.join(toxcore_pips3_inst_host if ansible_virtualization_role|replace('NA', 'host') == 'host' else toxcore_pips3_inst_guest)}}
ignore_errors: "{{ BASE_PKG_IGNORE_ERRORS }}" ignore_errors: "{{ BASE_PKG_IGNORE_ERRORS }}"
when: when:
@ -222,6 +224,7 @@
loop_control: loop_control:
loop_var: LOOP_USER_F loop_var: LOOP_USER_F
# broken rc-update: service `qemu-quest-agent' does not exist
- name: enable and start toxcore services - name: enable and start toxcore services
service: service:
name: "{{ item }}" name: "{{ item }}"
@ -259,7 +262,7 @@
# this should not run as root # this should not run as root
- name: "usr_local_toxcore.bash" - name: "usr_local_toxcore.bash"
become_user: "{{ LOOP_USER_F[0] }}" become_user: "{{ BOX_USER_NAME }}"
environment: "{{ shell_proxy_env }}" environment: "{{ shell_proxy_env }}"
shell: | shell: |
umask 0002 umask 0002

View File

@ -35,8 +35,10 @@ toxcore_pkgs_inst:
- dev-vcs/git - dev-vcs/git
- gpg - gpg
- python3-yaml - python3-yaml
- xmlstarlet - app-text/xmlstarlet
- dev-python/pylint - dev-python/pylint
- dev-python/flake8
- dev-python/isort
# - app-portage/gentoolkit # - app-portage/gentoolkit
- sys-apps/gptfdisk - sys-apps/gptfdisk
- app-admin/testdisk - app-admin/testdisk

View File

@ -19,10 +19,14 @@ toxcore_kmods_not_in_host:
toxcore_gpg_keys_system: toxcore_gpg_keys_system:
- uid: "30737D12308C9D0C882FC34B57CB0A121BAECB2E" #
primary: "70ACBB6BFEE7BC572A8941D19266C4FA11FD00FD" - uid: "9BAD8B9BBD1CBDEDE3443292900F3C4971086004"
name: "Daniel Robbins (metro:node) <drobbins@funtoo.org>" primary: "9BAD8B9BBD1CBDEDE3443292900F3C4971086004"
key: "9266C4FA11FD00FD" name: "Harald Hoyer <harald@hoyer.xyz>"
key: "4BC0896FB5693595"
fingerprint: "9BAD 8B9B BD1C BDED E344 3292 900F 3C49 7108 6004"
uri: "https://harald.hoyer.xyz/files/gpg-key.txt"
html: https://harald.hoyer.xyz/impressum/
toxcore_services_started: toxcore_services_started:
- "{{ toxcore_libvirt_services_host if 'libvirt' in TOXCORE_FEATURES and ansible_virtualization_role|replace('NA', 'host') == 'host' else [] }}" - "{{ toxcore_libvirt_services_host if 'libvirt' in TOXCORE_FEATURES and ansible_virtualization_role|replace('NA', 'host') == 'host' else [] }}"

67
setup.cfg Normal file
View File

@ -0,0 +1,67 @@
[metadata]
classifiers =
License :: OSI Approved
License :: OSI Approved :: BSD 1-clause
Intended Audience :: Web Developers
Operating System :: Microsoft :: Windows
Operating System :: POSIX :: BSD :: FreeBSD
Operating System :: POSIX :: Linux
Programming Language :: Python :: 3 :: Only
Programming Language :: Python :: 3.6
Programming Language :: Python :: 3.7
Programming Language :: Python :: 3.8
Programming Language :: Python :: 3.9
Programming Language :: Python :: 3.10
Programming Language :: Python :: 3.11
Programming Language :: Python :: Implementation :: CPython
Framework :: AsyncIO
[options]
zip_safe = false
python_requires = ~=3.6
include_package_data = false
install_requires =
qasync
cryptography
rsa
stem
ruamel.yaml
package_dir=
=src
packages=find:
[options.packages.find]
where=src
[options.entry_points]
console_scripts =
phantompy = phantompy.__main__:iMain
exclude_badExits = exclude_badExits:iMain
[easy_install]
zip_ok = false
[flake8]
jobs = 1
max-line-length = 88
ignore =
E111
E114
E128
E225
E261
E302
E305
E402
E501
E502
E541
E701
E702
E704
E722
E741
F508
F541
W503
W601

102
stem_examples.txt Normal file
View File

@ -0,0 +1,102 @@
# -*-mode: doctest; tab-width: 0; py-indent-offset: 4; coding: utf-8-unix -*-
== stem_examples tor testing ==
This is a Python doctest file that is executable documentation.
Pass the controller password if needed as an environment variable:
>>> import os
>>> assert os.environ['TOR_CONTROLLER_PASSWORD']
Add our code to the PYTHONPATH
>>> import sys
>>> sys.path.append(os.path.join(os.getcwd(), 'src', 'stem_examples'))
We'll need the settings defined in {{{/usr/local/etc/testforge/testforge.yml}}}
>>> print("yaml", file=sys.stderr)
>>> import yaml
>>> sFacts = open('/usr/local/etc/testforge/testforge.yml').read()
>>> assert sFacts
>>> dFacts = yaml.safe_load(sFacts)
FixMe: use the settings for the ports and directories below.
>>> import os
>>> os.environ['http_proxy'] = 'http://'+dFacts['HTTP_PROXYHOST']+':'+str(dFacts['HTTP_PROXYPORT'])
>>> os.environ['https_proxy'] = 'http://'+dFacts['HTTPS_PROXYHOST']+':'+str(dFacts['HTTPS_PROXYPORT'])
>>> os.environ['socks_proxy'] = 'socks5://'+dFacts['SOCKS_PROXYHOST']+':'+str(dFacts['SOCKS_PROXYPORT'])
### compare_flags Comparing Directory Authority Flags
### introduction_points Introduction Points
>>> print("introduction_points", file=sys.stderr)
>>> import introduction_points
The introduction points are the ways of connecting to hidden services.
We test 3 known hidden services: Facebook, DuckDuckGo and a keyserver.
>>> lKNOWN_ONIONS = [
... 'facebookwkhpilnemxj7asaniu7vnjjbiltxjqhye3mhbshg7kx5tfyd', # facebook
... 'duckduckgogg42xjoc72x3sjasowoarfbgcmvfimaftt6twagswzczad', # ddg
... 'zkaan2xfbuxia2wpf7ofnkbz6r5zdbbvxbunvp5g2iebopbfc4iqmbad', # hks
... ]
We will expect to get back the hidden service version, the descriptor-lifetime
and then the descriptor-signing-key-cert:
>>> introduction_points.iMain(lKNOWN_ONIONS) #doctest: +ELLIPSIS +NORMALIZE_WHITESPACE
hs-descriptor 3
descriptor-lifetime ...
<BLANKLINE>
### exit_used Determine The Exit You're Using
Lets say you're using Tor and one day you run into something odd. Maybe a
misconfigured relay, or maybe one that's being malicious.
How can you figure out what exit you're using?
>>> print("exit_used", file=sys.stderr)
>>> import exit_used
## relay_connections Connection Summary
>>> print("relay_connections", file=sys.stderr)
>>> import relay_connections
The following provides a summary of your relay's inbound and outbound connections.
You must be root or tor to run this:
relay_connections.iMain(["--ctrlport", "9051"])
## outdated_relays
>>> print("outdated_relays", file=sys.stderr)
>>> import outdated_relays
>>> outdated_relays.iMain() #doctest: +ELLIPSIS +NORMALIZE_WHITESPACE
Checking for outdated relays ...
<BLANKLINE>
## tor_bootstrap_check
>>> print("tor_bootstrap_check", file=sys.stderr)
>>> import tor_bootstrap_check
A script by adrelanos@riseup.net to check what percentage of bootstrapping
tor is at. This fails under doctest but not from the cmdline
>> tor_bootstrap_check.iMain() #doctest: +ELLIPSIS +NORMALIZE_WHITESPACE
NOTICE ...
<BLANKLINE>
control_port = stem.socket.ControlPort(address, port)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/usr/local/lib/python3.11/site-packages/stem/socket.py", line 503, in __init__
self.connect()
File "/usr/local/lib/python3.11/site-packages/stem/socket.py", line 172, in connect
self._socket = self._make_socket()
^^^^^^^^^^^^^^^^^^^
File "/usr/local/lib/python3.11/site-packages/stem/socket.py", line 538, in _make_socket
raise stem.SocketError(exc)
stem.SocketError: Socket error: 0x01: General SOCKS server failure