pep8 isort

emdee 2022-11-17 08:58:45 +00:00
parent d6200d6302
commit aac3793b35
3 changed files with 192 additions and 188 deletions
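Since the commit message only says "pep8 isort", a quick illustration of what isort does to an import block may help when reading the hunks below. This is a sketch assuming isort 5.x with default settings; the exact invocation used for this commit is not recorded here.

    # Sketch only: assumes isort 5.x defaults, not the project's actual config.
    import isort

    messy = (
        "import sys\n"
        "import os\n"
        "from io import StringIO\n"
        "import argparse\n"
    )
    print(isort.code(messy), end="")
    # Straight stdlib imports come out sorted alphabetically, followed by the
    # "from" imports, matching the reordering in the first hunk below:
    #   import argparse
    #   import os
    #   import sys
    #   from io import StringIO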

exclude_badExits.py

@ -79,24 +79,21 @@ For usage, do ```python3 exclude_badExits.py --help`
 """
-import sys
-import os
-import re
-import socket
-import time
 import argparse
-import string
+import os
+import sys
+import time
 from io import StringIO
-import ipaddress
-# list(ipaddress._find_address_range(ipaddress.IPv4Network('172.16.0.0/12'))
-from urllib3.util.ssl_match_hostname import CertificateError
 import stem
+import urllib3
 from stem import InvalidRequest
-from stem.control import Controller
 from stem.connection import IncorrectPassword
 from stem.util.tor_tools import is_valid_fingerprint
+from urllib3.util.ssl_match_hostname import CertificateError
+# list(ipaddress._find_address_range(ipaddress.IPv4Network('172.16.0.0/12'))
 try:
     from ruamel.yaml import YAML
     yaml = YAML(typ='rt')
@ -112,20 +109,24 @@ if yaml is None:
yaml = None yaml = None
try: try:
from unbound import ub_ctx,RR_TYPE_TXT,RR_CLASS_IN from unbound import RR_CLASS_IN, RR_TYPE_TXT, ub_ctx
except: except:
ub_ctx = RR_TYPE_TXT = RR_CLASS_IN = None ub_ctx = RR_TYPE_TXT = RR_CLASS_IN = None
global LOG global LOG
import logging import logging
import warnings import warnings
warnings.filterwarnings('ignore') warnings.filterwarnings('ignore')
LOG = logging.getLogger() LOG = logging.getLogger()
from support_onions import (bAreWeConnected, icheck_torrc, lIntroductionPoints,
oGetStemController, vwait_for_controller,
yKNOWN_NODNS, zResolveDomain)
from support_phantompy import vsetup_logging from support_phantompy import vsetup_logging
from trustor_poc import TrustorError, idns_validate
from trustor_poc import oDownloadUrlUrllib3 as oDownloadUrl from trustor_poc import oDownloadUrlUrllib3 as oDownloadUrl
from trustor_poc import idns_validate, TrustorError
from support_onions import icheck_torrc, bAreWeConnected, lIntroductionPoints, zResolveDomain, vwait_for_controller, yKNOWN_NODNS
LOG.info("imported HTTPSAdapter") LOG.info("imported HTTPSAdapter")
ETC_DIR = '/etc/tor/yaml' ETC_DIR = '/etc/tor/yaml'
@ -138,17 +139,6 @@ sEXCLUDE_EXIT_KEY = 'ExcludeNodes'
sINCLUDE_EXIT_KEY = 'ExitNodes' sINCLUDE_EXIT_KEY = 'ExitNodes'
sINCLUDE_GUARD_KEY = 'EntryNodes' sINCLUDE_GUARD_KEY = 'EntryNodes'
def oMakeController(sSock='', port=9051):
import getpass
if sSock and os.path.exists(sSock):
controller = Controller.from_socket_file(path=sSock)
else:
controller = Controller.from_port(port=port)
sys.stdout.flush()
p = getpass.unix_getpass(prompt='Controller Password: ', stream=sys.stderr)
controller.authenticate(p)
return controller
oBAD_NODES = {} oBAD_NODES = {}
oBAD_ROOT = 'BadNodes' oBAD_ROOT = 'BadNodes'
oBAD_NODES[oBAD_ROOT] = {} oBAD_NODES[oBAD_ROOT] = {}
@ -163,8 +153,8 @@ def lYamlBadNodes(sFile,
global lKNOWN_NODNS global lKNOWN_NODNS
global lMAYBE_NODNS global lMAYBE_NODNS
l = [] if not yaml:
if not yaml: return l return []
if os.path.exists(sFile): if os.path.exists(sFile):
with open(sFile, 'rt') as oFd: with open(sFile, 'rt') as oFd:
oBAD_NODES = safe_load(oFd) oBAD_NODES = safe_load(oFd)
@ -188,7 +178,6 @@ oGOOD_NODES = {}
oGOOD_ROOT = 'GoodNodes' oGOOD_ROOT = 'GoodNodes'
def lYamlGoodNodes(sFile='/etc/tor/torrc-goodnodes.yaml'): def lYamlGoodNodes(sFile='/etc/tor/torrc-goodnodes.yaml'):
global oGOOD_NODES global oGOOD_NODES
root = oGOOD_ROOT
l = [] l = []
if not yaml: return l if not yaml: return l
if os.path.exists(sFile): if os.path.exists(sFile):
@ -225,13 +214,13 @@ lBOOLS = ['dnssec', 'dnsqname', 'aesni', 'autoupdate', 'dnslocalrootzone',
def aVerifyContact(a, fp, https_cafile, timeout=20, host='127.0.0.1', port=9050): def aVerifyContact(a, fp, https_cafile, timeout=20, host='127.0.0.1', port=9050):
global tBAD_URLS global tBAD_URLS
global lKNOWN_NODNS global lKNOWN_NODNS
# cleanups for yaml # cleanups
for elt in lINTS: for elt in lINTS:
if elt in a: if elt in a:
a[elt] = int(a[elt]) a[elt] = int(a[elt])
for elt in lBOOLS: for elt in lBOOLS:
if elt in a: if elt in a:
if a[elt] in ['y','yes', 'true', 'True']: if a[elt] in ['y', 'yes', 'true', 'True']:
a[elt] = True a[elt] = True
else: else:
a[elt] = False a[elt] = False
@ -291,7 +280,7 @@ def aVerifyContact(a, fp, https_cafile, timeout=20, host='127.0.0.1', port=9050)
if a['proof'] not in ['uri-rsa']: if a['proof'] not in ['uri-rsa']:
# only support uri for now # only support uri for now
if False and ub_ctx: if False and ub_ctx:
fp_domain = fp +'.'+domain fp_domain = fp + '.' + domain
if idns_validate(fp_domain, if idns_validate(fp_domain,
libunbound_resolv_file='resolv.conf', libunbound_resolv_file='resolv.conf',
dnssec_DS_file='dnssec-root-trust', dnssec_DS_file='dnssec-root-trust',
@ -301,7 +290,7 @@ def aVerifyContact(a, fp, https_cafile, timeout=20, host='127.0.0.1', port=9050)
return a return a
LOG.debug(f"{len(keys)} contact fields for {fp}") LOG.debug(f"{len(keys)} contact fields for {fp}")
url="https://"+domain+"/.well-known/tor-relay/rsa-fingerprint.txt" url = f"https://{domain}/.well-known/tor-relay/rsa-fingerprint.txt"
try: try:
LOG.debug(f"Downloading from {domain} for {fp}") LOG.debug(f"Downloading from {domain} for {fp}")
o = oDownloadUrl(url, https_cafile, o = oDownloadUrl(url, https_cafile,
@ -319,7 +308,10 @@ def aVerifyContact(a, fp, https_cafile, timeout=20, host='127.0.0.1', port=9050)
else: else:
LOG.warn(f"TrustorError downloading from {domain} {e.args}") LOG.warn(f"TrustorError downloading from {domain} {e.args}")
tBAD_URLS.add(a['url']) tBAD_URLS.add(a['url'])
except (BaseException ) as e: except urllib3.exceptions.MaxRetryError as e: # noqa
# maybe offline - not bad
LOG.warn(f"MaxRetryError downloading from {domain} {e}")
except (BaseException) as e:
LOG.error(f"Exception {type(e)} downloading from {domain} {e}") LOG.error(f"Exception {type(e)} downloading from {domain} {e}")
else: else:
if hasattr(o, 'status'): if hasattr(o, 'status'):
@ -344,7 +336,7 @@ def aVerifyContact(a, fp, https_cafile, timeout=20, host='127.0.0.1', port=9050)
if not l: if not l:
LOG.warn(f"Downloading from {domain} empty for {fp}") LOG.warn(f"Downloading from {domain} empty for {fp}")
else: else:
a['fps'] = [elt for elt in l if elt and len(elt) == 40 a['fps'] = [elt for elt in l if elt and len(elt) == 40 \
and not elt.startswith('#')] and not elt.startswith('#')]
LOG.info(f"Downloaded from {domain} {len(a['fps'])} FPs") LOG.info(f"Downloaded from {domain} {len(a['fps'])} FPs")
return a return a
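The hunk above adds a dedicated except branch so that urllib3.exceptions.MaxRetryError (typically an unreachable host or proxy, i.e. "maybe offline - not bad") is only warned about instead of being handled like every other download failure. A minimal sketch of the same pattern, using a plain urllib3.PoolManager rather than this repo's oDownloadUrl wrapper:

    # Sketch of the error-handling split added above; oDownloadUrl in this repo
    # also takes CA-file, SOCKS host and port arguments, so this is not a drop-in.
    import logging
    import urllib3

    LOG = logging.getLogger()

    def fetch_rsa_proof(domain, cafile):
        url = f"https://{domain}/.well-known/tor-relay/rsa-fingerprint.txt"
        http = urllib3.PoolManager(ca_certs=cafile)
        try:
            return http.request('GET', url, timeout=10.0)
        except urllib3.exceptions.MaxRetryError as e:
            LOG.warning(f"MaxRetryError downloading from {domain} {e}")  # maybe offline - not bad
        except BaseException as e:
            LOG.error(f"Exception {type(e)} downloading from {domain} {e}")
        return None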
@ -379,7 +371,7 @@ def aParseContact(contact, fp):
if ':' in line] if ':' in line]
LOG.debug(f"{fp} {len(l)} fields") LOG.debug(f"{fp} {len(l)} fields")
s = f'"{fp}":\n' s = f'"{fp}":\n'
s += '\n'.join([f" {line}\"".replace(':',': \"', 1) s += '\n'.join([f" {line}\"".replace(':', ': \"', 1)
for line in l]) for line in l])
oFd = StringIO(s) oFd = StringIO(s)
a = safe_load(oFd) a = safe_load(oFd)
@ -434,8 +426,7 @@ def oMainArgparser(_=None):
default=os.path.join(ETC_DIR, 'badcontacts.yaml'), default=os.path.join(ETC_DIR, 'badcontacts.yaml'),
help="Yaml file of bad contacts that bad FPs are using") help="Yaml file of bad contacts that bad FPs are using")
parser.add_argument('--strict_nodes', type=int, default=0, parser.add_argument('--strict_nodes', type=int, default=0, choices=[0, 1],
choices=[0,1],
help="Set StrictNodes: 1 is less anonymous but more secure, although some sites may be unreachable") help="Set StrictNodes: 1 is less anonymous but more secure, although some sites may be unreachable")
parser.add_argument('--wait_boot', type=int, default=120, parser.add_argument('--wait_boot', type=int, default=120,
help="Seconds to wait for Tor to booststrap") help="Seconds to wait for Tor to booststrap")
@ -456,29 +447,29 @@ def oMainArgparser(_=None):
help="Write the proof data of the included nodes to a YAML file") help="Write the proof data of the included nodes to a YAML file")
return parser return parser
def vwrite_badnodes(oArgs, oBAD_NODES, slen): def vwrite_badnodes(oargs, oBAD_NODES, slen):
if oArgs.bad_nodes: if oargs.bad_nodes:
tmp = oArgs.bad_nodes +'.tmp' tmp = oargs.bad_nodes +'.tmp'
bak = oArgs.bad_nodes +'.bak' bak = oargs.bad_nodes +'.bak'
with open(tmp, 'wt') as oFYaml: with open(tmp, 'wt') as oFYaml:
yaml.dump(oBAD_NODES, oFYaml) yaml.dump(oBAD_NODES, oFYaml)
LOG.info(f"Wrote {slen} to {oArgs.bad_nodes}") LOG.info(f"Wrote {slen} to {oargs.bad_nodes}")
oFYaml.close() oFYaml.close()
if os.path.exists(oArgs.bad_nodes): if os.path.exists(oargs.bad_nodes):
os.rename(oArgs.bad_nodes, bak) os.rename(oargs.bad_nodes, bak)
os.rename(tmp, oArgs.bad_nodes) os.rename(tmp, oargs.bad_nodes)
def vwrite_goodnodes(oArgs, oGOOD_NODES, ilen): def vwrite_goodnodes(oargs, oGOOD_NODES, ilen):
if oArgs.good_nodes: if oargs.good_nodes:
tmp = oArgs.good_nodes +'.tmp' tmp = oargs.good_nodes +'.tmp'
bak = oArgs.good_nodes +'.bak' bak = oargs.good_nodes +'.bak'
with open(tmp, 'wt') as oFYaml: with open(tmp, 'wt') as oFYaml:
yaml.dump(oGOOD_NODES, oFYaml) yaml.dump(oGOOD_NODES, oFYaml)
LOG.info(f"Wrote {ilen} good relays to {oArgs.good_nodes}") LOG.info(f"Wrote {ilen} good relays to {oargs.good_nodes}")
oFYaml.close() oFYaml.close()
if os.path.exists(oArgs.good_nodes): if os.path.exists(oargs.good_nodes):
os.rename(oArgs.good_nodes, bak) os.rename(oargs.good_nodes, bak)
os.rename(tmp, oArgs.good_nodes) os.rename(tmp, oargs.good_nodes)
def iMain(lArgs): def iMain(lArgs):
global aTRUST_DB global aTRUST_DB
@ -487,18 +478,18 @@ def iMain(lArgs):
global oGOOD_NODES global oGOOD_NODES
global lKNOWN_NODNS global lKNOWN_NODNS
parser = oMainArgparser() parser = oMainArgparser()
oArgs = parser.parse_args(lArgs) oargs = parser.parse_args(lArgs)
vsetup_logging(oArgs.log_level) vsetup_logging(oargs.log_level)
if bAreWeConnected() is False: if bAreWeConnected() is False:
raise SystemExit("we are not connected") raise SystemExit("we are not connected")
sFile = oArgs.torrc sFile = oargs.torrc
if sFile and os.path.exists(sFile): if sFile and os.path.exists(sFile):
icheck_torrc(sFile, oArgs) icheck_torrc(sFile, oargs)
twhitelist_set = set() twhitelist_set = set()
sFile = oArgs.good_contacts sFile = oargs.good_contacts
if sFile and os.path.exists(sFile): if sFile and os.path.exists(sFile):
try: try:
with open(sFile, 'rt') as oFd: with open(sFile, 'rt') as oFd:
@ -506,7 +497,7 @@ def iMain(lArgs):
LOG.info(f"{len(aTRUST_DB.keys())} trusted contacts from {sFile}") LOG.info(f"{len(aTRUST_DB.keys())} trusted contacts from {sFile}")
# reverse lookup of fps to contacts # reverse lookup of fps to contacts
# but... # but...
for k,v in aTRUST_DB.items(): for (k, v,) in aTRUST_DB.items():
if 'modified' not in v.keys(): if 'modified' not in v.keys():
v['modified'] = int(time.time()) v['modified'] = int(time.time())
aTRUST_DB_INDEX[k] = v aTRUST_DB_INDEX[k] = v
@ -520,19 +511,19 @@ def iMain(lArgs):
except Exception as e: except Exception as e:
LOG.exception(f"Error reading YAML TrustDB {sFile} {e}") LOG.exception(f"Error reading YAML TrustDB {sFile} {e}")
if os.path.exists(oArgs.proxy_ctl): if os.path.exists(oargs.proxy_ctl):
controller = oMakeController(sSock=oArgs.proxy_ctl) controller = oGetStemController(log_level=oargs.log_level, sock_or_pair=oargs.proxy_ctl)
else: else:
port =int(oArgs.proxy_ctl) port =int(oargs.proxy_ctl)
controller = oMakeController(port=port) controller = oGetStemController(port=port)
vwait_for_controller(controller, oArgs.wait_boot) vwait_for_controller(controller, oargs.wait_boot)
if oArgs.good_contacts: if oargs.good_contacts:
good_contacts_tmp = oArgs.good_contacts + '.tmp' good_contacts_tmp = oargs.good_contacts + '.tmp'
elt = controller.get_conf('UseMicrodescriptors') elt = controller.get_conf('UseMicrodescriptors')
if elt != '0' : if elt != '0':
LOG.error('"UseMicrodescriptors 0" is required in your /etc/tor/torrc. Exiting.') LOG.error('"UseMicrodescriptors 0" is required in your /etc/tor/torrc. Exiting.')
controller.set_conf('UseMicrodescriptors', 0) controller.set_conf('UseMicrodescriptors', 0)
# does it work dynamically? # does it work dynamically?
@ -542,8 +533,8 @@ def iMain(lArgs):
if elt and elt != '{??}': if elt and elt != '{??}':
LOG.warn(f"{sEXCLUDE_EXIT_KEY} is in use already") LOG.warn(f"{sEXCLUDE_EXIT_KEY} is in use already")
twhitelist_set.update(set(lYamlGoodNodes(oArgs.good_nodes))) twhitelist_set.update(set(lYamlGoodNodes(oargs.good_nodes)))
LOG.info(f"lYamlGoodNodes {len(twhitelist_set)} GuardNodes from {oArgs.good_nodes}") LOG.info(f"lYamlGoodNodes {len(twhitelist_set)} GuardNodes from {oargs.good_nodes}")
global oGOOD_NODES global oGOOD_NODES
t = set() t = set()
@ -558,21 +549,21 @@ def iMain(lArgs):
# Provides the descriptor for a hidden service. The **address** is the # Provides the descriptor for a hidden service. The **address** is the
# '.onion' address of the hidden service # '.onion' address of the hidden service
w = set(oGOOD_NODES[oGOOD_ROOT]['Onions']) w = set(oGOOD_NODES[oGOOD_ROOT]['Onions'])
if oArgs.white_onions: if oargs.white_onions:
w.update(oArgs.white_onions.split(',')) w.update(oargs.white_onions.split(','))
if oArgs.points_timeout > 0: if oargs.points_timeout > 0:
LOG.info(f"{len(w)} services will be checked from IntroductionPoints") LOG.info(f"{len(w)} services will be checked from IntroductionPoints")
t.update(lIntroductionPoints(controller, w, itimeout=oArgs.points_timeout)) t.update(lIntroductionPoints(controller, w, itimeout=oargs.points_timeout))
if len(t) > 0: if len(t) > 0:
LOG.info(f"IntroductionPoints {len(t)} relays from {len(w)} services") LOG.info(f"IntroductionPoints {len(t)} relays from {len(w)} services")
twhitelist_set.update(t) twhitelist_set.update(t)
texclude_set = set() texclude_set = set()
if oArgs.bad_nodes and os.path.exists(oArgs.bad_nodes): if oargs.bad_nodes and os.path.exists(oargs.bad_nodes):
if False and oArgs.bad_sections: if False and oargs.bad_sections:
# BROKEN # BROKEN
sections = oArgs.bad_sections.split(',') sections = oargs.bad_sections.split(',')
texclude_set = set(lYamlBadNodes(oArgs.bad_nodes, texclude_set = set(lYamlBadNodes(oargs.bad_nodes,
lWanted=sections, lWanted=sections,
section=sEXCLUDE_EXIT_KEY)) section=sEXCLUDE_EXIT_KEY))
LOG.info(f"Preloaded {len(texclude_set)} bad fps") LOG.info(f"Preloaded {len(texclude_set)} bad fps")
@ -583,7 +574,7 @@ def iMain(lArgs):
iTotalContacts = 0 iTotalContacts = 0
aBadContacts = {} aBadContacts = {}
lConds = oArgs.contact.split(',') lConds = oargs.contact.split(',')
iR = 0 iR = 0
relays = controller.get_server_descriptors() relays = controller.get_server_descriptors()
@ -612,17 +603,17 @@ def iMain(lArgs):
relay.contact = str(relay.contact, 'UTF-8') relay.contact = str(relay.contact, 'UTF-8')
if ('Empty' in lConds and not relay.contact) or \ if ('Empty' in lConds and not relay.contact) or \
('NoEmail' in lConds and relay.contact and not 'email:' in relay.contact): ('NoEmail' in lConds and relay.contact and 'email:' not in relay.contact):
texclude_set.add(relay.fingerprint) texclude_set.add(relay.fingerprint)
continue continue
if not relay.contact or not 'ciissversion:' in relay.contact: if not relay.contact or 'ciissversion:' not in relay.contact:
# should be unreached 'Empty' should always be in lConds # should be unreached 'Empty' should always be in lConds
continue continue
iTotalContacts += 1 iTotalContacts += 1
fp = relay.fingerprint fp = relay.fingerprint
if relay.contact and not 'url:' in relay.contact: if relay.contact and 'url:' not in relay.contact:
LOG.info(f"{fp} skipping bad contact - no url: {sofar}") LOG.info(f"{fp} skipping bad contact - no url: {sofar}")
LOG.debug(f"{fp} {relay.contact} {sofar}") LOG.debug(f"{fp} {relay.contact} {sofar}")
texclude_set.add(fp) texclude_set.add(fp)
@ -632,7 +623,7 @@ def iMain(lArgs):
# first rough cut # first rough cut
i = c.find('url:') i = c.find('url:')
if i >=0: if i >=0:
c = c[i+4:] c = c[i + 4:]
i = c.find(' ') i = c.find(' ')
if i >=0: c = c[:i] if i >=0: c = c[:i]
c = c.lstrip('https://').lstrip('http://').strip('/') c = c.lstrip('https://').lstrip('http://').strip('/')
@ -682,15 +673,14 @@ def iMain(lArgs):
texclude_set.add(relay.fingerprint) texclude_set.add(relay.fingerprint)
continue continue
b = aVerifyContact(list(a.values())[0], b = aVerifyContact(list(a.values())[0],
relay.fingerprint, relay.fingerprint,
oArgs.https_cafile, oargs.https_cafile,
timeout=oArgs.timeout, timeout=oargs.timeout,
host=oArgs.proxy_host, host=oargs.proxy_host,
port=oArgs.proxy_port) port=oargs.proxy_port)
# need to skip urllib3.exceptions.MaxRetryError
if not b or not 'fps' in b or not b['fps'] or not b['url']: if not b or 'fps' not in b or not b['fps'] or not b['url']:
LOG.warn(f"{relay.fingerprint} did NOT VERIFY {sofar}") LOG.warn(f"{relay.fingerprint} did NOT VERIFY {sofar}")
LOG.debug(f"{relay.fingerprint} {b} {sofar}") LOG.debug(f"{relay.fingerprint} {b} {sofar}")
# If it's giving contact info that doesnt check out # If it's giving contact info that doesnt check out
@ -712,7 +702,7 @@ def iMain(lArgs):
aTRUST_DB[relay.fingerprint] = b aTRUST_DB[relay.fingerprint] = b
for elt in b['fps']: for elt in b['fps']:
aTRUST_DB_INDEX[elt] = b aTRUST_DB_INDEX[elt] = b
if oArgs.good_contacts and oArgs.log_level <= 20: if oargs.good_contacts and oargs.log_level <= 20:
# as we go along then clobber # as we go along then clobber
with open(good_contacts_tmp, 'wt') as oFYaml: with open(good_contacts_tmp, 'wt') as oFYaml:
yaml.dump(aTRUST_DB, oFYaml) yaml.dump(aTRUST_DB, oFYaml)
@ -724,37 +714,37 @@ def iMain(lArgs):
texclude_set = texclude_set.difference(tdns_urls) texclude_set = texclude_set.difference(tdns_urls)
LOG.info(f"{len(list(aTRUST_DB.keys()))} good contacts out of {iTotalContacts}") LOG.info(f"{len(list(aTRUST_DB.keys()))} good contacts out of {iTotalContacts}")
if oArgs.torrc_output and texclude_set: if oargs.torrc_output and texclude_set:
with open(oArgs.torrc_output, 'wt') as oFTorrc: with open(oargs.torrc_output, 'wt') as oFTorrc:
oFTorrc.write(f"{sEXCLUDE_EXIT_KEY} {','.join(texclude_set)}\n") oFTorrc.write(f"{sEXCLUDE_EXIT_KEY} {','.join(texclude_set)}\n")
oFTorrc.write(f"{sINCLUDE_EXIT_KEY} {','.join(aTRUST_DB_INDEX.keys())}\n") oFTorrc.write(f"{sINCLUDE_EXIT_KEY} {','.join(aTRUST_DB_INDEX.keys())}\n")
oFTorrc.write(f"{sINCLUDE_GUARD_KEY} {','.join(oGOOD_NODES[oGOOD_ROOT]['GuardNodes'])}\n") oFTorrc.write(f"{sINCLUDE_GUARD_KEY} {','.join(oGOOD_NODES[oGOOD_ROOT]['GuardNodes'])}\n")
LOG.info(f"Wrote tor configuration to {oArgs.torrc_output}") LOG.info(f"Wrote tor configuration to {oargs.torrc_output}")
oFTorrc.close() oFTorrc.close()
if oArgs.bad_contacts and aBadContacts: if oargs.bad_contacts and aBadContacts:
# for later analysis # for later analysis
with open(oArgs.bad_contacts, 'wt') as oFYaml: with open(oargs.bad_contacts, 'wt') as oFYaml:
yaml.dump(aBadContacts, oFYaml) yaml.dump(aBadContacts, oFYaml)
oFYaml.close() oFYaml.close()
if oArgs.good_contacts != '' and aTRUST_DB: if oargs.good_contacts != '' and aTRUST_DB:
with open(good_contacts_tmp, 'wt') as oFYaml: with open(good_contacts_tmp, 'wt') as oFYaml:
yaml.dump(aTRUST_DB, oFYaml) yaml.dump(aTRUST_DB, oFYaml)
oFYaml.close() oFYaml.close()
if os.path.exists(oArgs.good_contacts): if os.path.exists(oargs.good_contacts):
bak = oArgs.good_contacts +'.bak' bak = oargs.good_contacts +'.bak'
os.rename(oArgs.good_contacts, bak) os.rename(oargs.good_contacts, bak)
os.rename(good_contacts_tmp, oArgs.good_contacts) os.rename(good_contacts_tmp, oargs.good_contacts)
LOG.info(f"Wrote {len(list(aTRUST_DB.keys()))} good contact details to {oArgs.good_contacts}") LOG.info(f"Wrote {len(list(aTRUST_DB.keys()))} good contact details to {oargs.good_contacts}")
oBAD_NODES[oBAD_ROOT]['ExcludeNodes']['BadExit'] = list(texclude_set) oBAD_NODES[oBAD_ROOT]['ExcludeNodes']['BadExit'] = list(texclude_set)
oBAD_NODES[oBAD_ROOT]['ExcludeDomains'] = lKNOWN_NODNS oBAD_NODES[oBAD_ROOT]['ExcludeDomains'] = lKNOWN_NODNS
vwrite_badnodes(oArgs, oBAD_NODES, str(len(texclude_set))) vwrite_badnodes(oargs, oBAD_NODES, str(len(texclude_set)))
oGOOD_NODES['GoodNodes']['Relays']['ExitNodes'] = list(aTRUST_DB_INDEX.keys()) oGOOD_NODES['GoodNodes']['Relays']['ExitNodes'] = list(aTRUST_DB_INDEX.keys())
# GuardNodes are readonly # GuardNodes are readonly
vwrite_goodnodes(oArgs, oGOOD_NODES, len(aTRUST_DB_INDEX.keys())) vwrite_goodnodes(oargs, oGOOD_NODES, len(aTRUST_DB_INDEX.keys()))
retval = 0 retval = 0
try: try:
@ -764,7 +754,7 @@ def iMain(lArgs):
LOG.info(f"{sEXCLUDE_EXIT_KEY} {len(texclude_set)} net bad exit relays") LOG.info(f"{sEXCLUDE_EXIT_KEY} {len(texclude_set)} net bad exit relays")
controller.set_conf(sEXCLUDE_EXIT_KEY, texclude_set) controller.set_conf(sEXCLUDE_EXIT_KEY, texclude_set)
except stem.SocketClosed as e: except stem.SocketClosed as e: # noqa
LOG.error(f"Failed setting {sEXCLUDE_EXIT_KEY} bad exit relays in Tor") LOG.error(f"Failed setting {sEXCLUDE_EXIT_KEY} bad exit relays in Tor")
retval += 1 retval += 1
@ -772,7 +762,7 @@ def iMain(lArgs):
if aTRUST_DB_INDEX.keys(): if aTRUST_DB_INDEX.keys():
LOG.info(f"{sINCLUDE_EXIT_KEY} {len(aTRUST_DB_INDEX.keys())} good relays") LOG.info(f"{sINCLUDE_EXIT_KEY} {len(aTRUST_DB_INDEX.keys())} good relays")
controller.set_conf(sINCLUDE_EXIT_KEY, aTRUST_DB_INDEX.keys()) controller.set_conf(sINCLUDE_EXIT_KEY, aTRUST_DB_INDEX.keys())
except stem.SocketClosed as e: except stem.SocketClosed as e: # noqa
LOG.error(f"Failed setting {sINCLUDE_EXIT_KEY} good exit nodes in Tor") LOG.error(f"Failed setting {sINCLUDE_EXIT_KEY} good exit nodes in Tor")
retval += 1 retval += 1
@ -783,12 +773,12 @@ def iMain(lArgs):
controller.set_conf(sINCLUDE_GUARD_KEY, controller.set_conf(sINCLUDE_GUARD_KEY,
oGOOD_NODES[oGOOD_ROOT]['GuardNodes']) oGOOD_NODES[oGOOD_ROOT]['GuardNodes'])
cur = controller.get_conf('StrictNodes') cur = controller.get_conf('StrictNodes')
if oArgs.strict_nodes and int(cur) != oArgs.strict_nodes: if oargs.strict_nodes and int(cur) != oargs.strict_nodes:
LOG.info(f"OVERRIDING StrictNodes to {oArgs.strict_nodes}") LOG.info(f"OVERRIDING StrictNodes to {oargs.strict_nodes}")
controller.set_conf('StrictNodes', oArgs.strict_nodes) controller.set_conf('StrictNodes', oargs.strict_nodes)
else: else:
LOG.info(f"StrictNodes is set to {cur}") LOG.info(f"StrictNodes is set to {cur}")
except stem.SocketClosed as e: except stem.SocketClosed as e: # noqa
LOG.error(f"Failed setting {sINCLUDE_EXIT_KEY} good exit nodes in Tor") LOG.error(f"Failed setting {sINCLUDE_EXIT_KEY} good exit nodes in Tor")
retval += 1 retval += 1
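With this commit the script no longer defines its own oMakeController (removed above); it relies on oGetStemController from support_onions, which keeps a single controller in a module-level variable and reuses it. A minimal sketch of that memoized pattern with stock stem calls; the real helper's signature (log_level, sock_or_pair) and password handling differ:

    # Sketch only: mirrors the cached-controller idea, not the exact
    # oGetStemController implementation in support_onions.
    import os
    from stem.control import Controller

    _CONTROLLER = None

    def get_cached_controller(sock_or_port='/run/tor/control'):
        global _CONTROLLER
        if _CONTROLLER:
            return _CONTROLLER
        if os.path.exists(sock_or_port):
            _CONTROLLER = Controller.from_socket_file(path=sock_or_port)
        else:
            _CONTROLLER = Controller.from_port(port=int(sock_or_port))
        _CONTROLLER.authenticate()  # cookie auth; pass password=... if required
        return _CONTROLLER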

support_onions.py

@ -1,29 +1,29 @@
 # -*- mode: python; indent-tabs-mode: nil; py-indent-offset: 4; coding: utf-8 -*-
+import getpass
 import os
-import sys
 import re
-import traceback
+import select
 import shutil
 import socket
-import select
+import sys
 import time
-import getpass
 if False:
     import cepa as stem
-    from cepa.control import Controller
     from cepa.connection import MissingPassword
+    from cepa.control import Controller
     from cepa.util.tor_tools import is_valid_fingerprint
 else:
     import stem
-    from stem.control import Controller
     from stem.connection import MissingPassword
+    from stem.control import Controller
     from stem.util.tor_tools import is_valid_fingerprint
 global LOG
 import logging
 import warnings
 warnings.filterwarnings('ignore')
 LOG = logging.getLogger()
@ -69,13 +69,24 @@ yKNOWN_NODNS = """
- w.cccs.de - w.cccs.de
""" """
def oMakeController(sSock='', port=9051):
import getpass
if sSock and os.path.exists(sSock):
controller = Controller.from_socket_file(path=sSock)
else:
controller = Controller.from_port(port=port)
sys.stdout.flush()
p = getpass.unix_getpass(prompt='Controller Password: ', stream=sys.stderr)
controller.authenticate(p)
return controller
oSTEM_CONTROLER = None oSTEM_CONTROLER = None
def oGetStemController(log_level=10, sock_or_pair='/run/tor/control'): def oGetStemController(log_level=10, sock_or_pair='/run/tor/control'):
global oSTEM_CONTROLER global oSTEM_CONTROLER
if oSTEM_CONTROLER: return oSTEM_CONTROLER if oSTEM_CONTROLER: return oSTEM_CONTROLER
from stem.util.log import Runlevel import stem.util.log
Runlevel = log_level stem.util.log.Runlevel = log_level
if os.path.exists(sock_or_pair): if os.path.exists(sock_or_pair):
LOG.info(f"controller from socket {sock_or_pair}") LOG.info(f"controller from socket {sock_or_pair}")
@ -154,18 +165,21 @@ def lIntroductionPoints(controller=None, lOnions=[], itimeout=120, log_level=10)
try: try:
from cryptography.utils import int_from_bytes from cryptography.utils import int_from_bytes
except ImportError: except ImportError:
import cryptography.utils
# guessing - not in the current cryptography but stem expects it # guessing - not in the current cryptography but stem expects it
def int_from_bytes(*args): return int.from_bytes(*args) def int_from_bytes(*args): return int.from_bytes(*args)
cryptography.utils.int_from_bytes = int_from_bytes cryptography.utils.int_from_bytes = int_from_bytes
# this will fail if the trick above didn't work # this will fail if the trick above didn't work
from stem.prereq import is_crypto_available from stem.prereq import is_crypto_available
is_crypto_available(ed25519 = True) is_crypto_available(ed25519=True)
from stem.descriptor.hidden_service import HiddenServiceDescriptorV3
from stem.client.datatype import LinkByFingerprint
from stem import Timeout
from queue import Empty from queue import Empty
from stem import Timeout
from stem.client.datatype import LinkByFingerprint
from stem.descriptor.hidden_service import HiddenServiceDescriptorV3
if type(lOnions) not in [set, tuple, list]: if type(lOnions) not in [set, tuple, list]:
lOnions = list(lOnions) lOnions = list(lOnions)
if controller is None: if controller is None:
@ -200,7 +214,7 @@ def lIntroductionPoints(controller=None, lOnions=[], itimeout=120, log_level=10)
lp += [bin_to_hex(linkspecifier.value)] lp += [bin_to_hex(linkspecifier.value)]
LOG.info(f"{len(lp)} introduction points for {elt}") LOG.info(f"{len(lp)} introduction points for {elt}")
l += lp l += lp
except (Empty, Timeout, ) as e: except (Empty, Timeout,) as e: # noqa
LOG.warn(f"Timed out getting introduction points for {elt}") LOG.warn(f"Timed out getting introduction points for {elt}")
continue continue
except Exception as e: except Exception as e:
@ -210,13 +224,13 @@ def lIntroductionPoints(controller=None, lOnions=[], itimeout=120, log_level=10)
def zResolveDomain(domain): def zResolveDomain(domain):
try: try:
ip = sTorResolve(domain) ip = sTorResolve(domain)
except Exception as e: except Exception as e: # noqa
ip = '' ip = ''
if ip == '': if ip == '':
try: try:
lpair = getaddrinfo(domain, 443) lpair = getaddrinfo(domain, 443)
except Exception as e: except Exception as e:
LOG.warn("{e}") LOG.warn(f"{e}")
lpair = None lpair = None
if lpair is None: if lpair is None:
LOG.warn(f"TorResolv and getaddrinfo failed for {domain}") LOG.warn(f"TorResolv and getaddrinfo failed for {domain}")
@ -233,13 +247,12 @@ def sTorResolve(target,
): ):
MAX_INFO_RESPONSE_PACKET_LENGTH = 8 MAX_INFO_RESPONSE_PACKET_LENGTH = 8
if '@' in target: if '@' in target:
LOG.warn(f"sTorResolve failed invalid hostname {target}" ) LOG.warn(f"sTorResolve failed invalid hostname {target}")
return '' return ''
target = target.strip('/') target = target.strip('/')
seb = b"\o004\o360\o000\o000\o000\o000\o000\o001\o000"
seb = b"\x04\xf0\x00\x00\x00\x00\x00\x01\x00" seb = b"\x04\xf0\x00\x00\x00\x00\x00\x01\x00"
seb += bytes(target, 'US-ASCII') + b"\x00" seb += bytes(target, 'US-ASCII') + b"\x00"
assert len(seb) == 10+len(target), str(len(seb))+repr(seb) assert len(seb) == 10 + len(target), str(len(seb)) + repr(seb)
# LOG.debug(f"0 Sending {len(seb)} to The TOR proxy {seb}") # LOG.debug(f"0 Sending {len(seb)} to The TOR proxy {seb}")
@ -247,7 +260,7 @@ def sTorResolve(target,
sock.connect((sHost, iPort)) sock.connect((sHost, iPort))
sock.settimeout(SOCK_TIMEOUT_SECONDS) sock.settimeout(SOCK_TIMEOUT_SECONDS)
oRet = sock.sendall(seb) oRet = sock.sendall(seb) # noqa
i = 0 i = 0
data = '' data = ''
@ -261,8 +274,7 @@ def sTorResolve(target,
flags=socket.MSG_WAITALL flags=socket.MSG_WAITALL
data = sock.recv(MAX_INFO_RESPONSE_PACKET_LENGTH, flags) data = sock.recv(MAX_INFO_RESPONSE_PACKET_LENGTH, flags)
except socket.timeout: except socket.timeout:
LOG.warn("4 The TOR proxy " \ LOG.warn(f"4 The TOR proxy {(sHost, iPort)}" \
+repr((sHost, iPort)) \
+" didnt reply in " + str(SOCK_TIMEOUT_SECONDS) + " sec." +" didnt reply in " + str(SOCK_TIMEOUT_SECONDS) + " sec."
+" #" +str(i)) +" #" +str(i))
except Exception as e: except Exception as e:
@ -271,7 +283,7 @@ def sTorResolve(target,
+" errored with " + str(e) +" errored with " + str(e)
+" #" +str(i)) +" #" +str(i))
sock.close() sock.close()
raise SystemExit(4) return ''
else: else:
if len(data) > 0: break if len(data) > 0: break
@ -280,9 +292,9 @@ def sTorResolve(target,
sLabel = "5 No reply #" sLabel = "5 No reply #"
else: else:
sLabel = "5 No data #" sLabel = "5 No data #"
LOG.info(sLabel +f"{i} on {sHost}:{iPort}" ) LOG.warn(f"sTorResolve: {sLabel} {i} on {sHost}:{iPort}")
sock.close() sock.close()
raise SystemExit(5) return ''
assert len(data) >= 8 assert len(data) >= 8
packet_sf = data[1] packet_sf = data[1]
@ -292,7 +304,7 @@ def sTorResolve(target,
return f"{data[4]}.{data[5]}.{data[6]}.{data[7]}" return f"{data[4]}.{data[5]}.{data[6]}.{data[7]}"
else: else:
# 91 # 91
LOG.warn(f"tor-resolve failed for {target} on {sHost}:{iPort}" ) LOG.warn(f"tor-resolve failed for {target} on {sHost}:{iPort}")
os.system(f"tor-resolve -4 {target} > /tmp/e 2>/dev/null") os.system(f"tor-resolve -4 {target} > /tmp/e 2>/dev/null")
# os.system("strace tor-resolve -4 "+target+" 2>&1|grep '^sen\|^rec'") # os.system("strace tor-resolve -4 "+target+" 2>&1|grep '^sen\|^rec'")
@ -321,8 +333,8 @@ def icheck_torrc(sFile, oArgs):
a = {} a = {}
for elt in l: for elt in l:
elt = elt.strip() elt = elt.strip()
if not elt or not ' ' in elt: continue if not elt or ' ' not in elt: continue
k,v = elt.split(' ', 1) (k, v,) = elt.split(' ', 1)
a[k] = v a[k] = v
keys = a keys = a
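For readers of the sTorResolve hunks above: the hand-built seb packet is a SOCKS4a RESOLVE request as Tor understands it (version 0x04, Tor's RESOLVE command 0xF0, port 0, the SOCKS4a marker address 0.0.0.1, an empty userid, then the NUL-terminated hostname), and the 8-byte reply carries the status in byte 1 (0x5A granted, 0x5B/91 rejected) with the resolved IPv4 address in bytes 4-7, which the code reads back as f"{data[4]}.{data[5]}.{data[6]}.{data[7]}". A small sketch of the same request built with struct instead of a byte literal:

    # Equivalent to the seb literal in sTorResolve: 04 f0 0000 00000001 00 + host + 00
    import struct

    def build_socks4a_resolve(hostname: str) -> bytes:
        header = struct.pack('!BBH4sB', 0x04, 0xF0, 0, bytes([0, 0, 0, 1]), 0)
        return header + hostname.encode('us-ascii') + b'\x00'

    assert build_socks4a_resolve('x') == b"\x04\xf0\x00\x00\x00\x00\x00\x01\x00x\x00"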

trustor_poc.py

@ -3,31 +3,32 @@
 # from https://github.com/nusenu/trustor-poc
 # with minor refactoring to make the code more Pythonic.
-import os
-import sys
 import datetime
+import os
+import re
+import sys
 import requests
 from stem.control import Controller
-from stem.util.tor_tools import *
+# from stem.util.tor_tools import *
+# from urllib.parse import urlparse
 from urllib3.util import parse_url as urlparse
 try:
     # unbound is not on pypi
-    from unbound import ub_ctx,RR_TYPE_TXT,RR_CLASS_IN
+    from unbound import RR_CLASS_IN, RR_TYPE_TXT, ub_ctx
 except:
     ub_ctx = RR_TYPE_TXT = RR_CLASS_IN = None
 global LOG
 import logging
 import warnings
 warnings.filterwarnings('ignore')
 LOG = logging.getLogger()
 # download this python library from
 # https://github.com/erans/torcontactinfoparser
-#sys.path.append('/home/....')
+# sys.path.append('/home/....')
 try:
     from torcontactinfo import TorContactInfoParser
 except:
@ -42,7 +43,7 @@ def is_valid_hostname(hostname):
return False return False
if hostname[-1] == ".": if hostname[-1] == ".":
hostname = hostname[:-1] # strip exactly one dot from the right, if present hostname = hostname[:-1] # strip exactly one dot from the right, if present
allowed = re.compile("(?!-)[A-Z\d-]{1,63}(?<!-)$", re.IGNORECASE) allowed = re.compile("(?!-)[A-Z0-9-]{1,63}(?<!-)$", re.IGNORECASE)
return all(allowed.match(x) for x in hostname.split(".")) return all(allowed.match(x) for x in hostname.split("."))
def read_local_trust_config(trust_config): def read_local_trust_config(trust_config):
@ -127,7 +128,7 @@ def get_controller(address='127.0.0.1', port=9151, password=''):
''' '''
try: try:
#controller = Controller.from_socket_file(path=torsocketpath) # controller = Controller.from_socket_file(path=torsocketpath)
controller = Controller.from_port(address=address, port=port) controller = Controller.from_port(address=address, port=port)
controller.authenticate(password=password) controller.authenticate(password=password)
except Exception as e: except Exception as e:
@ -155,7 +156,7 @@ def find_validation_candidates(controller,
{ 'emeraldonion.org' : { 'uri-rsa': ['044600FD968728A6F220D5347AD897F421B757C0', '09DCA3360179C6C8A5A20DDDE1C54662965EF1BA']}} { 'emeraldonion.org' : { 'uri-rsa': ['044600FD968728A6F220D5347AD897F421B757C0', '09DCA3360179C6C8A5A20DDDE1C54662965EF1BA']}}
''' '''
# https://github.com/nusenu/ContactInfo-Information-Sharing-Specification#proof # https://github.com/nusenu/ContactInfo-Information-Sharing-Specification#proof
accepted_proof_types = ['uri-rsa','dns-rsa'] accepted_proof_types = ['uri-rsa', 'dns-rsa']
# https://github.com/nusenu/ContactInfo-Information-Sharing-Specification#ciissversion # https://github.com/nusenu/ContactInfo-Information-Sharing-Specification#ciissversion
accepted_ciissversions = ['2'] accepted_ciissversions = ['2']
@ -186,15 +187,15 @@ def find_validation_candidates(controller,
if parsed_ci['ciissversion'] in accepted_ciissversions and prooftype in accepted_proof_types: if parsed_ci['ciissversion'] in accepted_ciissversions and prooftype in accepted_proof_types:
if ciurl.startswith('http://') or ciurl.startswith('https://'): if ciurl.startswith('http://') or ciurl.startswith('https://'):
try: try:
domain=urlparse(ciurl).netloc domain = urlparse(ciurl).netloc
except: except:
LOG.warning('failed to parse domain %s' % ciurl) LOG.warning('failed to parse domain %s' % ciurl)
domain='error' domain = 'error'
continue continue
else: else:
domain=ciurl domain = ciurl
if not is_valid_hostname(domain): if not is_valid_hostname(domain):
domain='error' domain = 'error'
continue continue
# we can ignore relays that do not claim to be operated by a trusted operator # we can ignore relays that do not claim to be operated by a trusted operator
# if we do not accept all # if we do not accept all
@ -204,19 +205,19 @@ def find_validation_candidates(controller,
if prooftype in result[domain].keys(): if prooftype in result[domain].keys():
result[domain][prooftype].append(fingerprint) result[domain][prooftype].append(fingerprint)
else: else:
result[domain] = { prooftype : [fingerprint] } result[domain] = {prooftype: [fingerprint]}
# mixed proof types are not allowd as per spec but we are not strict here # mixed proof types are not allowd as per spec but we are not strict here
LOG.warning('%s is using mixed prooftypes %s' % (domain, prooftype)) LOG.warning('%s is using mixed prooftypes %s' % (domain, prooftype))
else: else:
result[domain] = {prooftype : [fingerprint]} result[domain] = {prooftype: [fingerprint]}
return result return result
def oDownloadUrlRequests(uri, sCAfile, timeout=30, host='127.0.0.1', port=9050): def oDownloadUrlRequests(uri, sCAfile, timeout=30, host='127.0.0.1', port=9050):
# socks proxy used for outbound web requests (for validation of proofs) # socks proxy used for outbound web requests (for validation of proofs)
proxy = {'https': 'socks5h://' +host +':' +str(port)} proxy = {'https': f"socks5h://{host}:{port}"}
# we use this UA string when connecting to webservers to fetch rsa-fingerprint.txt proof files # we use this UA string when connecting to webservers to fetch rsa-fingerprint.txt proof files
# https://nusenu.github.io/ContactInfo-Information-Sharing-Specification/#uri-rsa # https://nusenu.github.io/ContactInfo-Information-Sharing-Specification/#uri-rsa
headers = {'User-Agent':'Mozilla/5.0 (Windows NT 10.0; rv:91.0) Gecko/20100101 Firefox/91.0'} headers = {'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; rv:91.0) Gecko/20100101 Firefox/91.0'}
LOG.debug("fetching %s...." % uri) LOG.debug("fetching %s...." % uri)
try: try:
@ -250,31 +251,32 @@ def oDownloadUrlRequests(uri, sCAfile, timeout=30, host='127.0.0.1', port=9050):
if not oReqResp.headers['Content-Type'].startswith('text/plain'): if not oReqResp.headers['Content-Type'].startswith('text/plain'):
raise TrustorError(f"HTTP Content-Type != text/plain") raise TrustorError(f"HTTP Content-Type != text/plain")
#check for redirects (not allowed as per spec) # check for redirects (not allowed as per spec)
if oReqResp.url != uri: if oReqResp.url != uri:
LOG.error(f'Redirect detected %s vs %s (final)' % (uri, oReqResp.url)) LOG.error(f'Redirect detected {uri} vs %s (final)' % (oReqResp.url))
raise TrustorError(f'Redirect detected %s vs %s (final)' % (uri, oReqResp.url)) raise TrustorError(f'Redirect detected {uri} vs %s (final)' % (oReqResp.url))
return oReqResp return oReqResp
logging.getLogger("urllib3").setLevel(logging.INFO) logging.getLogger("urllib3").setLevel(logging.INFO)
#import urllib3.contrib.pyopenssl # import urllib3.contrib.pyopenssl
#urllib3.contrib.pyopenssl.inject_into_urllib3() # urllib3.contrib.pyopenssl.inject_into_urllib3()
import ipaddress
import urllib3.util import urllib3.util
import ipaddress
def ballow_subdomain_matching(hostname, dnsnames): def ballow_subdomain_matching(hostname, dnsnames):
for elt in dnsnames: for elt in dnsnames:
if len(hostname.split('.')) > len(elt.split('.')) and \ if len(hostname.split('.')) > len(elt.split('.')) and hostname.endswith(elt):
hostname.endswith(elt):
# parent # parent
return True return True
return False return False
from urllib3.util.ssl_match_hostname import (CertificateError, from urllib3.util.ssl_match_hostname import (CertificateError, _dnsname_match,
match_hostname, _ipaddress_match)
_dnsname_match,
_ipaddress_match,
)
def my_match_hostname(cert, hostname): def my_match_hostname(cert, hostname):
"""Verify that *cert* (in decoded format as returned by """Verify that *cert* (in decoded format as returned by
SSLSocket.getpeercert()) matches the *hostname*. RFC 2818 and RFC 6125 SSLSocket.getpeercert()) matches the *hostname*. RFC 2818 and RFC 6125
@ -341,10 +343,10 @@ def my_match_hostname(cert, hostname):
raise CertificateError( raise CertificateError(
"no appropriate commonName or subjectAltName fields were found" "no appropriate commonName or subjectAltName fields were found"
) )
match_hostname = my_match_hostname urllib3.util.ssl_match_hostname.match_hostname = my_match_hostname
from urllib3.util.ssl_ import ( from urllib3.util.ssl_ import is_ipaddress
is_ipaddress,
)
def _my_match_hostname(cert, asserted_hostname): def _my_match_hostname(cert, asserted_hostname):
# Our upstream implementation of ssl.match_hostname() # Our upstream implementation of ssl.match_hostname()
# only applies this normalization to IP addresses so it doesn't # only applies this normalization to IP addresses so it doesn't
@ -364,11 +366,12 @@ def _my_match_hostname(cert, asserted_hostname):
# the cert when catching the exception, if they want to # the cert when catching the exception, if they want to
e._peer_cert = cert e._peer_cert = cert
raise raise
from urllib3.connection import _match_hostname, HTTPSConnection
urllib3.connection._match_hostname = _my_match_hostname urllib3.connection._match_hostname = _my_match_hostname
from urllib3.contrib.socks import SOCKSProxyManager from urllib3.contrib.socks import SOCKSProxyManager
from urllib3 import Retry
# from urllib3 import Retry
def oDownloadUrlUrllib3(uri, sCAfile, timeout=30, host='127.0.0.1', port=9050): def oDownloadUrlUrllib3(uri, sCAfile, timeout=30, host='127.0.0.1', port=9050):
"""Theres no need to use requests here and it """Theres no need to use requests here and it
adds too many layers on the SSL to be able to get at things adds too many layers on the SSL to be able to get at things
@ -384,7 +387,7 @@ def oDownloadUrlUrllib3(uri, sCAfile, timeout=30, host='127.0.0.1', port=9050):
# we use this UA string when connecting to webservers to fetch rsa-fingerprint.txt proof files # we use this UA string when connecting to webservers to fetch rsa-fingerprint.txt proof files
# https://nusenu.github.io/ContactInfo-Information-Sharing-Specification/#uri-rsa # https://nusenu.github.io/ContactInfo-Information-Sharing-Specification/#uri-rsa
headers = {'User-Agent':'Mozilla/5.0 (Windows NT 10.0; rv:91.0) Gecko/20100101 Firefox/91.0'} headers = {'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; rv:91.0) Gecko/20100101 Firefox/91.0'}
LOG.debug("fetching %s...." % uri) LOG.debug("fetching %s...." % uri)
try: try:
@ -419,7 +422,7 @@ def oDownloadUrlUrllib3(uri, sCAfile, timeout=30, host='127.0.0.1', port=9050):
if not oReqResp.headers['Content-Type'].startswith('text/plain'): if not oReqResp.headers['Content-Type'].startswith('text/plain'):
raise TrustorError(f"HTTP Content-Type != text/plain") raise TrustorError(f"HTTP Content-Type != text/plain")
#check for redirects (not allowed as per spec) # check for redirects (not allowed as per spec)
if oReqResp.geturl() != uri: if oReqResp.geturl() != uri:
LOG.error(f'Redirect detected %s vs %s (final)' % (uri, oReqResp.geturl())) LOG.error(f'Redirect detected %s vs %s (final)' % (uri, oReqResp.geturl()))
raise TrustorError(f'Redirect detected %s vs %s (final)' % (uri, oReqResp.geturl())) raise TrustorError(f'Redirect detected %s vs %s (final)' % (uri, oReqResp.geturl()))
@ -427,10 +430,12 @@ def oDownloadUrlUrllib3(uri, sCAfile, timeout=30, host='127.0.0.1', port=9050):
return oReqResp return oReqResp
import urllib3.connectionpool import urllib3.connectionpool
from urllib3.connection import HTTPSConnection
urllib3.connectionpool.VerifiedHTTPSConnection = HTTPSConnection urllib3.connectionpool.VerifiedHTTPSConnection = HTTPSConnection
def lDownloadUrlFps(domain, sCAfile, timeout=30, host='127.0.0.1', port=9050): def lDownloadUrlFps(domain, sCAfile, timeout=30, host='127.0.0.1', port=9050):
uri="https://"+domain+"/.well-known/tor-relay/rsa-fingerprint.txt" uri = f"https://{domain}/.well-known/tor-relay/rsa-fingerprint.txt"
o = oDownloadUrlRequests(uri, sCAfile, timeout=timeout, host=host, port=port) o = oDownloadUrlRequests(uri, sCAfile, timeout=timeout, host=host, port=port)
well_known_content = o.text.upper().strip().split('\n') well_known_content = o.text.upper().strip().split('\n')
well_known_content = [i for i in well_known_content if i and len(i) == 40] well_known_content = [i for i in well_known_content if i and len(i) == 40]
@ -460,7 +465,7 @@ def validate_proofs(candidates, validation_cache_file, timeout=20, host='127.0.0
LOG.error('%s:%s:%s' % (fingerprint, domain, prooftype)) LOG.error('%s:%s:%s' % (fingerprint, domain, prooftype))
elif prooftype == 'dns-rsa' and ub_ctx: elif prooftype == 'dns-rsa' and ub_ctx:
for fingerprint in candidates[domain][prooftype]: for fingerprint in candidates[domain][prooftype]:
fp_domain = fingerprint+'.'+domain fp_domain = fingerprint + '.' + domain
if idns_validate(fp_domain, if idns_validate(fp_domain,
libunbound_resolv_file='resolv.conf', libunbound_resolv_file='resolv.conf',
dnssec_DS_file='dnssec-root-trust', dnssec_DS_file='dnssec-root-trust',
@ -488,7 +493,6 @@ def idns_validate(domain,
# this is not the system wide /etc/resolv.conf # this is not the system wide /etc/resolv.conf
# use dnscrypt-proxy to encrypt your DNS and route it via tor's SOCKSPort # use dnscrypt-proxy to encrypt your DNS and route it via tor's SOCKSPort
ctx = ub_ctx() ctx = ub_ctx()
if (os.path.isfile(libunbound_resolv_file)): if (os.path.isfile(libunbound_resolv_file)):
ctx.resolvconf(libunbound_resolv_file) ctx.resolvconf(libunbound_resolv_file)
@ -526,12 +530,10 @@ def configure_tor(controller, trusted_fingerprints, exitonly=True):
try: try:
controller.set_conf('ExitNodes', trusted_fingerprints) controller.set_conf('ExitNodes', trusted_fingerprints)
LOG.error('limited exits to %s relays' % relay_count) LOG.error('limited exits to %s relays' % relay_count)
except Exception as e: except Exception as e: # noqa
LOG.exception('Failed to set ExitNodes tor config to trusted relays') LOG.exception('Failed to set ExitNodes tor config to trusted relays')
sys.exit(20) sys.exit(20)
if __name__ == '__main__': if __name__ == '__main__':
CAfile = '/etc/ssl/certs/ca-certificates.crt' CAfile = '/etc/ssl/certs/ca-certificates.crt'
trust_config = 'trust_config' trust_config = 'trust_config'
@ -542,12 +544,12 @@ if __name__ == '__main__':
trusted_fingerprints = read_local_validation_cache(validation_cache_file, trusted_fingerprints = read_local_validation_cache(validation_cache_file,
trusted_domains=trusted_domains) trusted_domains=trusted_domains)
# tor ControlPort password # tor ControlPort password
controller_password='' controller_password = ''
# tor ControlPort IP # tor ControlPort IP
controller_address = '127.0.0.1' controller_address = '127.0.0.1'
timeout = 20 timeout = 20
port = 9050 port = 9050
controller = get_controller(address=controller_address,password=controller_password) controller = get_controller(address=controller_address, password=controller_password)
r = find_validation_candidates(controller, r = find_validation_candidates(controller,
validation_cache=trusted_fingerprints, validation_cache=trusted_fingerprints,