This commit is contained in:
emdee 2022-11-13 04:37:30 +00:00
parent ae22d14437
commit 9b743bb101
3 changed files with 176 additions and 164 deletions

View File

@@ -48,7 +48,7 @@ exclusion: the ```--contact``` commandline arg is a comma sep list of conditions
 More may be added later.
 Because you don't want to exclude the introduction points to any onion
-you want to connect to, ```--white_onions``` should whitelist the
+you want to connect to, ```--white_services``` should whitelist the
 introduction points to a comma sep list of onions, but is
 currently broken in stem 1.8.0: see:
 * https://github.com/torproject/stem/issues/96
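For orientation, a minimal sketch (not the project's code) of how introduction points are fetched through stem's controller, mirroring the `lIntroductionPoints` helper this commit moves into `support_onions`; with stem 1.8.x this only works for v2 hidden-service descriptors, which is why the option stays flagged as broken:

```python
# Sketch: collect introduction-point addresses for a list of onions via stem.
# Mirrors the lIntroductionPoints helper this commit relocates; works only for
# v2 descriptors under stem 1.8.x.
from stem.control import Controller

def list_introduction_points(lOnions, port=9051):
    lAddrs = []
    with Controller.from_port(port=port) as controller:
        controller.authenticate()
        for onion in lOnions:
            desc = controller.get_hidden_service_descriptor(onion, await_result=True)
            for intro in desc.introduction_points():
                # each entry carries address, port and identifier
                lAddrs.append(intro.address)
    return lAddrs
```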
@@ -87,8 +87,11 @@ import socket
 import time
 import argparse
 from io import StringIO
+import ipaddr
+# list(ipaddress._find_address_range(ipaddress.IPv4Network('172.16.0.0/12'))
 from urllib3.util.ssl_match_hostname import CertificateError
+import stem
 from stem import InvalidRequest
 from stem.control import Controller
 from stem.connection import IncorrectPassword
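The new comment above refers to the private `ipaddress._find_address_range` helper; as a hedged aside, the public API reaches the same endpoints of a private range by indexing the network:

```python
# Sketch: first/last address of a private range via the public ipaddress API,
# instead of the private _find_address_range mentioned in the comment above.
import ipaddress

net = ipaddress.IPv4Network('172.16.0.0/12')
first, last = net[0], net[-1]
print(first, last)  # 172.16.0.0 172.31.255.255
```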
@@ -111,7 +114,7 @@ except ImportError as e:
 coloredlogs = False
 from trustor_poc import oDownloadUrl, idns_validate, TrustorError
-from support_onions import sTorResolve, getaddrinfo, icheck_torrc, bAreWeConnected
+from support_onions import icheck_torrc, bAreWeConnected, lIntroductionPoints, zResolveDomain, vwait_for_controller, yKNOWN_NODNS
 global LOG
 import logging
@@ -122,43 +125,13 @@ LOG = logging.getLogger()
 ETC_DIR = '/etc/tor/yaml'
 aTRUST_DB = {}
 aTRUST_DB_INDEX = {}
+aFP_EMAIL = {}
 sDETAILS_URL = "https://metrics.torproject.org/rs.html#details/"
 # You can call this while bootstrapping
 sEXCLUDE_EXIT_KEY = 'ExcludeNodes'
 sINCLUDE_EXIT_KEY = 'ExitNodes'
 sINCLUDE_GUARD_KEY = 'EntryNodes'
-# maybe we should check these each time but we
-# got them by sorting bad relays in the wild
-lKNOWN_NODNS = [
-'0x0.is',
-'a9.wtf',
-'artikel5ev.de',
-'arvanode.net',
-'dodo.pm',
-'dra-family.github.io',
-'eraldonion.org',
-'galtland.network',
-'interfesse.net',
-'kryptonit.org',
-'lonet.sh',
-'moneneis.de',
-'nx42.de',
-'ormycloud.org',
-'plied-privacy.net',
-'redacted.org',
-'rification-for-nusenu.net',
-'sv.ch',
-'thingtohide.nl',
-'tikel10.org',
-'tor-exit-2.aa78i2efsewr0neeknk.xyz',
-'tor-exit-3.aa78i2efsewr0neeknk.xyz',
-'torix-relays.org',
-'tse.com',
-'tuxli.org',
-'w.digidow.eu',
-'www.quintex.com',
-]
 def oMakeController(sSock='', port=9051):
 import getpass
 if sSock and os.path.exists(sSock):
@@ -191,7 +164,7 @@ def lYamlBadNodes(sFile,
 global lKNOWN_NODNS
 root = 'ExcludeDomains'
 if root not in oBAD_NODES[oBAD_ROOT] or not oBAD_NODES[oBAD_ROOT][root]:
-oBAD_NODES[oBAD_ROOT][root] = lKNOWN_NODNS
+oBAD_NODES[oBAD_ROOT][root] = yaml.safe_load(StringIO(yKNOWN_NODNS))
 else:
 lKNOWN_NODNS = oBAD_NODES[oBAD_ROOT][root]
 return l
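The new right-hand side seeds `ExcludeDomains` from `yKNOWN_NODNS`, now imported from `support_onions`. A minimal sketch of the pattern, assuming `yKNOWN_NODNS` is simply the old domain list serialized as a YAML string (the sample entries are taken from the list deleted above):

```python
# Sketch: loading a known-bad-domain list kept as a YAML string constant.
# yKNOWN_NODNS here stands in for the constant imported from support_onions.
from io import StringIO
import yaml

yKNOWN_NODNS = """
- a9.wtf
- tor-exit-2.aa78i2efsewr0neeknk.xyz
"""

lKNOWN_NODNS = yaml.safe_load(StringIO(yKNOWN_NODNS))
assert lKNOWN_NODNS == ['a9.wtf', 'tor-exit-2.aa78i2efsewr0neeknk.xyz']
```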
@@ -208,28 +181,10 @@ def lYamlGoodNodes(sFile='/etc/tor/torrc-goodnodes.yaml'):
 o = yaml.safe_load(oFd)
 oGOOD_NODES = o
 if 'GuardNodes' in o[oGOOD_ROOT].keys():
-l += o[oGOOD_ROOT]['GuardNodes']
+l = o[oGOOD_ROOT]['GuardNodes']
 # yq '.Nodes.IntroductionPoints|.[]' < /etc/tor/torrc-goodnodes.yaml
 return l
-def lIntroductionPoints(controller, lOnions):
-"""not working in stem 1.8.3"""
-l = []
-for elt in lOnions:
-desc = controller.get_hidden_service_descriptor(elt, await_result=True, timeout=None)
-l = desc.introduction_points()
-if l:
-LOG.warn(f"{elt} NO introduction points\n")
-continue
-LOG.info(f"{elt} introduction points are...\n")
-for introduction_point in l:
-LOG.info(' %s:%s => %s' % (introduction_point.address,
-introduction_point.port,
-introduction_point.identifier))
-l += [introduction_point.address]
-return l
 tBAD_URLS = set()
 lATS = ['abuse', 'email']
 lINTS = ['ciissversion', 'uplinkbw', 'signingkeylifetime', 'memory']
@@ -254,8 +209,16 @@ def aVerifyContact(a, fp, https_cafile, timeout=20, host='127.0.0.1', port=9050)
 a.update({'fps': []})
 keys = list(a.keys())
+if 'email' not in keys:
+LOG.warn(f"{fp} 'email' not in {keys}")
+a['email'] = ''
+if 'ciissversion' not in keys:
+aFP_EMAIL[fp] = a['email']
+LOG.warn(f"{fp} 'ciissversion' not in {keys}")
+a['ciissversion'] = 2
 # test the url for fps and add it to the array
 if 'proof' not in keys:
+aFP_EMAIL[fp] = a['email']
 LOG.warn(f"{fp} 'proof' not in {keys}")
 return a
@@ -267,9 +230,11 @@ def aVerifyContact(a, fp, https_cafile, timeout=20, host='127.0.0.1', port=9050)
 if 'url' not in keys:
 if 'uri' not in keys:
 a['url'] = ''
+aFP_EMAIL[fp] = a['email']
 LOG.warn(f"{fp} url and uri not in {keys}")
 return a
 a['url'] = a['uri']
+aFP_EMAIL[fp] = a['email']
 LOG.debug(f"{fp} 'uri' but not 'url' in {keys}")
 # drop through
@@ -280,24 +245,15 @@ def aVerifyContact(a, fp, https_cafile, timeout=20, host='127.0.0.1', port=9050)
 # domain should be a unique key for contacts
 domain = a['url'][8:].strip('/')
-if domain in lKNOWN_NODNS:
+if lKNOWN_NODNS and domain in lKNOWN_NODNS:
 LOG.warn(f"{domain} in lKNOWN_NODNS")
 return {}
-try:
-ip = sTorResolve(domain)
-except Exception as e:
-ip = ''
+ip = zResolveDomain(domain, lKNOWN_NODNS)
 if ip == '':
-try:
-lpair = getaddrinfo(domain, 443)
-except Exception as e:
-LOG.warn("{e}")
-lpair = None
-lKNOWN_NODNS.append(domain)
-if lpair is None:
-LOG.warn(f"TorResolv and getaddrinfo failed for {domain}")
-return a
-ip = lpair[0]
+aFP_EMAIL[fp] = a['email']
+LOG.debug(f"{fp} {domain} does not resolve")
+lKNOWN_NODNS.append(domain)
+return {}
 if a['proof'] not in ['uri-rsa']:
 # only support uri for now
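`zResolveDomain` lives in `support_onions` and is not shown in this diff; the sketch below is only an assumed reconstruction based on the deleted `sTorResolve`/`getaddrinfo` fallback it replaces, and the real helper may differ:

```python
# Sketch only: an assumed shape for zResolveDomain, reconstructed from the
# deleted sTorResolve/getaddrinfo fallback above; the real helper in
# support_onions may differ.
import socket
from support_onions import sTorResolve  # same module the old code imported it from

def zResolveDomain_sketch(domain, lKnownNoDns):
    try:
        ip = sTorResolve(domain)      # resolve through the local Tor SOCKS port
        if ip:
            return ip
    except Exception:
        pass
    try:
        lPair = socket.getaddrinfo(domain, 443)
    except (socket.gaierror, OSError):
        lKnownNoDns.append(domain)    # remember domains that never resolve
        return ''
    return lPair[0][4][0]             # first resolved address
```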
@@ -323,22 +279,26 @@ def aVerifyContact(a, fp, https_cafile, timeout=20, host='127.0.0.1', port=9050)
 LOG.warn(f"CertificateError downloading from {domain} {e}")
 tBAD_URLS.add(a['url'])
 except TrustorError as e:
-LOG.warn(f"TrustorError downloading from {domain} {e.args}")
+if e.args == "HTTP Errorcode 404":
+aFP_EMAIL[fp] = a['email']
+LOG.warn(f"TrustorError 404 from {domain} {e.args}")
+else:
+LOG.warn(f"TrustorError downloading from {domain} {e.args}")
 tBAD_URLS.add(a['url'])
 except (BaseException ) as e:
 LOG.error(f"Exception {type(e)} downloading from {domain} {e}")
 else:
 if o.status_code >= 300:
-LOG.warn(f"Error downloading from {domain} {o.status_code} {o.reason}")
+aFP_EMAIL[fp] = a['email']
+LOG.warn(f"Error from {domain} {o.status_code} {o.reason}")
 # any reason retry?
 tBAD_URLS.add(a['url'])
 return a
 l = o.text.upper().strip().split('\n')
+a['modified'] = time.time()
 if not l:
+# already squawked in lD
 LOG.warn(f"Downloading from {domain} empty for {fp}")
+tBAD_URLS.add(a['url'])
 else:
 a['fps'] = [elt for elt in l if elt and len(elt) == 40
 and not elt.startswith('#')]
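The `fps` filter at the end of this hunk expects the proof document (e.g. `.well-known/tor-relay/rsa-fingerprint.txt`) to contain one 40-character fingerprint per line, with `#` comments allowed. A self-contained example of the same filter, with made-up sample data:

```python
# Sketch: the same fingerprint filter applied to a sample proof document body.
sBody = """
# relays operated by example.org
23A34AD4E4E5D4F5E4D5D4F5E4D5D4F5E4D5D4F5
not-a-fingerprint
"""
l = sBody.upper().strip().split('\n')
fps = [elt for elt in l if elt and len(elt) == 40 and not elt.startswith('#')]
print(fps)  # ['23A34AD4E4E5D4F5E4D5D4F5E4D5D4F5E4D5D4F5']
```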
@@ -359,19 +319,6 @@ def aParseContact(contact, fp):
 a = yaml.safe_load(oFd)
 return a
-def vwait_for_controller(controller, wait_boot):
-if bAreWeConnected() is False:
-raise SystemExit("we are not connected")
-percent = i = 0
-# You can call this while boostrapping
-while percent < 100 and i < wait_boot:
-bootstrap_status = controller.get_info("status/bootstrap-phase")
-progress_percent = re.match('.* PROGRESS=([0-9]+).*', bootstrap_status)
-percent = int(progress_percent.group(1))
-LOG.info(f"Bootstrapping {percent}%")
-time.sleep(5)
-i += 5
 def vsetup_logging(log_level, logfile=''):
 global LOG
 add = True
@@ -386,7 +333,7 @@ def vsetup_logging(log_level, logfile=''):
 kwargs = dict(level=log_level,
 force=True,
-format='%(levelname)-4s %(message)s')
+format='%(levelname)s %(message)s')
 if logfile:
 add = logfile.startswith('+')
@@ -400,7 +347,7 @@ def vsetup_logging(log_level, logfile=''):
 aKw = dict(level=log_level,
 logger=LOG,
 stream=sys.stdout if add else None,
-fmt='%(levelname)-4s %(message)s'
+fmt='%(levelname)s %(message)s'
 )
 coloredlogs.install(**aKw)
 if logfile:
@@ -414,6 +361,24 @@ def vsetup_logging(log_level, logfile=''):
 LOG.addHandler(oHandler)
 LOG.info(f"Setting log_level to {log_level!s}")
+logging._levelToName = {
+CRITICAL: 'CRITICAL',
+ERROR: 'ERROR',
+WARNING: 'WARN',
+INFO: 'INFO',
+DEBUG: 'DEBUG',
+NOTSET: 'NOTSET',
+}
+logging._nameToLevel = {
+'CRITICAL': CRITICAL,
+'FATAL': FATAL,
+'ERROR': ERROR,
+'WARN': WARNING,
+'WARNING': WARNING,
+'INFO': INFO,
+'DEBUG': DEBUG,
+'NOTSET': NOTSET,
+}
 def oMainArgparser(_=None):
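The added `_levelToName`/`_nameToLevel` assignments overwrite private module state in `logging` to display `WARNING` as `WARN`. For comparison only, the public way to get the same label is `logging.addLevelName`:

```python
# Sketch: renaming WARNING to WARN with the public logging API instead of
# replacing logging._levelToName / logging._nameToLevel directly.
import logging

logging.addLevelName(logging.WARNING, 'WARN')

logging.basicConfig(level=logging.INFO, format='%(levelname)s %(message)s')
logging.warning("shows up as WARN")
```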
@@ -463,6 +428,10 @@ def oMainArgparser(_=None):
 parser.add_argument('--bad_contacts', type=str,
 default=os.path.join(ETC_DIR, 'badcontacts.yaml'),
 help="Yaml file of bad contacts that bad FPs are using")
+parser.add_argument('--strict_nodes', type=int, default=0,
+choices=[0,1],
+help="Set StrictNodes: 1 is less anonymous but more secure, although some sites may be unreachable")
 parser.add_argument('--wait_boot', type=int, default=120,
 help="Seconds to wait for Tor to bootstrap")
 parser.add_argument('--log_level', type=int, default=20,
@@ -470,12 +439,12 @@
 parser.add_argument('--bad_sections', type=str,
 default='MyBadExit',
 help="sections of the badnodes.yaml to use, comma separated, '' BROKEN")
-parser.add_argument('--white_onions', type=str,
+parser.add_argument('--white_services', type=str,
 default='',
 help="comma sep. list of onions to whitelist their introduction points - BROKEN")
 parser.add_argument('--torrc_output', type=str, default='',
 help="Write the torrc configuration to a file")
-parser.add_argument('--proof_output', type=str, default=os.path.join(ETC_DIR, '/proof.yaml'),
+parser.add_argument('--proof_output', type=str, default=os.path.join(ETC_DIR, 'proof.yaml'),
 help="Write the proof data of the included nodes to a YAML file")
 return parser
@@ -506,7 +475,8 @@ def vwrite_goodnodes(oArgs, oGOOD_NODES, slen):
 def iMain(lArgs):
 global aTRUST_DB
 global aTRUST_DB_INDEX
+global oBAD_NODES
+global oGOOD_NODES
 global lKNOWN_NODNS
 parser = oMainArgparser()
 oArgs = parser.parse_args(lArgs)
@@ -519,18 +489,26 @@ def iMain(lArgs):
 if sFile and os.path.exists(sFile):
 icheck_torrc(sFile, oArgs)
+twhitelist_set = set()
 sFile = oArgs.proof_output
 if sFile and os.path.exists(sFile):
 try:
 with open(sFile, 'rt') as oFd:
 aTRUST_DB = yaml.safe_load(oFd)
+assert type(aTRUST_DB) == dict
+LOG.info(f"{len(aTRUST_DB.keys())} trusted contacts from {sFile}")
 # reverse lookup of fps to contacts
 # but...
 for k,v in aTRUST_DB.items():
+if 'modified' not in v.keys():
+v['modified'] = time.time()
 aTRUST_DB_INDEX[k] = v
 if 'fps' in aTRUST_DB[k].keys():
 for fp in aTRUST_DB[k]['fps']:
+if fp in aTRUST_DB_INDEX:
+continue
 aTRUST_DB_INDEX[fp] = v
+LOG.info(f"{len(aTRUST_DB_INDEX.keys())} good relays from {sFile}")
 except Exception as e:
 LOG.exception(f"Error reading YAML TrustDB {sFile} {e}")
@@ -557,32 +535,40 @@ def iMain(lArgs):
 if elt and elt != '{??}':
 LOG.warn(f"{sEXCLUDE_EXIT_KEY} is in use already")
-lGoodOverrideSet = lYamlGoodNodes(oArgs.good_nodes)
-LOG.info(f"lYamlGoodNodes {len(lGoodOverrideSet)} from {oArgs.good_nodes}")
+twhitelist_set.update(set(lYamlGoodNodes(oArgs.good_nodes)))
+LOG.info(f"lYamlGoodNodes {len(twhitelist_set)} GuardNodes from {oArgs.good_nodes}")
-if oArgs.white_onions:
-l = lIntroductionPoints(controller, oArgs.white_onions.split(','))
-lGoodOverrideSet += l
+global oGOOD_NODES
+t = set()
+if 'IntroductionPoints' in oGOOD_NODES[oGOOD_ROOT]['Relays'].keys():
+t = set(oGOOD_NODES[oGOOD_ROOT]['Relays']['IntroductionPoints'])
+# not working = maybe when stem is updated
+w = set(oGOOD_NODES[oGOOD_ROOT]['Services'])
+if oArgs.white_services:
+w.update(oArgs.white_services.split(','))
+t.update(lIntroductionPoints(controller, w))
+if len(t) > 0:
+LOG.info(f"IntroductionPoints {len(t)} nodes")
+twhitelist_set.update(t)
-exit_excludelist = []
+texclude_set = set()
 if oArgs.bad_nodes and os.path.exists(oArgs.bad_nodes):
 if False and oArgs.bad_sections:
 # BROKEN
 sections = oArgs.bad_sections.split(',')
-exit_excludelist = lYamlBadNodes(oArgs.bad_nodes,
+lexclude_list = set(lYamlBadNodes(oArgs.bad_nodes,
 lWanted=sections,
-section=sEXCLUDE_EXIT_KEY)
+section=sEXCLUDE_EXIT_KEY))
 else:
-exit_excludelist = lYamlBadNodes(oArgs.bad_nodes)
-LOG.info(f"lYamlBadNodes {len(exit_excludelist)}")
+texclude_set = set(lYamlBadNodes(oArgs.bad_nodes))
+LOG.info(f"lYamlBadNodes {len(texclude_set)}")
-tProofGoodFps = set()
+ttrust_db_index = aTRUST_DB_INDEX.keys()
 iDnsContact = 0
+lBadContactUrls = []
 iFakeContact = 0
 aBadContacts = {}
-aProofUri = {}
 lConds = oArgs.contact.split(',')
 iR = 0
@@ -594,7 +580,7 @@ def iMain(lArgs):
 continue
 relay.fingerprint = relay.fingerprint.upper()
-sofar = f"G:{len(list(aProofUri.keys()))} U:{iDnsContact} F:{iFakeContact} BF:{len(exit_excludelist)} GF:{len(tProofGoodFps)} #{iR}"
+sofar = f"G:{len(aTRUST_DB.keys())} U:{iDnsContact} F:{iFakeContact} BF:{len(texclude_set)} GF:{len(ttrust_db_index)} #{iR}"
 if not relay.exit_policy.is_exiting_allowed():
 if sEXCLUDE_EXIT_KEY == 'ExcludeNodes':
 LOG.debug(f"{relay.fingerprint} not an exit {sofar}")
@@ -602,14 +588,9 @@ def iMain(lArgs):
 LOG.warn(f"{relay.fingerprint} not an exit {sofar}")
 # continue
-if relay.fingerprint in tProofGoodFps:
-# we already have it.
-continue
-if relay.fingerprint in aTRUST_DB:
-if aTRUST_DB[relay.fingerprint]['fps'] and \
-relay.fingerprint in aTRUST_DB[relay.fingerprint]['fps']:
-tProofGoodFps.add(relay.fingerprint)
+# great contact had good fps and we are in them
+if relay.fingerprint in aTRUST_DB_INDEX.keys():
+# a cached entry
 continue
 if type(relay.contact) == bytes:
@@ -618,45 +599,62 @@ def iMain(lArgs):
 if ('Empty' in lConds and not relay.contact) or \
 ('NoEmail' in lConds and relay.contact and not '@' in relay.contact):
-exit_excludelist.append(relay.fingerprint)
+texclude_set.add(relay.fingerprint)
 continue
-if not relay.contact:
+if not relay.contact or not 'ciissversion:' in relay.contact:
 # should be unreached 'Empty' should always be in lConds
 continue
 c = relay.contact.lower()
+# first rough cut
 i = c.find('url:')
 if i >=0: c = c[i+4:]
 i = c.find(' ')
 if i >=0: c = c[:i]
-domain = c.replace('https://', '').replace('http://', '').strip('/')
+c = c.replace('https://', '').replace('http://', '').strip('/')
+i = c.find('/')
+if i >=0: c = c[:i]
+domain = c
+LOG.info(f"{relay.fingerprint} domain={domain}")
-if domain in lKNOWN_NODNS:
+if domain and domain in lKNOWN_NODNS:
 LOG.info(f"{relay.fingerprint} skipping in lKNOWN_NODNS {domain} {sofar}")
-exit_excludelist.append(relay.fingerprint)
+texclude_set.add(relay.fingerprint)
+continue
-elif 'dns-rsa' in relay.contact.lower():
-LOG.info(f"skipping 'dns-rsa' {relay.fingerprint}.{domain} {sofar}")
+if domain:
+ip = zResolveDomain(domain, lKNOWN_NODNS)
+if not ip:
+LOG.warn(f"{relay.fingerprint} {domain} did not resolve {sofar}")
+texclude_set.add(relay.fingerprint)
+iFakeContact += 1
+continue
+if 'dns-rsa' in relay.contact.lower():
+target = f"{relay.fingerprint}.{domain}"
+LOG.info(f"skipping 'dns-rsa' {target} {sofar}")
 iDnsContact += 1
 elif 'proof:uri-rsa' in relay.contact.lower():
 a = aParseContact(relay.contact, relay.fingerprint)
 if not a:
 LOG.warn(f"{relay.fingerprint} did not parse {sofar}")
-exit_excludelist.append(relay.fingerprint)
+texclude_set.add(relay.fingerprint)
 continue
 if 'url' in a and a['url']:
 if a['url'] in tBAD_URLS:
 # The fp is using a contact with a URL we know is bad
 LOG.info(f"{relay.fingerprint} skipping in tBAD_URLS {a['url']} {sofar}")
-exit_excludelist.append(relay.fingerprint)
+iFakeContact += 1
+texclude_set.add(relay.fingerprint)
 continue
 domain = a['url'].replace('https://', '').replace('http://', '')
 if domain in lKNOWN_NODNS:
 # The fp is using a contact with a URL we know is bogus
 LOG.info(f"{relay.fingerprint} skipping in lKNOWN_NODNS {a['url']} {sofar}")
-exit_excludelist.append(relay.fingerprint)
+iFakeContact += 1
+texclude_set.add(relay.fingerprint)
 continue
@@ -671,49 +669,45 @@ def iMain(lArgs):
 LOG.warn(f"{relay.fingerprint} did NOT VERIFY {sofar}")
 # If it's giving contact info that doesn't check out
 # it could be a bad exit with fake contact info
-exit_excludelist.append(relay.fingerprint)
+texclude_set.add(relay.fingerprint)
 aBadContacts[relay.fingerprint] = b
 continue
 if relay.fingerprint not in b['fps']:
 LOG.warn(f"{relay.fingerprint} the FP IS NOT in the list of fps {sofar}")
 # assume a fp is using a bogus contact
-exit_excludelist.append(relay.fingerprint)
+texclude_set.add(relay.fingerprint)
 iFakeContact += 1
 aBadContacts[relay.fingerprint] = b
 continue
-# great contact had good fps and we are in them
-tProofGoodFps.union(b['fps'])
-if relay.fingerprint in aProofUri.keys():
-# a cached entry
-continue
 LOG.info(f"{relay.fingerprint} verified {b['url']} {sofar}")
 # add our contact info to the trustdb
-aProofUri[relay.fingerprint] = b
+aTRUST_DB[relay.fingerprint] = b
+for elt in b['fps']:
+aTRUST_DB_INDEX[elt] = b
 if oArgs.proof_output and oArgs.log_level <= 20:
 # as we go along then clobber
 with open(proof_output_tmp, 'wt') as oFYaml:
-yaml.dump(aProofUri, indent=2, stream=oFYaml)
+yaml.dump(aTRUST_DB, indent=2, stream=oFYaml)
 oFYaml.close()
-exit_excludelist = list(set(exit_excludelist).difference(set(lGoodOverrideSet)))
+texclude_set = texclude_set.difference(twhitelist_set)
-if oArgs.proof_output and aProofUri:
+if oArgs.proof_output and aTRUST_DB:
 with open(proof_output_tmp, 'wt') as oFYaml:
-yaml.dump(aProofUri, indent=2, stream=oFYaml)
-LOG.info(f"Wrote {len(list(aProofUri))} proof details to {oArgs.proof_output}")
+yaml.dump(aTRUST_DB, indent=2, stream=oFYaml)
+LOG.info(f"Wrote {len(list(aTRUST_DB.keys()))} good contact details to {oArgs.proof_output}")
 oFYaml.close()
 if os.path.exists(oArgs.proof_output):
 bak = oArgs.proof_output +'.bak'
 os.rename(oArgs.proof_output, bak)
 os.rename(proof_output_tmp, oArgs.proof_output)
-if oArgs.torrc_output and exit_excludelist:
+if oArgs.torrc_output and texclude_set:
 with open(oArgs.torrc_output, 'wt') as oFTorrc:
-oFTorrc.write(f"{sEXCLUDE_EXIT_KEY} {','.join(exit_excludelist)}\n")
-oFTorrc.write(f"{sINCLUDE_EXIT_KEY} {','.join(tProofGoodFps)}\n")
+oFTorrc.write(f"{sEXCLUDE_EXIT_KEY} {','.join(texclude_set)}\n")
+oFTorrc.write(f"{sINCLUDE_EXIT_KEY} {','.join(aTRUST_DB_INDEX.keys())}\n")
 oFTorrc.write(f"{sINCLUDE_GUARD_KEY} {','.join(o[oGOOD_ROOT]['GuardNodes'])}\n")
 LOG.info(f"Wrote tor configuration to {oArgs.torrc_output}")
 oFTorrc.close()
@@ -724,40 +718,46 @@ def iMain(lArgs):
 yaml.dump(aBadContacts, indent=2, stream=oFYaml)
 oFYaml.close()
-global oBAD_NODES
-oBAD_NODES[oBAD_ROOT]['ExcludeNodes']['BadExit'] = exit_excludelist
+oBAD_NODES[oBAD_ROOT]['ExcludeNodes']['BadExit'] = list(texclude_set)
 oBAD_NODES[oBAD_ROOT]['ExcludeDomains'] = lKNOWN_NODNS
-vwrite_badnodes(oArgs, oBAD_NODES, str(len(exit_excludelist)))
+vwrite_badnodes(oArgs, oBAD_NODES, str(len(texclude_set)))
-global oGOOD_NODES
-oGOOD_NODES['GoodNodes']['Relays']['ExitNodes'] = tProofGoodFps
-vwrite_goodnodes(oArgs, oGOOD_NODES, str(len(tProofGoodFps)))
+oGOOD_NODES['GoodNodes']['Relays']['ExitNodes'] = list(aTRUST_DB_INDEX.keys())
+# GuardNodes are readonly
+vwrite_goodnodes(oArgs, oGOOD_NODES, str(len(ttrust_db_index)))
 retval = 0
 try:
 logging.getLogger('stem').setLevel(30)
 try:
-if exit_excludelist:
-LOG.info(f"{sEXCLUDE_EXIT_KEY} {len(exit_excludelist)} net bad exit nodes")
-controller.set_conf(sEXCLUDE_EXIT_KEY, exit_excludelist)
+if texclude_set:
+LOG.info(f"{sEXCLUDE_EXIT_KEY} {len(texclude_set)} net bad exit relays")
+controller.set_conf(sEXCLUDE_EXIT_KEY, texclude_set)
 except stem.SocketClosed as e:
-LOG.error(f"Failed setting {sEXCLUDE_EXIT_KEY} bad exit nodes in Tor")
+LOG.error(f"Failed setting {sEXCLUDE_EXIT_KEY} bad exit relays in Tor")
 retval += 1
 try:
-if tProofGoodFps:
-LOG.info(f"{sINCLUDE_EXIT_KEY} {len(tProofGoodFps)} good nodes")
-controller.set_conf(sINCLUDE_EXIT_KEY, tProofGoodFps)
+if aTRUST_DB_INDEX.keys():
+LOG.info(f"{sINCLUDE_EXIT_KEY} {len(aTRUST_DB_INDEX.keys())} good relays")
+controller.set_conf(sINCLUDE_EXIT_KEY, aTRUST_DB_INDEX.keys())
 except stem.SocketClosed as e:
 LOG.error(f"Failed setting {sINCLUDE_EXIT_KEY} good exit nodes in Tor")
 retval += 1
 try:
-o = oGOOD_NODES
-if 'GuardNodes' in o[oGOOD_ROOT].keys():
-LOG.info(f"{sINCLUDE_GUARD_KEY} {len(o[oGOOD_ROOT]['GuardNodes'])} guard nodes")
-controller.set_conf(sINCLUDE_GUARD_KEY, o[oGOOD_ROOT]['GuardNodes'])
+if 'GuardNodes' in oGOOD_NODES[oGOOD_ROOT].keys():
+LOG.info(f"{sINCLUDE_GUARD_KEY} {len(oGOOD_NODES[oGOOD_ROOT]['GuardNodes'])} guard nodes")
+# FixMe for now override StrictNodes it may be unusable otherwise
+controller.set_conf(sINCLUDE_GUARD_KEY,
+oGOOD_NODES[oGOOD_ROOT]['GuardNodes'])
+cur = controller.get_conf('StrictNodes')
+if oArgs.strict_nodes and int(cur) != oArgs.strict_nodes:
+LOG.info(f"OVERRIDING StrictNodes to {oArgs.strict_nodes}")
+controller.set_conf('StrictNodes', oArgs.strict_nodes)
+else:
+LOG.info(f"StrictNodes is set to {cur}")
 except stem.SocketClosed as e:
 LOG.errro(f"Failed setting {sINCLUDE_EXIT_KEY} good exit nodes in Tor")
 retval += 1
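The `--strict_nodes` handling added here uses stem's config getter/setter; a standalone sketch of the same calls against a local control port (the authentication method is assumed):

```python
# Sketch: reading and overriding StrictNodes through stem, as the new
# right-hand side of this hunk does inside iMain().
from stem.control import Controller

with Controller.from_port(port=9051) as controller:
    controller.authenticate()
    cur = controller.get_conf('StrictNodes')
    print(f"StrictNodes is currently {cur}")
    # 1 pins Tor to the configured Exit/Entry nodes; some sites may become unreachable
    controller.set_conf('StrictNodes', '1')
```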

View File

@@ -9,9 +9,11 @@ from requests.utils import (
 select_proxy,
 urldefragauth,
 )
+import urllib3
 from urllib3.util import parse_url
 from urllib3.util.retry import Retry
 from urllib3.util import Timeout as TimeoutSauce
+from urllib3.util.ssl_match_hostname import match_hostname as match_hostname
 DEFAULT_POOLBLOCK = False
 DEFAULT_POOLSIZE = 10
@@ -262,8 +264,15 @@ class HTTPSAdapter(HTTPAdapter):
 return self.build_response(request, resp)
-from urllib3.util.ssl_match_hostname import match_hostname as _match_hostname
-def match_hostname(cert, hostname):
+def ballow_subdomain_matching(hostname, dnsnames):
+for elt in dnsnames:
+if len(split(hostname, '.')) > len(split(elt, '.')) and \
+hostname.endswith(elt):
+# parent
+return True
+return False
+def my_match_hostname(cert, hostname):
 """Verify that *cert* (in decoded format as returned by
 SSLSocket.getpeercert()) matches the *hostname*. RFC 2818 and RFC 6125
 rules are followed, but IP addresses are not accepted for *hostname*.
@@ -316,6 +325,8 @@ def match_hostname(cert, hostname):
 dnsnames.append(value)
 if len(dnsnames) > 1:
 # soften this to allow subdomain matching
+if ballow_subdomain_matching(hostname, dnsnames):
+return
 raise CertificateError(
 "hostname %r "
 "doesn't match any of %s" % (hostname, ", ".join(map(repr, dnsnames)))
@@ -327,5 +338,4 @@ def match_hostname(cert, hostname):
 "no appropriate commonName or subjectAltName fields were found"
 )
-urllib3.util.ssl_match_hostname = match_hostname
+urllib3.util.ssl_match_hostname.match_hostnaem = my_match_hostname
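For readers of this file's changes, a compact, self-contained sketch of the subdomain relaxation that `ballow_subdomain_matching` is meant to implement (an illustrative variant, not the code above):

```python
# Sketch: the "allow subdomain" relaxation added around urllib3's
# match_hostname. Accepts foo.example.org when the cert only lists example.org.
def allow_subdomain_match(hostname, dnsnames):
    parts = hostname.split('.')
    for elt in dnsnames:
        elt_parts = elt.split('.')
        if len(parts) > len(elt_parts) and hostname.endswith('.' + elt):
            return True   # hostname sits below a listed parent domain
    return False

assert allow_subdomain_match('foo.example.org', ['example.org'])
assert not allow_subdomain_match('evilexample.org', ['example.org'])
```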

View File

@@ -33,7 +33,6 @@ try:
 except:
 TorContactInfoParser = None
 class TrustorError(Exception): pass
 # https://stackoverflow.com/questions/2532053/validate-a-hostname-string
@@ -235,6 +234,9 @@ def oDownloadUrl(domain, sCAfile, timeout=30, host='127.0.0.1', port=9050):
 except Exception as e:
 LOG.warn(f"Could not import HTTPSAdapter {e}")
 HTTPSAdapter = None
+raise SystemExit(f"{e}")
+else:
+LOG.info(f"Loaded HTTPSAdapter")
 try:
 with requests.sessions.Session() as session:
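The change above makes a missing `HTTPSAdapter` fatal rather than silently ignored. For orientation, a hedged sketch of how such an adapter is typically mounted on the `requests` session that `oDownloadUrl` opens, routed through the local Tor SOCKS port; the module name `https_adapter` and the exact wiring are assumptions, since that code is not shown in this diff:

```python
# Sketch only: mounting the custom HTTPS adapter on a requests session and
# routing it through Tor's SOCKS proxy (needs requests[socks] installed).
# The module name https_adapter is a placeholder; the real oDownloadUrl may differ.
import requests
from https_adapter import HTTPSAdapter  # assumed location of the adapter file above

def oDownloadUrl_sketch(domain, sCAfile, timeout=30, host='127.0.0.1', port=9050):
    url = f"https://{domain}/.well-known/tor-relay/rsa-fingerprint.txt"
    with requests.sessions.Session() as session:
        session.mount('https://', HTTPSAdapter())
        return session.get(
            url,
            timeout=timeout,
            proxies={'https': f"socks5h://{host}:{port}"},
            verify=sCAfile)
```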