Added lookupdns.py

emdee 2022-11-14 11:59:33 +00:00
parent 2ea65cc181
commit 94c0834092
3 changed files with 97 additions and 86 deletions

View File

@@ -160,7 +160,7 @@ def lYamlBadNodes(sFile,
    global oBAD_NODES
    global lKNOWN_NODNS
    global lMAYBE_NODNS
    l = []
    if not yaml: return l
    if os.path.exists(sFile):
@@ -198,7 +198,7 @@ def lYamlGoodNodes(sFile='/etc/tor/torrc-goodnodes.yaml'):
    # yq '.Nodes.IntroductionPoints|.[]' < /etc/tor/torrc-goodnodes.yaml
    return l

-def bdomain_is_bad(domain):
+def bdomain_is_bad(domain, fp):
    global lKNOWN_NODNS
    if domain in lKNOWN_NODNS: return True
    if domain in lMAYBE_NODNS:
@@ -208,10 +208,11 @@ def bdomain_is_bad(domain):
            lKNOWN_NODNS.append(domain)
            lMAYBE_NODNS.remove(domain)
            return True
-    if '@' in domain:
-        LOG.warn(f"@ in domain {domain}")
-        return True
+    for elt in '@(){}$!':
+        if elt in domain:
+            LOG.warn(f"{elt} in domain {domain}")
+            return True
    return False

tBAD_URLS = set()
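Note: the new loop rejects a domain that contains any of several metacharacters rather than only '@'. A tiny standalone sketch of the same check (the function name and sample inputs are illustrative, not code from this repository):

import logging

LOG = logging.getLogger()

def domain_has_suspicious_chars(domain: str) -> bool:
    """Return True if the domain contains a character that never belongs in a hostname."""
    for elt in '@(){}$!':
        if elt in domain:
            LOG.warning("%s in domain %s", elt, domain)
            return True
    return False

assert domain_has_suspicious_chars('bad${domain}.example') is True
assert domain_has_suspicious_chars('relay.example.org') is False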
@@ -256,7 +257,7 @@ def aVerifyContact(a, fp, https_cafile, timeout=20, host='127.0.0.1', port=9050)
        if aCachedContact['email'] == a['email']:
            LOG.info(f"{fp} in aTRUST_DB_INDEX")
            return aCachedContact

    if 'url' not in keys:
        if 'uri' not in keys:
            a['url'] = ''
@@ -270,20 +271,21 @@ def aVerifyContact(a, fp, https_cafile, timeout=20, host='127.0.0.1', port=9050)
    c = a['url'].lstrip('https://').lstrip('http://').strip('/')
    a['url'] = 'https://' +c

    # domain should be a unique key for contacts
    domain = a['url'][8:]
-    if bdomain_is_bad(domain):
+    if bdomain_is_bad(domain, fp):
        LOG.warn(f"{domain} is bad from {a['url']}")
-        LOG.info(f"{domain} is bad from {a}")
+        LOG.debug(f"{fp} is bad from {a}")
        return a

    ip = zResolveDomain(domain)
    if ip == '':
        aFP_EMAIL[fp] = a['email']
        LOG.debug(f"{fp} {domain} does not resolve")
        lKNOWN_NODNS.append(domain)
        return {}

    if a['proof'] not in ['uri-rsa']:
        # only support uri for now
        if False and ub_ctx:
@@ -295,7 +297,7 @@ def aVerifyContact(a, fp, https_cafile, timeout=20, host='127.0.0.1', port=9050)
            pass
        LOG.warn(f"{fp} proof={a['proof']} not supported yet")
        return a

    LOG.debug(f"{len(keys)} contact fields for {fp}")
    url="https://"+domain+"/.well-known/tor-relay/rsa-fingerprint.txt"
    try:
@@ -328,7 +330,7 @@ def aVerifyContact(a, fp, https_cafile, timeout=20, host='127.0.0.1', port=9050)
        # any reason retry?
        tBAD_URLS.add(a['url'])
        return a

    if hasattr(o, 'text'):
        data = o.text
    else:
@@ -347,7 +349,7 @@ def aVerifyContact(a, fp, https_cafile, timeout=20, host='127.0.0.1', port=9050)
def aParseContactYaml(contact, fp):
    """
    See the Tor ContactInfo Information Sharing Specification v2
    https://nusenu.github.io/ContactInfo-Information-Sharing-Specification/
    """
    lelts = contact.split()
@@ -357,7 +359,7 @@ def aParseContactYaml(contact, fp):
        LOG.debug(f"{fp} {a}")
        return a
    key = ''
-    for elt in lets:
+    for elt in lelts:
        if key == '':
            key = elt
            continue
@@ -368,7 +370,7 @@ def aParseContactYaml(contact, fp):
def aParseContact(contact, fp):
    """
    See the Tor ContactInfo Information Sharing Specification v2
    https://nusenu.github.io/ContactInfo-Information-Sharing-Specification/
    """
    l = [line for line in contact.strip().replace('"', '').split(' ')
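Note: for readers unfamiliar with the ContactInfo format, here is a rough, self-contained sketch of the kind of key/value splitting aParseContact performs. The helper name and the sample contact string are illustrative assumptions, not code from this commit:

def parse_contact_fields(contact: str) -> dict:
    """Split a ContactInfo string into a dict on 'key:value' tokens."""
    fields = {}
    for token in contact.strip().replace('"', '').split():
        if ':' not in token:
            continue
        key, _, value = token.partition(':')
        fields[key] = value
    return fields

example = "email:ops[]example.org url:https://example.org proof:uri-rsa ciissversion:2"
print(parse_contact_fields(example)['proof'])  # -> uri-rsa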
@@ -424,22 +426,22 @@ def vsetup_logging(log_level, logfile=''):
    LOG.info(f"SSetting log_level to {log_level!s}")
    logging._levelToName = {
-        CRITICAL: 'CRITICAL',
-        ERROR: 'ERROR',
-        WARNING: 'WARN',
-        INFO: 'INFO',
-        DEBUG: 'DEBUG',
-        NOTSET: 'NOTSET',
+        logging.CRITICAL: 'CRITICAL',
+        logging.ERROR: 'ERROR',
+        logging.WARNING: 'WARN',
+        logging.INFO: 'INFO',
+        logging.DEBUG: 'DEBUG',
+        logging.NOTSET: 'NOTSET',
    }
    logging._nameToLevel = {
-        'CRITICAL': CRITICAL,
-        'FATAL': FATAL,
-        'ERROR': ERROR,
-        'WARN': WARNING,
-        'WARNING': WARNING,
-        'INFO': INFO,
-        'DEBUG': DEBUG,
-        'NOTSET': NOTSET,
+        'CRITICAL': logging.CRITICAL,
+        'FATAL': logging.FATAL,
+        'ERROR': logging.ERROR,
+        'WARN': logging.WARNING,
+        'WARNING': logging.WARNING,
+        'INFO': logging.INFO,
+        'DEBUG': logging.DEBUG,
+        'NOTSET': logging.NOTSET,
    }

def oMainArgparser(_=None):
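Note: the vsetup_logging change above simply qualifies the level constants through the logging module. A minimal sketch of the effect of remapping the private _levelToName table (a CPython implementation detail, shown here only to illustrate why the script touches it):

import logging

logging.basicConfig(level=logging.INFO)
log = logging.getLogger()
log.warning("formatted with the default 'WARNING' label")

# mirrors the override done in vsetup_logging:
logging._levelToName[logging.WARNING] = 'WARN'
log.warning("subsequent records carry the shorter 'WARN' label")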
@@ -490,7 +492,7 @@ def oMainArgparser(_=None):
    parser.add_argument('--bad_contacts', type=str,
                        default=os.path.join(ETC_DIR, 'badcontacts.yaml'),
                        help="Yaml file of bad contacts that bad FPs are using")
    parser.add_argument('--strict_nodes', type=int, default=0,
                        choices=[0,1],
                        help="Set StrictNodes: 1 is less anonymous but more secure, although some sites may be unreachable")
@@ -498,7 +500,7 @@ def oMainArgparser(_=None):
                        help="Seconds to wait for Tor to booststrap")
    parser.add_argument('--points_timeout', type=int, default=0,
                        help="Timeout for getting introduction points - must be long >120sec. 0 means disabled looking for IPs")
-    parser.add_argument('--log_level', type=int, default=10,
+    parser.add_argument('--log_level', type=int, default=20,
                        help="10=debug 20=info 30=warn 40=error")
    parser.add_argument('--bad_sections', type=str,
                        default='MyBadExit',
@@ -523,24 +525,24 @@ def vwrite_badnodes(oArgs, oBAD_NODES, slen):
        if os.path.exists(oArgs.bad_nodes):
            os.rename(oArgs.bad_nodes, bak)
        os.rename(tmp, oArgs.bad_nodes)

-def vwrite_goodnodes(oArgs, oGOOD_NODES, slen):
+def vwrite_goodnodes(oArgs, oGOOD_NODES, ilen):
    if oArgs.good_nodes:
        tmp = oArgs.good_nodes +'.tmp'
        bak = oArgs.good_nodes +'.bak'
        with open(tmp, 'wt') as oFYaml:
            yaml.dump(oGOOD_NODES, indent=2, stream=oFYaml)
-            LOG.info(f"Wrote {slen} good nodes to {oArgs.good_nodes}")
+            LOG.info(f"Wrote {ilen} good relays to {oArgs.good_nodes}")
        oFYaml.close()
        if os.path.exists(oArgs.good_nodes):
            os.rename(oArgs.good_nodes, bak)
        os.rename(tmp, oArgs.good_nodes)
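Note: both vwrite_* helpers follow a write-to-temp, keep-a-backup, then rename pattern. A generic, self-contained sketch of that pattern (names are illustrative and PyYAML is assumed to be installed):

import os
import yaml

def write_yaml_atomically(path: str, data) -> None:
    """Write YAML to path via a .tmp file, keeping the previous file as .bak."""
    tmp = path + '.tmp'
    bak = path + '.bak'
    with open(tmp, 'wt') as handle:
        yaml.dump(data, stream=handle, indent=2)
    if os.path.exists(path):
        os.rename(path, bak)   # keep the previous version around
    os.rename(tmp, path)       # swap the new file into place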

def iMain(lArgs):
    global aTRUST_DB
    global aTRUST_DB_INDEX
    global oBAD_NODES
    global oGOOD_NODES
    global lKNOWN_NODNS

    parser = oMainArgparser()
    oArgs = parser.parse_args(lArgs)
@@ -573,7 +575,7 @@ def iMain(lArgs):
                    continue
                aTRUST_DB_INDEX[fp] = v
            LOG.info(f"{len(aTRUST_DB_INDEX.keys())} good relays from {sFile}")
        except Exception as e:
            LOG.exception(f"Error reading YAML TrustDB {sFile} {e}")
@@ -625,17 +627,17 @@ def iMain(lArgs):
    texclude_set = set(lYamlBadNodes(oArgs.bad_nodes,
                                     lWanted=sections,
                                     section=sEXCLUDE_EXIT_KEY))
    LOG.info(f"Preloaded {len(texclude_set)} bad fps")

    ttrust_db_index = aTRUST_DB_INDEX.keys()
-    tdns_contacts = set()
+    tdns_urls = set()
    iFakeContact = 0
    iTotalContacts = 0
    aBadContacts = {}

    lConds = oArgs.contact.split(',')
    iR = 0
    relays = controller.get_server_descriptors()
    for relay in relays:
        iR += 1
@@ -643,15 +645,15 @@ def iMain(lArgs):
            LOG.warn('Invalid Fingerprint: %s' % relay.fingerprint)
            continue
        relay.fingerprint = relay.fingerprint.upper()

-        sofar = f"G:{len(aTRUST_DB.keys())} U:{len(tdns_contacts)} F:{iFakeContact} BF:{len(texclude_set)} GF:{len(ttrust_db_index)} TC:{iTotalContacts} #{iR}"
+        sofar = f"G:{len(aTRUST_DB.keys())} U:{len(tdns_urls)} F:{iFakeContact} BF:{len(texclude_set)} GF:{len(ttrust_db_index)} TC:{iTotalContacts} #{iR}"
        if not relay.exit_policy.is_exiting_allowed():
            if sEXCLUDE_EXIT_KEY == 'ExcludeNodes':
                pass # LOG.debug(f"{relay.fingerprint} not an exit {sofar}")
            else:
                pass # LOG.warn(f"{relay.fingerprint} not an exit {sofar}")
            # continue

        # great contact had good fps and we are in them
        if relay.fingerprint in aTRUST_DB_INDEX.keys():
            # a cached entry
@@ -660,54 +662,55 @@ def iMain(lArgs):
        if type(relay.contact) == bytes:
            # dunno
            relay.contact = str(relay.contact, 'UTF-8')

        if ('Empty' in lConds and not relay.contact) or \
           ('NoEmail' in lConds and relay.contact and not 'email:' in relay.contact):
            texclude_set.add(relay.fingerprint)
            continue

        if not relay.contact or not 'ciissversion:' in relay.contact:
            # should be unreached 'Empty' should always be in lConds
            continue
        iTotalContacts += 1

+        fp = relay.fingerprint
        if relay.contact and not 'url:' in relay.contact:
-            LOG.info(f"{relay.fingerprint} skipping bad contact - no url: {sofar}")
-            LOG.debug(f"{relay.fingerprint} {relay.contact} {sofar}")
-            texclude_set.add(relay.fingerprint)
+            LOG.info(f"{fp} skipping bad contact - no url: {sofar}")
+            LOG.debug(f"{fp} {relay.contact} {sofar}")
+            texclude_set.add(fp)
            continue

        c = relay.contact.lower()
        # first rough cut
        i = c.find('url:')
        if i >=0:
            c = c[i+4:]
        i = c.find(' ')
        if i >=0: c = c[:i]
        c = c.lstrip('https://').lstrip('http://').strip('/')
        i = c.find('/')
        if i >=0: c = c[:i]
        domain = c

-        if domain and bdomain_is_bad(domain):
-            LOG.info(f"{relay.fingerprint} skipping bad {domain} {sofar}")
-            LOG.debug(f"{relay.fingerprint} {relay.contact} {sofar}")
-            texclude_set.add(relay.fingerprint)
+        if domain and bdomain_is_bad(domain, fp):
+            LOG.info(f"{fp} skipping bad {domain} {sofar}")
+            LOG.debug(f"{fp} {relay.contact} {sofar}")
+            texclude_set.add(fp)
            continue

        if domain:
            ip = zResolveDomain(domain)
            if not ip:
-                LOG.warn(f"{relay.fingerprint} {domain} did not resolve {sofar}")
-                texclude_set.add(relay.fingerprint)
+                LOG.warn(f"{fp} {domain} did not resolve {sofar}")
+                texclude_set.add(fp)
                lKNOWN_NODNS.append(domain)
                iFakeContact += 1
                continue

        if 'dns-rsa' in relay.contact.lower():
            target = f"{relay.fingerprint}.{domain}"
            LOG.info(f"skipping 'dns-rsa' {target} {sofar}")
-            tdns_contacts.add(target)
+            tdns_urls.add(target)

        elif 'proof:uri-rsa' in relay.contact.lower():
            a = aParseContact(relay.contact, relay.fingerprint)
            if not a:
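Note: the "first rough cut" above pulls a bare domain out of the url: field of a contact string. A standalone sketch of that extraction (the helper name and sample input are illustrative). Be aware that str.lstrip('https://') strips a character set rather than a prefix, so domains beginning with any of those letters can be over-stripped; the sketch mirrors the original behaviour rather than correcting it:

def extract_domain(contact: str) -> str:
    """Rough-cut extraction of the domain following 'url:' in a contact string."""
    c = contact.lower()
    i = c.find('url:')
    if i >= 0:
        c = c[i + 4:]
    i = c.find(' ')
    if i >= 0:
        c = c[:i]
    # lstrip removes a *set* of characters here, mirroring the code above
    c = c.lstrip('https://').lstrip('http://').strip('/')
    i = c.find('/')
    if i >= 0:
        c = c[:i]
    return c

print(extract_domain("ciissversion:2 url:https://example.org/ email:x[]example.org"))
# -> example.org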
@@ -730,7 +733,7 @@ def iMain(lArgs):
                iFakeContact += 1
                texclude_set.add(relay.fingerprint)
                continue

            b = aVerifyContact(list(a.values())[0],
                               relay.fingerprint,
@@ -738,7 +741,7 @@ def iMain(lArgs):
                               timeout=oArgs.timeout,
                               host=oArgs.proxy_host,
                               port=oArgs.proxy_port)

            if not b or not 'fps' in b or not b['fps'] or not b['url']:
                LOG.warn(f"{relay.fingerprint} did NOT VERIFY {sofar}")
                LOG.debug(f"{relay.fingerprint} {b} {sofar}")
@@ -747,7 +750,7 @@ def iMain(lArgs):
                texclude_set.add(relay.fingerprint)
                aBadContacts[relay.fingerprint] = b
                continue

            if relay.fingerprint not in b['fps']:
                LOG.warn(f"{relay.fingerprint} the FP IS NOT in the list of fps {sofar}")
                # assume a fp is using a bogus contact
@@ -766,9 +769,11 @@ def iMain(lArgs):
        with open(proof_output_tmp, 'wt') as oFYaml:
            yaml.dump(aTRUST_DB, indent=2, stream=oFYaml)
            oFYaml.close()

    LOG.info(f"Filtered {len(twhitelist_set)} whitelisted relays")
    texclude_set = texclude_set.difference(twhitelist_set)
+    # accept the dns-rsa urls for now until we test them
+    texclude_set = texclude_set.difference(tdns_urls)
    LOG.info(f"{len(list(aTRUST_DB.keys()))} good contacts out of {iTotalContacts}")

    if oArgs.proof_output and aTRUST_DB:
@@ -785,7 +790,7 @@ def iMain(lArgs):
        with open(oArgs.torrc_output, 'wt') as oFTorrc:
            oFTorrc.write(f"{sEXCLUDE_EXIT_KEY} {','.join(texclude_set)}\n")
            oFTorrc.write(f"{sINCLUDE_EXIT_KEY} {','.join(aTRUST_DB_INDEX.keys())}\n")
-            oFTorrc.write(f"{sINCLUDE_GUARD_KEY} {','.join(o[oGOOD_ROOT]['GuardNodes'])}\n")
+            oFTorrc.write(f"{sINCLUDE_GUARD_KEY} {','.join(oGOOD_NODES[oGOOD_ROOT]['GuardNodes'])}\n")
            LOG.info(f"Wrote tor configuration to {oArgs.torrc_output}")
            oFTorrc.close()
@@ -798,10 +803,10 @@ def iMain(lArgs):
        oBAD_NODES[oBAD_ROOT]['ExcludeNodes']['BadExit'] = list(texclude_set)
        oBAD_NODES[oBAD_ROOT]['ExcludeDomains'] = lKNOWN_NODNS
        vwrite_badnodes(oArgs, oBAD_NODES, str(len(texclude_set)))

    oGOOD_NODES['GoodNodes']['Relays']['ExitNodes'] = list(aTRUST_DB_INDEX.keys())
    # GuardNodes are readonl
-    vwrite_goodnodes(oArgs, oGOOD_NODES, str(len(ttrust_db_index)))
+    vwrite_goodnodes(oArgs, oGOOD_NODES, len(aTRUST_DB_INDEX.keys()))

    retval = 0
    try:
        logging.getLogger('stem').setLevel(30)
@@ -838,7 +843,7 @@ def iMain(lArgs):
            LOG.errro(f"Failed setting {sINCLUDE_EXIT_KEY} good exit nodes in Tor")
            retval += 1

-        LOG.info("dns-rsa domains:\n{'\n'.join(tdns_contacts)}")
+        sys.stdout.write("dns-rsa domains:\n" +'\n'.join(tdns_urls) +'\n')
        return retval

    except InvalidRequest as e:

View File

@@ -27,9 +27,8 @@ LOG = logging.getLogger()
bHAVE_TORR = shutil.which('tor-resolve')

-# maybe we should check these each time but we
-# got them by sorting bad relays in the wild
-# we'll keep a copy here
+# we check these each time but we got them by sorting bad relays
+# in the wild we'll keep a copy here so we can avoid restesting
yKNOWN_NODNS = """
---
- 0x0.is
@@ -50,6 +49,7 @@ yKNOWN_NODNS = """
- or.wowplanet.de
- ormycloud.org
- plied-privacy.net
+- rivacysvcs.net
- redacted.org
- rification-for-nusenu.net
- rofl.cat

View File

@@ -52,7 +52,7 @@ def read_local_trust_config(trust_config):
    '''
    result = []

    # for now we support max_depth = 0 only
    # this PoC version has no support for recursion
    # https://github.com/nusenu/tor-relay-operator-ids-trust-information#trust-information-consumers
@@ -140,7 +140,11 @@ def get_controller(address='127.0.0.1', port=9151, password=''):
    return controller

-def find_validation_candidates(controller, trusted_domains=[],validation_cache=[],accept_all=False):
+def find_validation_candidates(controller,
+                               trusted_domains=[],
+                               validation_cache=[],
+                               CAfile='/etc/ssl/certs/ca-certificates.crt',
+                               accept_all=False):
    '''
    connect to a tor client via controlport and return a dict of all
    not yet validated fingerprints per trusted operators
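Note: a hedged usage sketch of the widened signature; the controller setup mirrors the get_controller call above, and the domain list and other argument values are illustrative assumptions, not repository defaults:

controller = get_controller(address='127.0.0.1', port=9151, password='')
candidates = find_validation_candidates(
    controller,
    trusted_domains=['example-operator.org'],   # hypothetical operator domain
    validation_cache=[],                        # fingerprints already proven
    CAfile='/etc/ssl/certs/ca-certificates.crt',
    accept_all=False)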
@@ -221,14 +225,14 @@ def oDownloadUrlRequests(uri, sCAfile, timeout=30, host='127.0.0.1', port=9050):
        head = requests.head(uri, timeout=timeout, proxies=proxy, headers=headers)
    except Exception as e:
        raise TrustorError(f"HTTP HEAD request failed for {uri} {e}")

    if head.status_code >= 300:
        raise TrustorError(f"HTTP Errorcode {head.status_code}")
    if not head.headers['Content-Type'].startswith('text/plain'):
        raise TrustorError(f"HTTP Content-Type != text/plain")
    if not os.path.exists(sCAfile):
        raise TrustorError(f"File not found CAfile {sCAfile}")

    try:
        with requests.sessions.Session() as session:
            oReqResp = session.request(method="get", url=uri,
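Note: for context, a minimal sketch of fetching a relay operator's well-known proof file through a Tor SOCKS proxy with requests. The proxy port, the example domain, and the helper name are assumptions; requests needs the PySocks extra for socks5h:// proxy URLs:

import requests

def fetch_rsa_fingerprint_proof(domain: str, timeout: int = 30,
                                host: str = '127.0.0.1', port: int = 9050) -> str:
    """Download /.well-known/tor-relay/rsa-fingerprint.txt for a domain via Tor."""
    uri = f"https://{domain}/.well-known/tor-relay/rsa-fingerprint.txt"
    # socks5h resolves the hostname inside the proxy, keeping DNS inside Tor
    proxies = {'https': f'socks5h://{host}:{port}'}
    resp = requests.get(uri, timeout=timeout, proxies=proxies)
    resp.raise_for_status()
    return resp.text

# Hypothetical usage:
# print(fetch_rsa_fingerprint_proof('example.org'))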
@@ -336,7 +340,7 @@ def my_match_hostname(cert, hostname):
    else:
        raise CertificateError(
            "no appropriate commonName or subjectAltName fields were found"
        )

match_hostname = my_match_hostname
from urllib3.util.ssl_ import (
    is_ipaddress,
@@ -393,15 +397,15 @@ def oDownloadUrlUrllib3(uri, sCAfile, timeout=30, host='127.0.0.1', port=9050):
                           retries=False)
    except Exception as e:
        LOG.error(f"HTTP HEAD request failed for {uri} {e}")
        raise

    if head.status >= 300:
        raise TrustorError(f"HTTP Errorcode {head.status}")
    if not head.headers['Content-Type'].startswith('text/plain'):
        raise TrustorError(f"HTTP Content-Type != text/plain")
    if not os.path.exists(sCAfile):
        raise TrustorError(f"File not found CAfile {sCAfile}")

    try:
        oReqResp = proxy.request("GET", uri,
                                 headers=headers,
@@ -420,7 +424,7 @@ def oDownloadUrlUrllib3(uri, sCAfile, timeout=30, host='127.0.0.1', port=9050):
        LOG.error(f'Redirect detected %s vs %s (final)' % (uri, oReqResp.geturl()))
        raise TrustorError(f'Redirect detected %s vs %s (final)' % (uri, oReqResp.geturl()))
    oReqResp.decode_content = True

    return oReqResp

import urllib3.connectionpool
urllib3.connectionpool.VerifiedHTTPSConnection = HTTPSConnection
@@ -483,7 +487,7 @@ def idns_validate(domain,
    # this is not the system wide /etc/resolv.conf
    # use dnscrypt-proxy to encrypt your DNS and route it via tor's SOCKSPort
    ctx = ub_ctx()
    if (os.path.isfile(libunbound_resolv_file)):
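Note: idns_validate drives libunbound (the python-unbound bindings) with a non-system resolv.conf; the dns-rsa proof itself looks up records under <fingerprint>.<domain>. A rough sketch of the resolver wiring only, using a plain A lookup; the resolver path, record type, and function name are assumptions, not what the script necessarily does:

import os
from unbound import ub_ctx, RR_TYPE_A, RR_CLASS_IN

def resolve_a(domain: str, resolv_conf: str = '/etc/resolv.conf'):
    """Resolve A records for a domain through a dedicated libunbound context."""
    ctx = ub_ctx()
    if os.path.isfile(resolv_conf):
        ctx.resolvconf(resolv_conf)  # point libunbound at a specific resolver
    status, result = ctx.resolve(domain, RR_TYPE_A, RR_CLASS_IN)
    if status == 0 and result.havedata:
        return result.data.address_list
    return []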
@@ -529,6 +533,7 @@ def configure_tor(controller, trusted_fingerprints, exitonly=True):
if __name__ == '__main__':
+    CAfile = '/etc/ssl/certs/ca-certificates.crt'
    trust_config = 'trust_config'
    assert os.path.exists(trust_config)
    trusted_domains = read_local_trust_config(trust_config)
@@ -546,7 +551,8 @@ if __name__ == '__main__':
    r = find_validation_candidates(controller,
                                   validation_cache=trusted_fingerprints,
-                                   trusted_domains=trusted_domains)
+                                   trusted_domains=trusted_domains,
+                                   CAfile=CAfile)
    validate_proofs(r, validation_cache_file,
                    timeout=timeout,
                    host=controller_address,