#!/usr/local/bin/python3.sh # -*- mode: python; indent-tabs-mode: nil; py-indent-offset: 4; coding: utf-8 -*- # https://github.com/nusenu/noContactInfo_Exit_Excluder # https://github.com/TheSmashy/TorExitRelayExclude """ This extends nusenu's basic idea of using the stem library to dynamically exclude nodes that are likely to be bad by putting them on the ExcludeNodes or ExcludeExitNodes setting of a running Tor. * https://github.com/nusenu/noContactInfo_Exit_Excluder * https://github.com/TheSmashy/TorExitRelayExclude The basic idea is to exclude Exit nodes that do not have ContactInfo: * https://github.com/nusenu/ContactInfo-Information-Sharing-Specification That can be extended to relays that do not have an email in the contact, or to relays that do not have ContactInfo that is verified to include them. """ __prolog__ = __doc__ sGOOD_NODES = """ --- GoodNodes: EntryNodes: [] Relays: # ExitNodes will be overwritten by this program ExitNodes: [] IntroductionPoints: [] # use the Onions section to list onion services you want the # Introduction Points whitelisted - these points may change daily # Look in tor's notice.log for 'Every introduction point for service' Onions: [] # use the Services list to list elays you want the whitelisted # Look in tor's notice.log for 'Wanted to contact directory mirror' Services: [] """ sBAD_NODES = """ BadNodes: # list the internet domains you know are bad so you don't # waste time trying to download contacts from them. ExcludeDomains: [] ExcludeNodes: # BadExit will be overwritten by this program BadExit: [] # list MyBadExit in --bad_sections if you want it used, to exclude nodes # or any others as a list separated by comma(,) MyBadExit: [] """ __doc__ +=f"""But there's a problem, and your Tor notice.log will tell you about it: you could exclude the relays needed to access hidden services or mirror directories. So we need to add to the process the concept of a whitelist. In addition, we may have our own blacklist of nodes we want to exclude, or use these lists for other applications like selektor. So we make two files that are structured in YAML: ``` /etc/tor/yaml/torrc-goodnodes.yaml {sGOOD_NODES} By default all sections of the goodnodes.yaml are used as a whitelist. Use the GoodNodes/Onions list to list onion services you want the Introduction Points whitelisted - these points may change daily Look in tor's notice.log for warnings of 'Every introduction point for service' ```--hs_dir``` ```default='/var/lib/tor'``` will make the program parse the files named ```hostname``` below this dir to find Hidden Services to whitelist. The Introduction Points can change during the day, so you may want to rerun this program to freshen the list of Introduction Points. A full run that processes all the relays from stem can take 30 minutes, or run with: ```--saved_only``` will run the program with just cached information on the relats, but will update the Introduction Points from the Services. /etc/tor/yaml/torrc-badnodes.yaml {sBAD_NODES} ``` That part requires [PyYAML](https://pyyaml.org/wiki/PyYAML) https://github.com/yaml/pyyaml/ or ```ruamel```: do ```pip3 install ruamel``` or ```pip3 install PyYAML```; the advantage of the former is that it preserves comments. (You may have to run this as the Tor user to get RW access to /run/tor/control, in which case the directory for the YAML files must be group Tor writeable, and its parent's directories group Tor RX.) Because you don't want to exclude the introduction points to any onion you want to connect to, ```--white_onions``` should whitelist the introduction points to a comma sep list of onions; we fixed stem to do this: * https://github.com/torproject/stem/issues/96 * https://gitlab.torproject.org/legacy/trac/-/issues/25417 Use the GoodNodes/Onions list in goodnodes.yaml to list onion services you want the Introduction Points whitelisted - these points may change daily. Look in tor's notice.log for 'Every introduction point for service' ```notice_log``` will parse the notice log for warnings about relays and services that will then be whitelisted. ```--torrc``` will read a file like /etc/tor/torrc and make some suggestions based on what it finds; it will not edit or change the file. ```--torrc_output``` will write the torrc ExcludeNodes configuration to a file. ```--good_contacts``` will write the contact info as a ciiss dictionary to a YAML file. If the proof is uri-rsa, the well-known file of fingerprints is downloaded and the fingerprints are added on a 'fps' field we create of that fingerprint's entry of the YAML dictionary. This file is read at the beginning of the program to start with a trust database, and only new contact info from new relays are added to the dictionary. Now for the final part: we lookup the Contact info of every relay that is currently in our Tor, and check it the existence of the well-known file that lists the fingerprints of the relays it runs. If it fails to provide the well-know url, we assume its a bad relay and add it to a list of nodes that goes on ```ExcludeNodes``` (not just ExcludeExitNodes```). If the Contact info is good, we add the list of fingerprints to ```ExitNodes```, a whitelist of relays to use as exits. ```--bad_on``` We offer the users 3 levels of cleaning: 1. clean relays that have no contact ```=Empty``` 2. clean relays that don't have an email in the contact (implies 1) ```=Empty,NoEmail``` 3. clean relays that don't have "good' contactinfo. (implies 1) ```=Empty,NoEmail,NotGood``` The default is ```Empty,NoEmail,NotGood``` ; ```NoEmail``` is inherently imperfect in that many of the contact-as-an-email are obfuscated, but we try anyway. To be "good" the ContactInfo must: 1. have a url for the well-defined-file to be gotten 2. must have a file that can be gotten at the URL 3. must support getting the file with a valid SSL cert from a recognized authority 4. (not in the spec but added by Python) must use a TLS SSL > v1 5. must have a fingerprint list in the file 6. must have the FP that got us the contactinfo in the fingerprint list in the file. ```--wait_boot``` is the number of seconds to wait for Tor to booststrap ```--wellknown_output``` will make the program write the well-known files (```/.well-known/tor-relay/rsa-fingerprint.txt```) to a directory. ```--relays_output write the download relays in json to a file. The relays are downloaded from https://onionoo.torproject.org/details For usage, do ```python3 exclude_badExits.py --help` See [exclude_badExits.txt](./exclude_badExits.txt) """ # https://github.com/nusenu/trustor-example-trust-config/blob/main/trust_config # https://github.com/nusenu/tor-relay-operator-ids-trust-information import os import json import sys import tempfile import time from io import StringIO import logging import warnings import stem from stem import InvalidRequest from stem.connection import IncorrectPassword from stem.util.tor_tools import is_valid_fingerprint import urllib3 from urllib3.util.ssl_match_hostname import CertificateError # list(ipaddress._find_address_range(ipaddress.IPv4Network('172.16.0.0/12')) try: from ruamel.yaml import YAML yaml = YAML(typ='rt') yaml.indent(mapping=2, sequence=2) safe_load = yaml.load except: yaml = None if yaml is None: try: import yaml safe_load = yaml.safe_load except: yaml = None try: from unbound import RR_CLASS_IN, RR_TYPE_TXT, ub_ctx except: ub_ctx = RR_TYPE_TXT = RR_CLASS_IN = None from toxygen_wrapper.tests.support_onions import ( bAreWeConnected, icheck_torrc, lIntroductionPoints, yKNOWN_NODNS, zResolveDomain) from exclude_badExits.trustor_poc import TrustorError, idns_validate if False: import httpx import asyncio from exclude_badExits.trustor_poc import oDownloadUrlHttpx else: httpx = None from exclude_badExits.trustor_poc import oDownloadUrlUrllib3Socks as oDownloadUrl from exclude_badExits.torcontactinfo import TorContactInfoParser from exclude_badExits.exclude_utils import ( aCleanContact, sCleanEmail, aParseContact, oStemController, oMainArgparser, vwrite_goodnodes, vwrite_badnodes, vwrite_good_contacts, vwritefinale, vsetup_logging ) warnings.filterwarnings('ignore') global LOG LOG = logging.getLogger() oPARSER = TorContactInfoParser() aGOOD_CONTACTS_DB = {} aGOOD_CONTACTS_FPS = {} aBAD_CONTACTS_DB = {} aRELAYS_DB = {} aRELAYS_DB_INDEX = {} aFP_EMAIL = {} aDOMAIN_FPS = {} sDETAILS_URL = "https://metrics.torproject.org/rs.html#details/" # You can call this while bootstrapping sEXCLUDE_EXIT_GROUP = 'ExcludeNodes' sINCLUDE_EXIT_KEY = 'ExitNodes' oBAD_ROOT = 'BadNodes' aBAD_NODES = safe_load(sBAD_NODES) sGOOD_ROOT = 'GoodNodes' sINCLUDE_GUARD_KEY = 'EntryNodes' sEXCLUDE_DOMAINS = 'ExcludeDomains' aGOOD_NODES = safe_load(sGOOD_NODES) lKNOWN_NODNS = [] tMAYBE_NODNS = set() def tExcludeSet(oargs, sEXCLUDE_EXIT_GROUP): texclude_set = set() sections = {'BadExit'} if oargs.bad_nodes and os.path.exists(oargs.bad_nodes): if oargs.bad_sections: sections.update(oargs.bad_sections.split(',')) texclude_set = set(lYamlBadNodes(oargs.bad_nodes, tWanted=sections, section=sEXCLUDE_EXIT_GROUP)) LOG.info(f"Preloaded {len(texclude_set)} bad fps") return texclude_set def lYamlBadNodes(sFile, section=sEXCLUDE_EXIT_GROUP, tWanted=None): global aBAD_NODES global lKNOWN_NODNS global tMAYBE_NODNS l = [] if tWanted is None: tWanted = {'BadExit'} if not yaml: return l if os.path.exists(sFile): with open(sFile, 'rt') as oFd: aBAD_NODES = safe_load(oFd) root = sEXCLUDE_EXIT_GROUP # for elt in o[oBAD_ROOT][root][section].keys(): # if tWanted and elt not in tWanted: continue # # l += o[oBAD_ROOT][root][section][elt] for sub in tWanted: l += aBAD_NODES[oBAD_ROOT][sEXCLUDE_EXIT_GROUP][sub] tMAYBE_NODNS = set(safe_load(StringIO(yKNOWN_NODNS))) root = sEXCLUDE_DOMAINS if sEXCLUDE_DOMAINS in aBAD_NODES[oBAD_ROOT] and aBAD_NODES[oBAD_ROOT][sEXCLUDE_DOMAINS]: tMAYBE_NODNS.update(set(aBAD_NODES[oBAD_ROOT][sEXCLUDE_DOMAINS])) return l def lYamlGoodNodes(sFile='/etc/tor/torrc-goodnodes.yaml'): global aGOOD_NODES l = [] if not yaml: return l if os.path.exists(sFile): with open(sFile, 'rt') as oFd: o = safe_load(oFd) aGOOD_NODES = o if 'EntryNodes' in o[sGOOD_ROOT].keys(): l = o[sGOOD_ROOT]['EntryNodes'] # yq '.Nodes.IntroductionPoints|.[]' < /etc/tor/torrc-goodnodes.yaml return l def bdomain_is_bad(domain, fp): global lKNOWN_NODNS if domain in lKNOWN_NODNS: return True if domain in tMAYBE_NODNS: ip = zResolveDomain(domain) if ip == '': LOG.debug(f"{fp} {domain} does not resolve") lKNOWN_NODNS.append(domain) tMAYBE_NODNS.remove(domain) return True for elt in '@(){}$!': if elt in domain: LOG.warn(f"{elt} in domain {domain}") return True return False tBAD_URLS = set() lAT_REPS = ['[]', ' at ', '(at)', '[at]', '', '(att)', '_at_', '~at~', '.at.', '!at!', 't', '<(a)>', '|__at-|', '<:at:>', '[__at ]', '"a t"', 'removeme at ', ' a7 ', '{at-}' '[at}', 'atsign', '-at-', '(at_sign)', 'a.t', 'atsignhere', ' _a_ ', ' (at-sign) ', "'at sign'", '(a)', ' atsign ', '(at symbol)', ' anat ', '=at=', '-at-', '-dot-', ' [a] ','(at)', '', '[at sign]', '"at"', '{at}', '-----symbol for email----', '[at@]', '(at sign here)', '==at', '|=dot|','/\t', ] lDOT_REPS = [' point ', ' dot ', '[dot]', '(dot)', '_dot_', '!dot!', '<.>', '<:dot:>', '|dot--|', ' d07 ', '', '(dot]', '{dot)', 'd.t', "'dot'", '(d)', '-dot-', ' adot ', '(d)', ' . ', '[punto]', '(point)', '"dot"', '{.}', '--separator--', '|=dot|', ' period ', ')dot(', ] lNO_EMAIL = [ '', '', '', '@snowden', 'ano ano@fu.dk', 'anonymous', 'anonymous@buzzzz.com', 'check http://highwaytohoell.de', 'no-spam@tor.org', 'no@no.no', 'noreply@bytor.com', 'not a person ', 'not@needed.com', 'not@needed.com', 'not@re.al', 'nothanks', 'nottellingyou@mail.info', 'ur@mom.com', 'your@e-mail', 'your@email.com', r'', ] # def bVerifyContact(lAT_REPS, lDOT_REPS, lNO_EMAIL, a, fp, https_cafile=None): global aFP_EMAIL global tBAD_URLS global lKNOWN_NODNS global aGOOD_CONTACTS_DB global aGOOD_CONTACTS_FPS assert a assert fp assert https_cafile keys = list(a.keys()) a = aCleanContact(a, lAT_REPS, lDOT_REPS, lNO_EMAIL) a['fp'] = fp if 'email' not in keys: a['email'] = '' if 'ciissversion' not in keys: aFP_EMAIL[fp] = a['email'] LOG.warn(f"{fp} 'ciissversion' not in {keys}") return a # test the url for fps and add it to the array if 'proof' not in keys: aFP_EMAIL[fp] = a['email'] LOG.warn(f"{fp} 'proof' not in {keys}") return a if aGOOD_CONTACTS_FPS and fp in aGOOD_CONTACTS_FPS.keys(): aCachedContact = aGOOD_CONTACTS_FPS[fp] if aCachedContact['email'] == a['email']: LOG.info(f"{fp} in aGOOD_CONTACTS_FPS") return aCachedContact if 'url' not in keys: if 'uri' not in keys: a['url'] = '' aFP_EMAIL[fp] = a['email'] LOG.warn(f"{fp} url and uri not in {keys}") return a a['url'] = a['uri'] aFP_EMAIL[fp] = a['email'] LOG.debug(f"{fp} 'uri' but not 'url' in {keys}") # drop through domain = a['url'].replace('https://', '').replace('http://', '') # domain should be a unique key for contacts? if bdomain_is_bad(domain, fp): LOG.warn(f"{domain} is bad - {a['url']}") LOG.debug(f"{fp} is bad from {a}") return a ip = zResolveDomain(domain) if ip == '': aFP_EMAIL[fp] = a['email'] LOG.debug(f"{fp} {domain} does not resolve") lKNOWN_NODNS.append(domain) return a return True def oVerifyUrl(url, domain, fp=None, https_cafile=None, timeout=20, host='127.0.0.1', port=9050, oargs=None): if bAreWeConnected() is False: raise SystemExit("we are not connected") if url in tBAD_URLS: LOG.debug(f"BC Known bad url from {domain} for {fp}") return None o = None try: if httpx: LOG.debug(f"Downloading from {domain} for {fp}") # await o = oDownloadUrl(url, https_cafile, timeout=timeout, host=host, port=port, content_type='text/plain') else: LOG.debug(f"Downloading from {domain} for {fp}") o = oDownloadUrl(url, https_cafile, timeout=timeout, host=host, port=port, content_type='text/plain') # requests response: text "reason", "status_code" except AttributeError as e: LOG.exception(f"BC AttributeError downloading from {domain} {e}") tBAD_URLS.add(url) except CertificateError as e: LOG.warn(f"BC CertificateError downloading from {domain} {e}") tBAD_URLS.add(url) except TrustorError as e: if e.args == "HTTP Errorcode 404": #? aFP_EMAIL[fp] = a['email'] LOG.warn(f"BC TrustorError 404 from {domain} {e.args}") else: LOG.warn(f"BC TrustorError downloading from {domain} {e.args}") tBAD_URLS.add(url) except (urllib3.exceptions.MaxRetryError, urllib3.exceptions.ProtocolError,) as e: # noqa # # maybe offline - not bad LOG.warn(f"BC MaxRetryError downloading from {domain} {e}") except (BaseException) as e: LOG.warn(f"BC Exception {type(e)} downloading from {domain} {e}") else: return o return None # async # If we keep a cache of FPs that we have gotten by downloading a URL # we can avoid re-downloading the URL of other FP in the list of relays. # If we paralelize the gathering of the URLs, we may have simultaneous # gathers of the same URL from different relays, defeating the advantage # of going parallel. The cache is global aDOMAIN_FPS. def aVerifyContact(a, fp, https_cafile=None, timeout=20, host='127.0.0.1', port=9050, oargs=None): global aFP_EMAIL global tBAD_URLS global lKNOWN_NODNS global aDOMAIN_FPS global aBAD_CONTACTS_DB assert a assert fp assert https_cafile domain = a['url'].replace('https://', '').replace('http://', '').rstrip('/') a['url'] = 'https://' + domain if domain in aDOMAIN_FPS.keys(): a['fps'] = aDOMAIN_FPS[domain] return a r = bVerifyContact(lAT_REPS, lDOT_REPS, lNO_EMAIL, a, fp, https_cafile=https_cafile) if r is not True: return r if a['url'] in tBAD_URLS: a['fps'] = [] return a if a['proof'] == 'dns-rsa': if ub_ctx: fp_domain = fp + '.' + domain if idns_validate(fp_domain, libunbound_resolv_file='resolv.conf', dnssec_DS_file='dnssec-root-trust', ) == 0: LOG.warn(f"{fp} proof={a['proof']} - validated good") a['fps'] = [fp] aGOOD_CONTACTS_FPS[fp] = a else: a['fps'] = [] return a # only test url for now drop through url = a['url'] else: url = a['url'] + "/.well-known/tor-relay/rsa-fingerprint.txt" o = oVerifyUrl(url, domain, fp=fp, https_cafile=https_cafile, timeout=timeout, host=host, port=port, oargs=oargs) if not o: LOG.warn(f"BC Failed Download from {url} ") a['fps'] = [] tBAD_URLS.add(url) aBAD_CONTACTS_DB[fp] = a elif a['proof'] == 'dns-rsa': # well let the test of the URL be enough for now LOG.debug(f"Downloaded from {url} ") a['fps'] = [fp] aDOMAIN_FPS[domain] = a['fps'] elif a['proof'] == 'uri-rsa': a = aContactFps(oargs, a, fp, o, domain) if a['fps']: LOG.debug(f"Downloaded from {url} {len(a['fps'])} FPs for {fp}") else: aBAD_CONTACTS_DB[fp] = a LOG.debug(f"BC Downloaded from {url} NO FPs for {fp}") aDOMAIN_FPS[domain] = a['fps'] return a def aContactFps(oargs, a, fp, o, domain): global aFP_EMAIL global tBAD_URLS global aDOMAIN_FPS if hasattr(o, 'status'): status_code = o.status else: status_code = o.status_code if status_code >= 300: aFP_EMAIL[fp] = a['email'] LOG.warn(f"Error from {domain} {status_code} {o.reason}") # any reason retry? tBAD_URLS.add(a['url']) return a if hasattr(o, 'text'): data = o.text else: data = str(o.data, 'UTF-8') l = data.upper().strip().split('\n') LOG.debug(f"Downloaded from {domain} {len(l)} lines {len(data)} bytes") if oargs.wellknown_output: sdir = os.path.join(oargs.wellknown_output, domain, '.well-known', 'tor-relay') sfile = os.path.join(sdir, "rsa-fingerprint.txt") try: if not os.path.isdir(sdir): os.makedirs(sdir) with open(sfile, 'wt') as oFd: oFd.write(data) except Exception as e: LOG.warn(f"Error writing {sfile} {e}") a['modified'] = int(time.time()) if not l: LOG.warn(f"Downloaded from {domain} empty for {fp}") else: a['fps'] = [elt.strip() for elt in l if elt \ and len(elt) == 40 \ and not elt.startswith('#')] LOG.info(f"Downloaded from {domain} {len(a['fps'])} FPs") return a def lget_onionoo_relays(oargs): import requests adata = {} if oargs.relays_output and os.path.exists(oargs.relays_output): # and less than a day old? LOG.info(f"Getting OO relays from {oargs.relays_output}") try: with open(oargs.relays_output, 'rt') as ofd: sdata = ofd.read() adata = json.loads(sdata) except Exception as e: LOG.error(f"Getting data relays from {oargs.relays_output}") adata = {} if not adata: surl = "https://onionoo.torproject.org/details" LOG.info(f"Getting OO relays from {surl}") sCAfile = oargs.https_cafile assert os.path.exists(sCAfile), sCAfile if True: try: o = oDownloadUrl(surl, sCAfile, timeout=oargs.timeout, host=oargs.proxy_host, port=oargs.proxy_port, content_type='') if hasattr(o, 'text'): sdata = o.text else: sdata = str(o.data, 'UTF-8') except Exception as e: # simplejson.errors.JSONDecodeError # urllib3.exceptions import ConnectTimeoutError, NewConnectionError # (urllib3.exceptions.MaxRetryError, urllib3.exceptions.ProtocolError,) LOG.exception(f"JSON error {e}") return [] else: LOG.debug(f"Downloaded {surl} {len(sdata)} bytes") adata = json.loads(sdata) else: odata = requests.get(surl, verify=sCAfile) try: adata = odata.json() except Exception as e: # simplejson.errors.JSONDecodeError LOG.exception(f"JSON error {e}") return [] else: LOG.debug(f"Downloaded {surl} {len(adata)} relays") sdata = repr(adata) if oargs.relays_output: try: with open(oargs.relays_output, 'wt') as ofd: ofd.write(sdata) except Exception as e: LOG.warn(f"Error {oargs.relays_output} {e}") else: LOG.debug(f"Wrote {oargs.relays_output} {len(sdata)} bytes") lonionoo_relays = [r for r in adata["relays"] if 'fingerprint' in r.keys()] return lonionoo_relays def bProcessContact(b, texclude_set, aBadContacts, iFakeContact=0): global aGOOD_CONTACTS_DB global aGOOD_CONTACTS_FPS sofar = '' fp = b['fp'] # need to skip urllib3.exceptions.MaxRetryError if not b or 'fps' not in b or not b['fps'] or not b['url']: LOG.warn(f"{fp} did NOT VERIFY {sofar}") LOG.debug(f"{fp} {b} {sofar}") # If it's giving contact info that doesnt check out # it could be a bad exit with fake contact info texclude_set.add(fp) aBadContacts[fp] = b return None if fp not in b['fps']: LOG.warn(f"{fp} the FP IS NOT in the list of fps {sofar}") # assume a fp is using a bogus contact texclude_set.add(fp) aBadContacts[fp] = b return False LOG.info(f"{fp} GOOD {b['url']} {sofar}") # add our contact info to the trustdb aGOOD_CONTACTS_DB[fp] = b for elt in b['fps']: aGOOD_CONTACTS_FPS[elt] = b return True lNOT_IN_RELAYS_DB = [] def bCheckFp(relay, sofar, lConds, texclude_set): global aGOOD_CONTACTS_DB global aGOOD_CONTACTS_FPS global lNOT_IN_RELAYS_DB if not is_valid_fingerprint(relay.fingerprint): LOG.warn('Invalid Fingerprint: %s' % relay.fingerprint) return None fp = relay.fingerprint if aRELAYS_DB and fp not in aRELAYS_DB.keys(): LOG.warn(f"{fp} not in aRELAYS_DB") lNOT_IN_RELAYS_DB += [fp] if not relay.exit_policy.is_exiting_allowed(): if sEXCLUDE_EXIT_GROUP == sEXCLUDE_EXIT_GROUP: pass # LOG.debug(f"{fp} not an exit {sofar}") else: pass # LOG.warn(f"{fp} not an exit {sofar}") # return None # great contact had good fps and we are in them if fp in aGOOD_CONTACTS_FPS.keys(): # a cached entry return None if type(relay.contact) == bytes: # dunno relay.contact = str(relay.contact, 'UTF-8') # fail if the contact is empty if ('Empty' in lConds and not relay.contact): LOG.debug(f"{fp} skipping empty contact - Empty {sofar}") texclude_set.add(fp) return None contact = sCleanEmail(relay.contact, lAT_REPS, lDOT_REPS, lNO_EMAIL) # fail if the contact has no email - unreliable if 'NoEmail' in lConds and relay.contact and \ ('@' not in contact): LOG.debug(f"{fp} skipping contact - NoEmail {contact} {sofar}") # LOG.spam(f"{fp} {relay.contact} {sofar}") texclude_set.add(fp) return None # fail if the contact does not pass if ('NotGood' in lConds and relay.contact and ('ciissversion:' not in relay.contact)): LOG.debug(f"{fp} skipping no ciissversion in contact {sofar}") # LOG.spam(f"{fp} {relay.contact} {sofar}") texclude_set.add(fp) return None # fail if the contact does not have url: to pass if relay.contact and 'url' not in relay.contact: LOG.debug(f"{fp} skipping unfetchable contact - no url {sofar}") # LOG.spam(f"{fp} {relay.contact} {sofar}") if ('NotGood' in lConds): texclude_set.add(fp) return None return True def oMainPreamble(lArgs): global LOG global aGOOD_CONTACTS_DB global aGOOD_CONTACTS_FPS parser = oMainArgparser( __prolog__= __prolog__) oargs = parser.parse_args(lArgs) vsetup_logging(LOG, oargs.log_level, stream=sys.stdout) if bAreWeConnected() is False: raise SystemExit("we are not connected") sFile = oargs.torrc if sFile and os.path.exists(sFile): icheck_torrc(sFile, oargs) sFile = oargs.good_contacts if sFile and os.path.exists(sFile): try: with open(sFile, 'rt') as oFd: aGOOD_CONTACTS_DB = safe_load(oFd) LOG.info(f"{len(aGOOD_CONTACTS_DB.keys())} trusted contacts from {sFile}") # reverse lookup of fps to contacts # but... for (k, v,) in aGOOD_CONTACTS_DB.items(): if 'modified' not in v.keys(): v['modified'] = int(time.time()) aGOOD_CONTACTS_FPS[k] = v if 'fps' in aGOOD_CONTACTS_DB[k].keys(): for fp in aGOOD_CONTACTS_DB[k]['fps']: if fp in aGOOD_CONTACTS_FPS: continue aGOOD_CONTACTS_FPS[fp] = v LOG.info(f"{len(aGOOD_CONTACTS_FPS.keys())} good relays from {sFile}") except Exception as e: LOG.exception(f"Error reading YAML TrustDB {sFile} {e}") return oargs def tWhitelistSet(oargs, controller): twhitelist_set = set() twhitelist_set.update(set(lYamlGoodNodes(oargs.good_nodes))) LOG.info(f"lYamlGoodNodes {len(twhitelist_set)} EntryNodes from {oargs.good_nodes}") t = set() if 'IntroductionPoints' in aGOOD_NODES[sGOOD_ROOT]['Relays'].keys(): t = set(aGOOD_NODES[sGOOD_ROOT]['Relays']['IntroductionPoints']) if oargs.hs_dir and os.path.exists(oargs.hs_dir): for (dirpath, dirnames, filenames,) in os.walk(oargs.hs_dir): for f in filenames: if f != 'hostname': continue with open(os.path.join(dirpath, f), 'rt') as oFd: son = oFd.read() t.update(son) LOG.debug(f"Added {son} to the list for Introduction Points") if oargs.notice_log and os.path.exists(oargs.notice_log): tmp = tempfile.mktemp() i = os.system(f"grep 'Every introduction point for service' {oargs.notice_log} |sed -e 's/.* service //' -e 's/ is .*//'|sort -u |sed -e '/ /d' > {tmp}") if i: with open(tmp, 'rt') as oFd: tnew = {elt.strip() for elt in oFd.readlines()} t.update(tnew) LOG.info(f"Whitelist {len(tnew)} services to {oargs.notice_log}") os.remove(tmp) w = set() if sGOOD_ROOT in aGOOD_NODES and 'Services' in aGOOD_NODES[sGOOD_ROOT].keys(): w = set(aGOOD_NODES[sGOOD_ROOT]['Services']) if len(w) > 0: LOG.info(f"Whitelist {len(w)} relays from {sGOOD_ROOT}/Services") if oargs.notice_log and os.path.exists(oargs.notice_log): tmp = tempfile.mktemp() i = os.system(f"grep 'Wanted to contact directory mirror \$' /var/lib/tor/.SelekTOR/3xx/cache/9050/notice.log|sed -e 's/.* \$//' -e 's/[~ ].*//'|sort -u > {tmp}") if i: with open(tmp, 'rt') as oFd: lnew = oFd.readlines() w.update(set(lnew)) LOG.info(f"Whitelist {len(lnew)} relays from {oargs.notice_log}") os.remove(tmp) twhitelist_set.update(w) w = set() if 'Onions' in aGOOD_NODES[sGOOD_ROOT].keys(): # Provides the descriptor for a hidden service. The **address** is the # '.onion' address of the hidden service w = set(aGOOD_NODES[sGOOD_ROOT]['Onions']) if oargs.white_onions: w.update(oargs.white_onions.split(',')) if oargs.points_timeout > 0: LOG.info(f"{len(w)} services will be checked from IntroductionPoints") t.update(lIntroductionPoints(controller, w, itimeout=oargs.points_timeout, password=oargs.torctl_pass)) if len(t) > 0: LOG.info(f"IntroductionPoints {len(t)} relays from {len(w)} IPs for onions") twhitelist_set.update(t) return twhitelist_set # async def iMain(lArgs): global aGOOD_CONTACTS_DB global aGOOD_CONTACTS_FPS global aBAD_CONTACTS_DB global aBAD_NODES global aGOOD_NODES global lKNOWN_NODNS global aRELAYS_DB global aRELAYS_DB_INDEX global tBAD_URLS global lNOT_IN_RELAYS_DB oargs = oMainPreamble(lArgs) controller = oStemController(oargs, sEXCLUDE_EXIT_GROUP) twhitelist_set = tWhitelistSet(oargs, controller) texclude_set = tExcludeSet(oargs, sEXCLUDE_EXIT_GROUP) ttrust_db_index = aGOOD_CONTACTS_FPS.keys() iFakeContact = 0 iTotalContacts = 0 aBadContacts = {} iR = 0 relays = controller.get_server_descriptors() lqueue = [] socksu = f"socks5://{oargs.proxy_host}:{oargs.proxy_port}" if oargs.saved_only: relays = [] for relay in relays: iR += 1 fp = relay.fingerprint = relay.fingerprint.upper() sofar = f"G:{len(aGOOD_CONTACTS_DB.keys())} F:{iFakeContact} BF:{len(texclude_set)} GF:{len(ttrust_db_index)} TC:{iTotalContacts} #{iR}" lConds = oargs.bad_on.split(',') r = bCheckFp(relay, sofar, lConds, texclude_set) if r is not True: continue # if it has a ciissversion in contact we count it in total iTotalContacts += 1 # only proceed if 'NotGood' not in lConds: if 'NotGood' not in lConds: continue # fail if the contact does not have url: to pass a = aParseContact(relay.contact, fp, lAT_REPS, lDOT_REPS, lNO_EMAIL) if not a: LOG.warn(f"{fp} BC contact did not parse {sofar}") texclude_set.add(fp) aBAD_CONTACTS_DB[fp] = a continue if 'url' in a and a['url']: # fail if the contact uses a url we already know is bad if a['url'] in tBAD_URLS: LOG.debug(f"{fp} skipping in tBAD_URLS {a['url']} {sofar}") # LOG.spam(f"{fp} {a} {sofar}") texclude_set.add(fp) continue domain = a['url'].replace('https://', '').replace('http://', '') # fail if the contact uses a domain we already know does not resolve if domain in lKNOWN_NODNS: # The fp is using a contact with a URL we know is bogus LOG.debug(f"{fp} BC skipping in lKNOWN_NODNS {a} {sofar}") # LOG.spam(f"{fp} {relay} {sofar}") texclude_set.add(fp) aBAD_CONTACTS_DB[fp] = a continue # drop through if 'proof' in a and a['proof'] in ['uri-rsa', 'dns-rsa']: if domain in aDOMAIN_FPS.keys(): continue if httpx: a['fp'] = fp lqueue.append(asyncio.create_task( aVerifyContact(a, fp, https_cafile=oargs.https_cafile, timeout=oargs.timeout, host=oargs.proxy_host, port=oargs.proxy_port, oargs=oargs))) else: b = aVerifyContact(a, fp, https_cafile=oargs.https_cafile, timeout=oargs.timeout, host=oargs.proxy_host, port=oargs.proxy_port, oargs=oargs) r = bProcessContact(b, texclude_set, aBadContacts, iFakeContact) if r is False: iFakeContact += 1 if httpx: # for b in asyncio.as_completed(lqueue): for b in lqueue: # r = await b r = b r = bProcessContact(r, texclude_set, aBadContacts, iFakeContact) if r is False: iFakeContact += 1 elif r is True: # iGoodContact += 1 pass texclude_set = texclude_set.difference(twhitelist_set) louts = [] if oargs.torrc_output and texclude_set: try: with open(oargs.torrc_output, 'wt') as oFTorrc: oFTorrc.write(f"{sEXCLUDE_EXIT_GROUP} {','.join(texclude_set)}\n") oFTorrc.write(f"{sINCLUDE_EXIT_KEY} {','.join(aGOOD_CONTACTS_FPS.keys())}\n") oFTorrc.write(f"{sINCLUDE_GUARD_KEY} {','.join(aGOOD_NODES[sGOOD_ROOT]['EntryNodes'])}\n") LOG.info(f"Wrote tor configuration to {oargs.torrc_output}") oFTorrc.close() louts += [oargs.torrc_output] except Exception as e: LOG.warn(f"ERROR writing {oargs.torrc_output} {e}") # drop through if oargs.bad_contacts and aBadContacts: try: # for later analysis with open(oargs.bad_contacts, 'wt') as oFYaml: yaml.dump(aBadContacts, oFYaml) oFYaml.close() louts += [oargs.bad_contacts] except Exception as e: LOG.warn(f"ERROR writing {oargs.bad_contacts} {e}") # drop through if oargs.good_contacts != '' and aGOOD_CONTACTS_DB: try: vwrite_good_contacts(oargs, aGOOD_CONTACTS_DB) louts += [oargs.good_contacts] except Exception as e: LOG.warn(f"ERROR writing vwrite_good_contacts {e}") # drop through aBAD_NODES[oBAD_ROOT][sEXCLUDE_EXIT_GROUP]['BadExit'] = list(texclude_set) aBAD_NODES[oBAD_ROOT][sEXCLUDE_DOMAINS] = lKNOWN_NODNS if oargs.bad_nodes: stag = sEXCLUDE_EXIT_GROUP + '/BadExit' try: vwrite_badnodes(oargs, aBAD_NODES, str(len(texclude_set)), stag) louts += [oargs.bad_nodes] except Exception as e: LOG.warn(f"ERROR writing vwrite_badnodes {e}") # drop through aGOOD_NODES['GoodNodes']['Relays']['ExitNodes'] = list(aGOOD_CONTACTS_FPS.keys()) # EntryNodes are readony if oargs.good_nodes: try: vwrite_goodnodes(oargs, aGOOD_NODES, len(aGOOD_CONTACTS_FPS.keys())) louts += [oargs.good_nodes] except Exception as e: LOG.warn(f"ERROR writing vwrite_goodnodes {e}") # drop through retval = 0 try: logging.getLogger('stem').setLevel(30) if texclude_set: try: LOG.info(f"controller {sEXCLUDE_EXIT_GROUP} {len(texclude_set)} net bad relays") controller.set_conf(sEXCLUDE_EXIT_GROUP, list(texclude_set)) except (Exception, stem.InvalidRequest, stem.SocketClosed,) as e: # noqa LOG.error(f"Failed setting {sEXCLUDE_EXIT_GROUP} bad exit relays in Tor {e}") LOG.debug(repr(texclude_set)) retval += 1 if aGOOD_CONTACTS_FPS.keys(): l = [elt for elt in aGOOD_CONTACTS_FPS.keys() if len (elt) == 40] try: LOG.info(f"controller {sINCLUDE_EXIT_KEY} {len(l)} good relays") controller.set_conf(sINCLUDE_EXIT_KEY, l) except (Exception, stem.InvalidRequest, stem.SocketClosed) as e: # noqa LOG.error(f"Failed setting {sINCLUDE_EXIT_KEY} good exit nodes in Tor {e}") LOG.debug(repr(l)) retval += 1 if 'EntryNodes' in aGOOD_NODES[sGOOD_ROOT].keys(): try: LOG.info(f"controller {sINCLUDE_GUARD_KEY} {len(aGOOD_NODES[sGOOD_ROOT]['EntryNodes'])} guard nodes") # FixMe for now override StrictNodes it may be unusable otherwise controller.set_conf(sINCLUDE_GUARD_KEY, aGOOD_NODES[sGOOD_ROOT]['EntryNodes']) except (Exception, stem.InvalidRequest, stem.SocketClosed,) as e: # noqa LOG.error(f"Failed setting {sINCLUDE_GUARD_KEY} guard nodes in Tor {e}") LOG.debug(repr(list(aGOOD_NODES[sGOOD_ROOT]['EntryNodes']))) retval += 1 cur = controller.get_conf('StrictNodes') if oargs.strict_nodes and int(cur) != oargs.strict_nodes: controller.set_conf('StrictNodes', oargs.strict_nodes) cur = controller.get_conf('StrictNodes') if int(cur) != oargs.strict_nodes: LOG.warn(f"controller failed StrictNodes NOT {oargs.strict_nodes}") else: LOG.info(f"controller OVERRODE StrictNodes to {oargs.strict_nodes}") else: LOG.info(f"controller StrictNodes is set to {cur}") # final LOG.info(f"Filtered {len(twhitelist_set)} whitelisted relays") LOG.info(f"{len(list(aGOOD_CONTACTS_DB.keys()))} good contacts out of {iTotalContacts}") vwritefinale(oargs, lNOT_IN_RELAYS_DB) elts='\n' + '\n'.join(louts) LOG.info(f"The following files were written:{elts}") except KeyboardInterrupt: return 0 except Exception as e: LOG.exception(str(e)) retval = 2 finally: # wierd we are getting stem errors during the final return # with a traceback that doesnt correspond to any real flow # File "/usr/lib/python3.9/site-packages/stem/control.py", line 2474, in set_conf # self.set_options({param: value}, False) logging.getLogger('stem').setLevel(40) try: for elt in controller._event_listeners: controller.remove_event_listener(elt) controller.close() except Exception as e: LOG.warn(str(e)) return retval if __name__ == '__main__': try: # i = asyncio.run(iMain(sys.argv[1:])) i = iMain(sys.argv[1:]) except IncorrectPassword as e: LOG.error(e) i = 1 except KeyboardInterrupt: i = 0 except Exception as e: LOG.exception(e) i = 2 sys.exit(i)