2022-11-07 05:40:00 +00:00
|
|
|
# -*- mode: python; indent-tabs-mode: nil; py-indent-offset: 4; coding: utf-8 -*-
|
|
|
|
|
|
|
|
# https://github.com/nusenu/noContactInfo_Exit_Excluder
|
|
|
|
# https://github.com/TheSmashy/TorExitRelayExclude
|
|
|
|
"""
|
|
|
|
This extends nusenu's basic idea of using the stem library to
|
|
|
|
dynamically exclude nodes that are likely to be bad by putting them
|
|
|
|
on the ExcludeNodes or ExcludeExitNodes setting of a running Tor.
|
|
|
|
* https://github.com/nusenu/noContactInfo_Exit_Excluder
|
|
|
|
* https://github.com/TheSmashy/TorExitRelayExclude
|
|
|
|
|
|
|
|
The basic cut is to exclude Exit nodes that do not have a contact.
|
|
|
|
That can be extended to nodes that do not have an email in the contact etc.
|
2022-11-07 11:38:22 +00:00
|
|
|
"""
|
2022-11-08 14:15:05 +00:00
|
|
|
"""But there's a problem, and your Tor notice.log will tell you about it:
|
2022-11-07 05:40:00 +00:00
|
|
|
you could exclude the nodes needed to access hidden services or
|
2022-11-08 14:15:05 +00:00
|
|
|
directories. So we need to add to the process the concept of a whitelist.
|
2022-11-07 05:40:00 +00:00
|
|
|
In addition, we may have our own blacklist of nodes we want to exclude,
|
|
|
|
or use these lists for other applications like selektor.
|
|
|
|
|
|
|
|
So we make two files that are structured in YAML:
|
|
|
|
```
|
2022-11-08 14:15:05 +00:00
|
|
|
/etc/tor/yaml/torrc-goodnodes.yaml
|
|
|
|
GoodNodes:
|
|
|
|
Relays:
|
|
|
|
IntroductionPoints:
|
|
|
|
- NODEFINGERPRINT
|
2022-11-07 05:40:00 +00:00
|
|
|
...
|
|
|
|
By default all sections of the goodnodes.yaml are used as a whitelist.
|
|
|
|
|
2022-11-08 14:15:05 +00:00
|
|
|
/etc/tor/yaml/torrc-badnodes.yaml
|
|
|
|
BadNodes:
|
2022-11-07 05:40:00 +00:00
|
|
|
ExcludeExitNodes:
|
|
|
|
BadExit:
|
|
|
|
# $0000000000000000000000000000000000000007
|
|
|
|
```
|
|
|
|
That part requires [PyYAML](https://pyyaml.org/wiki/PyYAML)
|
|
|
|
https://github.com/yaml/pyyaml/
|
|
|
|
|
|
|
|
Right now only the ExcludeExitNodes section is used by we may add ExcludeNodes
|
|
|
|
later, and by default all sub-sections of the badnodes.yaml are used as a
|
|
|
|
ExcludeExitNodes but it can be customized with the lWanted commandline arg.
|
|
|
|
|
|
|
|
The original idea has also been extended to add different conditions for
|
|
|
|
exclusion: the ```--contact``` commandline arg is a comma sep list of conditions:
|
|
|
|
* Empty - no contact info
|
|
|
|
* NoEmail - no @ sign in the contact',
|
|
|
|
More may be added later.
|
|
|
|
|
|
|
|
Because you don't want to exclude the introduction points to any onion
|
2022-11-08 14:15:05 +00:00
|
|
|
you want to connect to, ```--white_onions``` should whitelist the
|
|
|
|
introduction points to a comma sep list of onions, but is
|
2022-11-07 05:40:00 +00:00
|
|
|
currently broken in stem 1.8.0: see:
|
|
|
|
* https://github.com/torproject/stem/issues/96
|
|
|
|
* https://gitlab.torproject.org/legacy/trac/-/issues/25417
|
|
|
|
|
2022-11-08 14:15:05 +00:00
|
|
|
```--torrc_output``` will write the torrc ExcludeNodes configuration to a file.
|
|
|
|
|
|
|
|
Now for the final part: we lookup the Contact info of every server
|
|
|
|
that is currently in our Tor, and check it for its existence.
|
|
|
|
If it fails to provide the well-known url, we assume it's a bogus
|
|
|
|
relay and add it to a list of nodes that goes on ExcludeNodes -
|
|
|
|
not just exclude Exit.
|
2022-11-07 05:40:00 +00:00
|
|
|
|
2022-11-08 14:15:05 +00:00
|
|
|
If the Contact info is good we add the list of fingerprints to add
|
|
|
|
to ExitNodes, a whitelist of relays to use as exits.
|
2022-11-07 05:40:00 +00:00
|
|
|
|
2022-11-07 11:38:22 +00:00
|
|
|
```--proof_output``` will write the contact info as a ciiss dictionary
|
|
|
|
to a YAML file. If the proof is uri-rsa, the well-known file of fingerprints
|
2022-11-08 14:15:05 +00:00
|
|
|
is downloaded and the fingerprints are added on a 'fps' field we create
|
|
|
|
of that fingerprint's entry of the YAML dictionary. This file is read at the
|
2022-11-07 11:38:22 +00:00
|
|
|
beginning of the program to start with a trust database, and only new
|
2022-11-08 14:15:05 +00:00
|
|
|
contact info from new relays are added to the dictionary.
|
|
|
|
|
|
|
|
You can expect it to take an hour or two the first time this is run:
|
|
|
|
>700 domains.
|
2022-11-07 11:38:22 +00:00
|
|
|
|
2022-11-07 05:40:00 +00:00
|
|
|
For usage, do ```python3 exclude_badExits.py --help```
|
|
|
|
|
|
|
|
"""
|
|
|
|
|
|
|
|
import sys
|
2022-11-07 11:38:22 +00:00
|
|
|
|
2022-11-07 05:40:00 +00:00
|
|
|
import os
|
|
|
|
import re
|
2022-11-09 09:30:43 +00:00
|
|
|
import socket
|
2022-11-07 05:40:00 +00:00
|
|
|
import time
|
|
|
|
import argparse
|
2022-11-07 11:38:22 +00:00
|
|
|
from io import StringIO
|
2022-11-07 05:40:00 +00:00
|
|
|
|
2022-11-09 05:43:26 +00:00
|
|
|
from urllib3.util.ssl_match_hostname import CertificateError
|
2022-11-08 14:15:05 +00:00
|
|
|
from stem import InvalidRequest
|
2022-11-07 05:40:00 +00:00
|
|
|
from stem.control import Controller
|
2022-11-07 11:38:22 +00:00
|
|
|
from stem.connection import IncorrectPassword
|
|
|
|
from stem.util.tor_tools import is_valid_fingerprint
|
2022-11-07 05:40:00 +00:00
|
|
|
try:
|
|
|
|
import yaml
|
|
|
|
except:
|
|
|
|
yaml = None
|
|
|
|
try:
|
2022-11-08 14:15:05 +00:00
|
|
|
from unbound import ub_ctx,RR_TYPE_TXT,RR_CLASS_IN
|
|
|
|
except:
|
|
|
|
ub_ctx = RR_TYPE_TXT = RR_CLASS_IN = None
|
|
|
|
|
|
|
|
try:
|
2022-11-07 05:40:00 +00:00
|
|
|
if 'COLOREDLOGS_LEVEL_STYLES' not in os.environ:
|
|
|
|
os.environ['COLOREDLOGS_LEVEL_STYLES'] = 'spam=22;debug=28;verbose=34;notice=220;warning=202;success=118,bold;error=124;critical=background=red'
|
|
|
|
# https://pypi.org/project/coloredlogs/
|
2022-11-08 14:15:05 +00:00
|
|
|
import coloredlogs
|
2022-11-07 05:40:00 +00:00
|
|
|
except ImportError as e:
|
|
|
|
coloredlogs = False
|
2022-11-09 05:43:26 +00:00
|
|
|
|
|
|
|
from trustor_poc import oDownloadUrl, idns_validate, TrustorError
|
|
|
|
from support_onions import sTorResolve, getaddrinfo, icheck_torrc, bAreWeConnected
|
2022-11-07 05:40:00 +00:00
|
|
|
|
|
|
|
global LOG
|
|
|
|
import logging
|
2022-11-08 14:15:05 +00:00
|
|
|
import warnings
|
|
|
|
warnings.filterwarnings('ignore')
|
2022-11-07 05:40:00 +00:00
|
|
|
LOG = logging.getLogger()
|
|
|
|
|
2022-11-09 05:43:26 +00:00
|
|
|
ETC_DIR = '/etc/tor/yaml'
|
2022-11-07 11:38:22 +00:00
|
|
|
aTRUST_DB = {}
|
2022-11-09 09:30:43 +00:00
|
|
|
aTRUST_DB_INDEX = {}
|
2022-11-07 05:40:00 +00:00
|
|
|
sDETAILS_URL = "https://metrics.torproject.org/rs.html#details/"
|
|
|
|
# You can call this while bootstrapping
|
2022-11-08 14:15:05 +00:00
|
|
|
sEXCLUDE_EXIT_KEY = 'ExcludeNodes'
|
|
|
|
sINCLUDE_EXIT_KEY = 'ExitNodes'
|
|
|
|
sINCLUDE_GUARD_KEY = 'EntryNodes'
|
2022-11-07 05:40:00 +00:00
|
|
|
|
2022-11-09 05:43:26 +00:00
|
|
|
# maybe we should check these each time but we
|
|
|
|
# got them by sorting bad relays in the wild
|
|
|
|
lKNOWN_NODNS = [
|
|
|
|
'0x0.is',
|
|
|
|
'a9.wtf',
|
2022-11-09 09:30:43 +00:00
|
|
|
'artikel5ev.de',
|
2022-11-09 05:43:26 +00:00
|
|
|
'arvanode.net',
|
|
|
|
'dodo.pm',
|
2022-11-09 09:30:43 +00:00
|
|
|
'dra-family.github.io',
|
|
|
|
'eraldonion.org',
|
2022-11-09 05:43:26 +00:00
|
|
|
'galtland.network',
|
|
|
|
'interfesse.net',
|
|
|
|
'kryptonit.org',
|
2022-11-09 09:30:43 +00:00
|
|
|
'lonet.sh',
|
|
|
|
'moneneis.de',
|
2022-11-09 05:43:26 +00:00
|
|
|
'nx42.de',
|
2022-11-09 09:30:43 +00:00
|
|
|
'ormycloud.org',
|
|
|
|
'plied-privacy.net',
|
|
|
|
'rification-for-nusenu.net',
|
|
|
|
'sv.ch',
|
|
|
|
'thingtohide.nl',
|
|
|
|
'tikel10.org',
|
2022-11-09 05:43:26 +00:00
|
|
|
'tor-exit-2.aa78i2efsewr0neeknk.xyz',
|
|
|
|
'tor-exit-3.aa78i2efsewr0neeknk.xyz',
|
2022-11-09 09:30:43 +00:00
|
|
|
'torix-relays.org',
|
|
|
|
'tse.com',
|
|
|
|
'tuxli.org',
|
|
|
|
'w.digidow.eu',
|
|
|
|
'www.quintex.com',
|
2022-11-09 05:43:26 +00:00
|
|
|
]
|
2022-11-07 11:38:22 +00:00
|
|
|
def oMakeController(sSock='', port=9051):
    """Create and authenticate a stem Controller.

    Connects over the unix control socket *sSock* when that path exists,
    otherwise over the TCP control *port*.  Prompts for the controller
    password on stderr and authenticates before returning.
    """
    import getpass
    bUseSocket = bool(sSock) and os.path.exists(sSock)
    if bUseSocket:
        oController = Controller.from_socket_file(path=sSock)
    else:
        oController = Controller.from_port(port=port)
    # flush stdout so the password prompt is not mixed with buffered output
    sys.stdout.flush()
    sPass = getpass.unix_getpass(prompt='Controller Password: ',
                                 stream=sys.stderr)
    oController.authenticate(sPass)
    return oController
|
|
|
|
|
2022-11-08 14:15:05 +00:00
|
|
|
oBAD_NODES = {}
oBAD_ROOT = 'BadNodes'
def lYamlBadNodes(sFile,
                  section=sEXCLUDE_EXIT_KEY,
                  lWanted=None):
    """Load the bad-nodes YAML file into the global oBAD_NODES.

    Also synchronizes the global lKNOWN_NODNS with the file's
    'ExcludeDomains' section: if the file has no such section it is
    seeded from our built-in list, otherwise the file's list replaces
    the built-in one.

    :param sFile: path to the badnodes YAML file
    :param section: YAML subsection to read (currently unused - BROKEN)
    :param lWanted: section names wanted (defaults to ['BadExit'])
    :returns: list of fingerprints; currently always empty because the
        per-section extraction is broken (see commented-out code).
    """
    global oBAD_NODES
    global lKNOWN_NODNS
    # BUG FIX: the original used a mutable default argument
    # (lWanted=['BadExit']) which is shared across calls; use None as
    # the sentinel and build the default fresh each call.
    if lWanted is None:
        lWanted = ['BadExit']
    l = []
    if not yaml:
        # PyYAML is not installed - nothing we can do
        return l
    if os.path.exists(sFile):
        with open(sFile, 'rt') as oFd:
            o = yaml.safe_load(oFd)
        oBAD_NODES = o

        # BROKEN
        # root = 'ExcludeNodes'
        # for elt in o[oBAD_ROOT][root][section].keys():
        #     if lWanted and elt not in lWanted: continue
        #     # l += o[oBAD_ROOT][root][section][elt]

        root = 'ExcludeDomains'
        if root not in o[oBAD_ROOT] or not o[oBAD_ROOT][root]:
            # file has no domain list yet - seed it with our defaults
            o[oBAD_ROOT][root] = lKNOWN_NODNS
        else:
            # the file wins: use its domain list from now on
            lKNOWN_NODNS = o[oBAD_ROOT][root]
    return l
|
|
|
|
|
2022-11-08 14:15:05 +00:00
|
|
|
oGOOD_NODES = {}
oGOOD_ROOT = 'GoodNodes'
def lYamlGoodNodes(sFile='/etc/tor/torrc-goodnodes.yaml'):
    """Load the good-nodes YAML file into the global oGOOD_NODES.

    :param sFile: path to the goodnodes YAML file
    :returns: the fingerprints listed under GoodNodes/GuardNodes, or an
        empty list when PyYAML is unavailable or the file is missing.
    """
    global oGOOD_NODES
    lGuards = []
    if not yaml:
        return lGuards
    if not os.path.exists(sFile):
        return lGuards
    with open(sFile, 'rt') as oFd:
        oLoaded = yaml.safe_load(oFd)
    oGOOD_NODES = oLoaded
    if 'GuardNodes' in oLoaded[oGOOD_ROOT]:
        lGuards += oLoaded[oGOOD_ROOT]['GuardNodes']
    # yq '.Nodes.IntroductionPoints|.[]' < /etc/tor/torrc-goodnodes.yaml
    return lGuards
|
|
|
|
|
2022-11-08 14:15:05 +00:00
|
|
|
def lIntroductionPoints(controller, lOnions):
    """Return the introduction-point addresses for a list of onions.

    NOTE: hidden-service descriptor lookup is not working in stem 1.8.x;
    see https://github.com/torproject/stem/issues/96

    :param controller: an authenticated stem Controller
    :param lOnions: iterable of onion addresses to look up
    :returns: list of introduction-point IP addresses (may be empty)
    """
    lAddresses = []
    for elt in lOnions:
        desc = controller.get_hidden_service_descriptor(elt,
                                                        await_result=True,
                                                        timeout=None)
        lPoints = desc.introduction_points()
        # BUG FIX: the original tested `if l:` here, which warned about
        # "NO introduction points" and skipped exactly the onions that
        # DID have introduction points.
        if not lPoints:
            LOG.warn(f"{elt} NO introduction points\n")
            continue
        LOG.info(f"{elt} introduction points are...\n")
        for introduction_point in lPoints:
            LOG.info(' %s:%s => %s' % (introduction_point.address,
                                       introduction_point.port,
                                       introduction_point.identifier))
            # BUG FIX: accumulate into a separate list; the original
            # appended to the same list it was iterating over, so the
            # loop would revisit the appended address strings and crash
            # on the missing .address attribute.
            lAddresses += [introduction_point.address]
    return lAddresses
|
|
|
|
|
2022-11-09 09:30:43 +00:00
|
|
|
tBAD_URLS = set()
lATS = ['abuse', 'email']
lINTS = ['ciissversion', 'uplinkbw', 'signingkeylifetime', 'memory']
lBOOLS = ['dnssec', 'dnsqname', 'aesni', 'autoupdate', 'dnslocalrootzone',
          'sandbox', 'offlinemasterkey']
def aVerifyContact(a, fp, https_cafile, timeout=20, host='127.0.0.1', port=9050):
    """Verify a parsed ContactInfo dict *a* for relay fingerprint *fp*.

    Normalizes the int/bool/email fields for YAML, then - for 'uri-rsa'
    proofs - downloads the well-known fingerprint file from the
    contact's URL (through the Tor SOCKS proxy at host:port) and stores
    the fingerprints found there in a['fps'].  Domains that fail to
    resolve are appended to the global lKNOWN_NODNS; URLs that fail to
    download are added to the global tBAD_URLS.

    :param a: contact-info dict as returned by aParseContact
    :param fp: the relay's fingerprint
    :param https_cafile: CA bundle used for HTTPS verification
    :param timeout: download connect timeout in seconds
    :param host: SOCKS proxy host
    :param port: SOCKS proxy port
    :returns: the (possibly updated) dict *a*, always with a 'fps' key,
        or the cached contact dict when the trust DB already has it
    """
    global tBAD_URLS
    global lKNOWN_NODNS
    # cleanups for yaml
    for elt in lINTS:
        if elt in a:
            a[elt] = int(a[elt])
    for elt in lBOOLS:
        if elt in a:
            if a[elt] in ['y', 'yes', 'true', 'True']:
                a[elt] = True
            else:
                a[elt] = False
    for elt in lATS:
        if elt in a:
            # ciiss obfuscates the @ as []
            a[elt] = a[elt].replace('[]', '@')

    a.update({'fps': []})
    keys = list(a.keys())
    # test the url for fps and add it to the array
    if 'proof' not in keys:
        LOG.warn(f"{fp} 'proof' not in {keys}")
        return a

    if aTRUST_DB_INDEX and fp in aTRUST_DB_INDEX.keys():
        aCachedContact = aTRUST_DB_INDEX[fp]
        # BUG FIX: the original wrote `if aCachedContact['email'] = a['email']:`
        # (assignment - a syntax error).  Compare with '==', and use .get()
        # so a missing 'email' field cannot raise KeyError.
        if aCachedContact.get('email') == a.get('email'):
            return aCachedContact

    if 'url' not in keys:
        if 'uri' not in keys:
            a['url'] = ''
            LOG.warn(f"{fp} url and uri not in {keys}")
            return a
        a['url'] = a['uri']
        LOG.debug(f"{fp} 'uri' but not 'url' in {keys}")
        # drop through

    # normalize the scheme to https
    if a['url'].startswith('http:'):
        a['url'] = 'https:' + a['url'][5:]
    elif not a['url'].startswith('https:'):
        a['url'] = 'https:' + a['url']

    # domain should be a unique key for contacts
    domain = a['url'][8:].strip('/')
    try:
        ip = sTorResolve(domain)
    except Exception as e:
        # Tor could not resolve it - fall back to the system resolver
        try:
            lpair = getaddrinfo(domain, 443)
        except (socket.gaierror, ) as e:
            # BUG FIX: the original logged the literal text "{e}"
            # (missing f-prefix on the format string)
            LOG.debug(f"{e}")
            lpair = None
            lKNOWN_NODNS.append(domain)
        except Exception as e:
            # BUG FIX: same missing f-prefix as above
            LOG.warn(f"{e}")
            lpair = None
            lKNOWN_NODNS.append(domain)
        if lpair is None:
            LOG.warn(f"TorResolv and getaddrinfo failed for {domain}")
            return a
        ip = lpair[0]

    if a['proof'] not in ['uri-rsa']:
        # only support uri for now
        if False and ub_ctx:
            # dns-rsa proof via DNSSEC - disabled, not yet supported
            fp_domain = fp + '.' + domain
            if idns_validate(fp_domain,
                             libunbound_resolv_file='resolv.conf',
                             dnssec_DS_file='dnssec-root-trust',
                             ) == 0:
                pass
        LOG.warn(f"{fp} proof={a['proof']} not supported yet")
        return a

    LOG.debug(f"{len(keys)} contact fields for {fp}")
    try:
        LOG.debug(f"Downloading from {domain} for {fp}")
        o = oDownloadUrl(domain, https_cafile,
                         timeout=timeout, host=host, port=port)
        # requests response: text "reason", "status_code"
    except AttributeError as e:
        LOG.exception(f"AttributeError downloading from {domain} {e}")
    except CertificateError as e:
        LOG.warn(f"CertificateError downloading from {domain} {e}")
        tBAD_URLS.add(a['url'])
    except TrustorError as e:
        LOG.warn(f"TrustorError downloading from {domain} {e.args}")
        tBAD_URLS.add(a['url'])
    except BaseException as e:
        LOG.error(f"Exception {type(e)} downloading from {domain} {e}")
    else:
        if o.status_code >= 300:
            LOG.warn(f"Error downloading from {domain} {o.status_code} {o.reason}")
            # any reason retry?
            tBAD_URLS.add(a['url'])
            return a

        l = o.text.upper().strip().split('\n')
        if not l:
            # already squawked in lD
            LOG.warn(f"Downloading from {domain} empty for {fp}")
            tBAD_URLS.add(a['url'])
        else:
            # keep only plausible fingerprint lines: 40 chars, not comments
            a['fps'] = [elt for elt in l if elt and len(elt) == 40
                        and not elt.startswith('#')]
    return a
|
|
|
|
|
|
|
|
def aParseContact(contact, fp):
    """Parse a relay ContactInfo string into a dict keyed by *fp*.

    key:value tokens from the contact line are rewritten as a small
    quoted-YAML document and loaded with yaml.safe_load.

    See the Tor ContactInfo Information Sharing Specification v2
    https://nusenu.github.io/ContactInfo-Information-Sharing-Specification/
    """
    sCleaned = contact.strip().replace('"', '')
    lFields = []
    for token in sCleaned.split(' '):
        if ':' in token:
            lFields.append(token)
    LOG.debug(f"{fp} {len(lFields)} fields")
    # build a YAML doc: the fingerprint maps to the quoted key: "value" pairs
    lLines = []
    for token in lFields:
        lLines.append(f"  {token}\"".replace(':', ': \"', 1))
    sYaml = f'"{fp}":\n' + '\n'.join(lLines)
    return yaml.safe_load(StringIO(sYaml))
|
|
|
|
|
2022-11-08 14:15:05 +00:00
|
|
|
def vwait_for_controller(controller, wait_boot):
    """Block until Tor reports 100% bootstrap or *wait_boot* seconds pass.

    Polls the controller's bootstrap-phase status every 5 seconds.

    :param controller: an authenticated stem Controller
    :param wait_boot: maximum number of seconds to wait
    :raises SystemExit: when we have no network connection at all
    """
    if bAreWeConnected() is False:
        raise SystemExit("we are not connected")
    percent = i = 0
    # You can call this while bootstrapping
    while percent < 100 and i < wait_boot:
        bootstrap_status = controller.get_info("status/bootstrap-phase")
        progress_percent = re.match('.* PROGRESS=([0-9]+).*', bootstrap_status)
        # BUG FIX: guard against a malformed status line; the original
        # called .group(1) on the match unconditionally and would raise
        # AttributeError when the regex did not match.
        if progress_percent is not None:
            percent = int(progress_percent.group(1))
            LOG.info(f"Bootstrapping {percent}%")
        time.sleep(5)
        i += 5
|
|
|
|
|
|
|
|
def vsetup_logging(log_level, logfile=''):
    """Configure the root logger, optionally logging to *logfile*.

    A leading '+' on *logfile* means "also log to stdout"; a leading
    '-' means "file only".  When coloredlogs imported successfully it
    is used instead of logging.basicConfig.

    :param log_level: numeric logging level (10=debug, 20=info, ...)
    :param logfile: optional log file path, may be prefixed '+' or '-'
    """
    global LOG
    add = True

    # stem installs its own verbose logging; raise it to WARNING (30)
    from stem.util import log
    logging.getLogger('stem').setLevel(30)

    # monkey-patch the default formatter for a compact timestamp format
    logging._defaultFormatter = logging.Formatter(datefmt='%m-%d %H:%M:%S')
    logging._defaultFormatter.default_time_format = '%m-%d %H:%M:%S'
    logging._defaultFormatter.default_msec_format = ''

    kwargs = dict(level=log_level,
                  force=True,
                  format='%(levelname)-4s %(message)s')

    if logfile:
        # '+file' -> file and stdout; '-file' -> file only
        add = logfile.startswith('+')
        sub = logfile.startswith('-')
        if add or sub:
            logfile = logfile[1:]
        kwargs['filename'] = logfile

    if coloredlogs:
        # https://pypi.org/project/coloredlogs/
        aKw = dict(level=log_level,
                   logger=LOG,
                   stream=sys.stdout if add else None,
                   fmt='%(levelname)-4s %(message)s'
                   )
        coloredlogs.install(**aKw)
        if logfile:
            oHandler = logging.FileHandler(logfile)
            LOG.addHandler(oHandler)
        LOG.info(f"CSetting log_level to {log_level!s}")
    else:
        logging.basicConfig(**kwargs)
        if add and logfile:
            # basicConfig already points at the file; add stdout too
            oHandler = logging.StreamHandler(sys.stdout)
            LOG.addHandler(oHandler)
        LOG.info(f"SSetting log_level to {log_level!s}")
|
|
|
|
|
|
|
|
|
2022-11-07 05:40:00 +00:00
|
|
|
def oMainArgparser(_=None):
    """Build the argparse parser for the command line.

    The default --https_cafile is taken from pyOpenSSL's known
    certificate file locations when that package is importable.
    """
    try:
        from OpenSSL import SSL
        lCAfs = SSL._CERTIFICATE_FILE_LOCATIONS
    except:
        lCAfs = []

    # keep only the CA files that actually exist on this system
    CAfs = []
    for elt in lCAfs:
        if os.path.exists(elt):
            CAfs.append(elt)
    if not CAfs:
        CAfs = ['']

    parser = argparse.ArgumentParser(add_help=True,
                                     epilog=__doc__)
    parser.add_argument('--https_cafile', type=str,
                        help="Certificate Authority file (in PEM)",
                        default=CAfs[0])
    parser.add_argument('--proxy_host', '--proxy-host', type=str,
                        default='127.0.0.1',
                        help='proxy host')
    parser.add_argument('--proxy_port', '--proxy-port', default=9050, type=int,
                        help='proxy control port')
    parser.add_argument('--proxy_ctl', '--proxy-ctl',
                        default='/run/tor/control' if os.path.exists('/run/tor/control') else 9051,
                        type=str,
                        help='control socket - or port')

    parser.add_argument('--torrc',
                        default='/etc/tor/torrc-defaults',
                        type=str,
                        help='torrc to check for suggestions')
    parser.add_argument('--timeout', default=60, type=int,
                        help='proxy download connect timeout')

    # BUG FIX: the original passed '/torrc-goodnodes.yaml' (an absolute
    # path) as the second component, which makes os.path.join discard
    # ETC_DIR entirely; join with a relative name instead.  Same fix
    # for --bad_nodes and --proof_output below.
    parser.add_argument('--good_nodes', type=str,
                        default=os.path.join(ETC_DIR, 'torrc-goodnodes.yaml'),
                        help="Yaml file of good nodes that should not be excluded")
    parser.add_argument('--bad_nodes', type=str,
                        default=os.path.join(ETC_DIR, 'torrc-badnodes.yaml'),
                        help="Yaml file of bad nodes that should also be excluded")
    parser.add_argument('--contact', type=str, default='Empty,NoEmail',
                        help="comma sep list of conditions - Empty,NoEmail")
    parser.add_argument('--bad_contacts', type=str,
                        default='/tmp/badcontacts.yaml',
                        help="Yaml file of bad contacts that bad FPs are using")
    parser.add_argument('--wait_boot', type=int, default=120,
                        help="Seconds to wait for Tor to booststrap")
    parser.add_argument('--log_level', type=int, default=20,
                        help="10=debug 20=info 30=warn 40=error")
    parser.add_argument('--bad_sections', type=str,
                        default='Hetzner,BadExit',
                        help="sections of the badnodes.yaml to use, comma separated, '' BROKEN")
    parser.add_argument('--white_onions', type=str,
                        default='',
                        help="comma sep. list of onions to whitelist their introduction points - BROKEN")
    parser.add_argument('--torrc_output', type=str, default='',
                        help="Write the torrc configuration to a file")
    parser.add_argument('--proof_output', type=str,
                        default=os.path.join(ETC_DIR, 'proof.yaml'),
                        help="Write the proof data of the included nodes to a YAML file")
    return parser
|
|
|
|
|
2022-11-08 14:15:05 +00:00
|
|
|
def vwrite_badnodes(oArgs):
    """Atomically write the global oBAD_NODES to oArgs.bad_nodes.

    The YAML is written to a '.tmp' file first, the previous file (if
    any) is kept as '.bak', then the '.tmp' is renamed into place.
    """
    global oBAD_NODES
    if not oArgs.bad_nodes:
        return
    sTmp = oArgs.bad_nodes + '.tmp'
    sBak = oArgs.bad_nodes + '.bak'
    with open(sTmp, 'wt') as oFYaml:
        yaml.dump(oBAD_NODES, indent=2, stream=oFYaml)
        LOG.info(f"Wrote {len(list(oBAD_NODES.keys()))} to {oArgs.bad_nodes}")
    if os.path.exists(oArgs.bad_nodes):
        os.rename(oArgs.bad_nodes, sBak)
    os.rename(sTmp, oArgs.bad_nodes)
|
|
|
|
|
|
|
|
def vwrite_goodnodes(oArgs):
    """Atomically write the global oGOOD_NODES to oArgs.good_nodes.

    Writes a '.tmp' file, keeps the previous file (if any) as '.bak',
    then renames the '.tmp' into place.
    """
    global oGOOD_NODES
    if not oArgs.good_nodes:
        return
    sTmp = oArgs.good_nodes + '.tmp'
    sBak = oArgs.good_nodes + '.bak'
    with open(sTmp, 'wt') as oFYaml:
        yaml.dump(oGOOD_NODES, indent=2, stream=oFYaml)
        LOG.info(f"Wrote {len(list(oGOOD_NODES.keys()))} good nodes to {oArgs.good_nodes}")
    if os.path.exists(oArgs.good_nodes):
        os.rename(oArgs.good_nodes, sBak)
    os.rename(sTmp, oArgs.good_nodes)
|
|
|
|
|
2022-11-07 05:40:00 +00:00
|
|
|
def iMain(lArgs):
|
|
|
|
parser = oMainArgparser()
|
|
|
|
oArgs = parser.parse_args(lArgs)
|
|
|
|
|
2022-11-08 14:15:05 +00:00
|
|
|
vsetup_logging(oArgs.log_level)
|
|
|
|
if bAreWeConnected() is False:
|
|
|
|
raise SystemExit("we are not connected")
|
|
|
|
|
|
|
|
sFile = oArgs.torrc
|
|
|
|
if sFile and os.path.exists(sFile):
|
|
|
|
icheck_torrc(sFile, oArgs)
|
2022-11-07 11:38:22 +00:00
|
|
|
|
2022-11-08 14:15:05 +00:00
|
|
|
global aTRUST_DB
|
2022-11-09 09:30:43 +00:00
|
|
|
global aTRUST_DB_INDEX
|
|
|
|
|
2022-11-07 11:38:22 +00:00
|
|
|
sFile = oArgs.proof_output
|
|
|
|
if sFile and os.path.exists(sFile):
|
2022-11-08 14:15:05 +00:00
|
|
|
try:
|
|
|
|
with open(sFile, 'rt') as oFd:
|
|
|
|
aTRUST_DB = yaml.safe_load(oFd)
|
2022-11-09 09:30:43 +00:00
|
|
|
# reverse lookup of fps to contacts
|
|
|
|
# but...
|
|
|
|
for k,v in aTRUST_DB:
|
|
|
|
aTRUST_DB_INDEX[k] = v
|
|
|
|
if 'fps' in aTRUST_DB[k].keys():
|
|
|
|
for fp in aTRUST_DB[k]['fps']:
|
|
|
|
aTRUST_DB_INDEX[fp] = v
|
|
|
|
|
|
|
|
except Exception as e:
|
|
|
|
LOG.warn(f"Error reading YAML TrustDB {sFile} {e}")
|
2022-11-07 11:38:22 +00:00
|
|
|
|
2022-11-09 05:43:26 +00:00
|
|
|
if os.path.exists(oArgs.proxy_ctl):
|
2022-11-07 11:38:22 +00:00
|
|
|
controller = oMakeController(sSock=oArgs.proxy_ctl)
|
|
|
|
else:
|
2022-11-08 14:15:05 +00:00
|
|
|
port =int(oArgs.proxy_ctl)
|
2022-11-07 11:38:22 +00:00
|
|
|
controller = oMakeController(port=port)
|
2022-11-08 14:15:05 +00:00
|
|
|
|
|
|
|
vwait_for_controller(controller, oArgs.wait_boot)
|
|
|
|
|
|
|
|
if oArgs.proof_output:
|
|
|
|
proof_output_tmp = oArgs.proof_output + '.tmp'
|
|
|
|
|
2022-11-07 05:40:00 +00:00
|
|
|
elt = controller.get_conf('UseMicrodescriptors')
|
|
|
|
if elt != '0' :
|
2022-11-08 14:15:05 +00:00
|
|
|
LOG.error('"UseMicrodescriptors 0" is required in your /etc/tor/torrc. Exiting.')
|
2022-11-07 11:38:22 +00:00
|
|
|
controller.set_conf('UseMicrodescriptors', 0)
|
|
|
|
# does it work dynamically?
|
2022-11-08 14:15:05 +00:00
|
|
|
return 2
|
|
|
|
|
|
|
|
elt = controller.get_conf(sEXCLUDE_EXIT_KEY)
|
2022-11-07 05:40:00 +00:00
|
|
|
if elt and elt != '{??}':
|
2022-11-08 14:15:05 +00:00
|
|
|
LOG.warn(f"{sEXCLUDE_EXIT_KEY} is in use already")
|
2022-11-07 05:40:00 +00:00
|
|
|
|
2022-11-08 14:15:05 +00:00
|
|
|
lGoodOverrideSet = lYamlGoodNodes(oArgs.good_nodes)
|
|
|
|
LOG.info(f"lYamlGoodNodes {len(lGoodOverrideSet)} from {oArgs.good_nodes}")
|
2022-11-07 05:40:00 +00:00
|
|
|
|
|
|
|
if oArgs.white_onions:
|
2022-11-08 14:15:05 +00:00
|
|
|
l = lIntroductionPoints(controller, oArgs.white_onions.split(','))
|
|
|
|
lGoodOverrideSet += l
|
|
|
|
|
|
|
|
exit_excludelist = []
|
|
|
|
if oArgs.bad_nodes and os.path.exists(oArgs.bad_nodes):
|
|
|
|
if False and oArgs.bad_sections:
|
|
|
|
# BROKEN
|
|
|
|
sections = oArgs.bad_sections.split(',')
|
|
|
|
exit_excludelist = lYamlBadNodes(oArgs.bad_nodes,
|
|
|
|
lWanted=sections,
|
|
|
|
section=sEXCLUDE_EXIT_KEY)
|
|
|
|
else:
|
|
|
|
exit_excludelist = lYamlBadNodes(oArgs.bad_nodes)
|
2022-11-07 05:40:00 +00:00
|
|
|
|
2022-11-08 14:15:05 +00:00
|
|
|
LOG.info(f"lYamlBadNodes {len(exit_excludelist)}")
|
2022-11-07 05:40:00 +00:00
|
|
|
|
2022-11-08 14:15:05 +00:00
|
|
|
relays = controller.get_server_descriptors()
|
2022-11-07 05:40:00 +00:00
|
|
|
|
2022-11-08 14:15:05 +00:00
|
|
|
lProofGoodFps = []
|
|
|
|
iDnsContact = 0
|
2022-11-09 05:43:26 +00:00
|
|
|
lBadContactUrls = []
|
2022-11-08 14:15:05 +00:00
|
|
|
iFakeContact = 0
|
|
|
|
aBadContacts = {}
|
2022-11-07 11:38:22 +00:00
|
|
|
aProofUri = {}
|
2022-11-07 05:40:00 +00:00
|
|
|
lConds = oArgs.contact.split(',')
|
2022-11-09 05:43:26 +00:00
|
|
|
iR = 0
|
2022-11-07 05:40:00 +00:00
|
|
|
for relay in relays:
|
2022-11-09 05:43:26 +00:00
|
|
|
iR += 1
|
2022-11-07 11:38:22 +00:00
|
|
|
if not is_valid_fingerprint(relay.fingerprint):
|
|
|
|
LOG.warn('Invalid Fingerprint: %s' % relay.fingerprint)
|
|
|
|
continue
|
2022-11-08 14:15:05 +00:00
|
|
|
relay.fingerprint = relay.fingerprint.upper()
|
2022-11-07 11:38:22 +00:00
|
|
|
|
2022-11-09 05:43:26 +00:00
|
|
|
sofar = f"G:{len(list(aProofUri.keys()))} U:{iDnsContact} F:{iFakeContact} BF:{len(exit_excludelist)} GF:{len(lProofGoodFps)} #{iR}"
|
2022-11-08 14:15:05 +00:00
|
|
|
if not relay.exit_policy.is_exiting_allowed():
|
|
|
|
if sEXCLUDE_EXIT_KEY == 'ExcludeNodes':
|
|
|
|
LOG.debug(f"{relay.fingerprint} not an exit {sofar}")
|
|
|
|
else:
|
|
|
|
LOG.warn(f"{relay.fingerprint} not an exit {sofar}")
|
|
|
|
# continue
|
|
|
|
|
|
|
|
if relay.fingerprint in lProofGoodFps:
|
|
|
|
# we already have it.
|
|
|
|
continue
|
|
|
|
|
2022-11-07 11:38:22 +00:00
|
|
|
if relay.fingerprint in aTRUST_DB:
|
2022-11-08 14:15:05 +00:00
|
|
|
if aTRUST_DB[relay.fingerprint]['fps'] and \
|
|
|
|
relay.fingerprint in aTRUST_DB[relay.fingerprint]['fps']:
|
|
|
|
lProofGoodFps += relay.fingerprint
|
|
|
|
continue
|
2022-11-07 11:38:22 +00:00
|
|
|
|
2022-11-08 14:15:05 +00:00
|
|
|
if relay.contact and b'dns-rsa' in relay.contact.lower():
|
2022-11-09 05:43:26 +00:00
|
|
|
relay.contact = str(relay.contact, 'UTF-8')
|
|
|
|
c = relay.contact.lower()
|
|
|
|
i = c.find('url:')
|
|
|
|
if i >=0:
|
|
|
|
c = c[i+4:]
|
|
|
|
i = c.find(' ')
|
|
|
|
if i >=0:
|
|
|
|
c = c[:i]
|
2022-11-09 09:30:43 +00:00
|
|
|
if c in lKNOWN_NODNS:
|
|
|
|
LOG.info(f"{relay.fingerprint} skipping in lKNOWN_NODNS {c} {sofar}")
|
|
|
|
exit_excludelist.append(relay.fingerprint)
|
|
|
|
continue
|
|
|
|
|
|
|
|
LOG.info(f"skipping 'dns-rsa' {relay.fingerprint}.{c} {sofar}")
|
2022-11-08 14:15:05 +00:00
|
|
|
iDnsContact += 1
|
2022-11-07 11:38:22 +00:00
|
|
|
continue
|
2022-11-08 14:15:05 +00:00
|
|
|
|
2022-11-07 11:38:22 +00:00
|
|
|
if relay.contact and b'proof:uri-rsa' in relay.contact.lower():
|
2022-11-09 05:43:26 +00:00
|
|
|
relay.contact = str(relay.contact, 'UTF-8')
|
2022-11-07 11:38:22 +00:00
|
|
|
a = aParseContact(relay.contact, relay.fingerprint)
|
2022-11-08 14:15:05 +00:00
|
|
|
if not a:
|
|
|
|
LOG.warn(f"{relay.fingerprint} did not parse {sofar}")
|
|
|
|
exit_excludelist.append(relay.fingerprint)
|
|
|
|
continue
|
2022-11-09 09:30:43 +00:00
|
|
|
if 'url' in a and a['url']:
|
|
|
|
if a['url'] in tBAD_URLS:
|
|
|
|
# The fp is using a contact with a URL we know is bad
|
|
|
|
LOG.info(f"{relay.fingerprint} skipping in tBAD_URLS {a['url']} {sofar}")
|
|
|
|
exit_excludelist.append(relay.fingerprint)
|
|
|
|
continue
|
|
|
|
domain = a['url'].replace('https://', '').replace('http://', '')
|
|
|
|
if domain in lKNOWN_NODNS:
|
|
|
|
# The fp is using a contact with a URL we know is bogus
|
|
|
|
LOG.info(f"{relay.fingerprint} skipping in lKNOWN_NODNS {a['url']} {sofar}")
|
|
|
|
exit_excludelist.append(relay.fingerprint)
|
|
|
|
continue
|
2022-11-09 05:43:26 +00:00
|
|
|
|
2022-11-08 14:15:05 +00:00
|
|
|
|
|
|
|
b = aVerifyContact(list(a.values())[0],
|
|
|
|
relay.fingerprint,
|
|
|
|
oArgs.https_cafile,
|
2022-11-07 11:38:22 +00:00
|
|
|
timeout=oArgs.timeout,
|
|
|
|
host=oArgs.proxy_host,
|
|
|
|
port=oArgs.proxy_port)
|
2022-11-08 14:15:05 +00:00
|
|
|
|
|
|
|
if not b['fps'] or not b['url']:
|
2022-11-09 05:43:26 +00:00
|
|
|
LOG.warn(f"{relay.fingerprint} did NOT VERIFY {sofar}")
|
2022-11-08 14:15:05 +00:00
|
|
|
# If it's giving contact info that doesnt check out
|
|
|
|
# it could be a bad exit with fake contact info
|
|
|
|
exit_excludelist.append(relay.fingerprint)
|
|
|
|
aBadContacts[relay.fingerprint] = b
|
2022-11-07 11:38:22 +00:00
|
|
|
continue
|
2022-11-08 14:15:05 +00:00
|
|
|
|
|
|
|
if relay.fingerprint not in b['fps']:
|
2022-11-09 05:43:26 +00:00
|
|
|
LOG.warn(f"{relay.fingerprint} the FP IS NOT in the list of fps {sofar}")
|
2022-11-08 14:15:05 +00:00
|
|
|
# assume a fp is using a bogus contact
|
|
|
|
exit_excludelist.append(relay.fingerprint)
|
|
|
|
iFakeContact += 1
|
|
|
|
aBadContacts[relay.fingerprint] = b
|
|
|
|
continue
|
2022-11-09 09:30:43 +00:00
|
|
|
|
2022-11-08 14:15:05 +00:00
|
|
|
# great contact had good fps and we are in them
|
|
|
|
lProofGoodFps += b['fps']
|
2022-11-09 09:30:43 +00:00
|
|
|
if relay.fingerprint in aProofUri.keys():
|
|
|
|
# a cached entry
|
|
|
|
continue
|
|
|
|
|
2022-11-08 14:15:05 +00:00
|
|
|
LOG.info(f"{relay.fingerprint} verified {b['url']} {sofar}")
|
|
|
|
# add our contact info to the trustdb
|
|
|
|
aProofUri[relay.fingerprint] = b
|
|
|
|
if oArgs.proof_output and oArgs.log_level <= 20:
|
|
|
|
# as we go along then clobber
|
|
|
|
with open(proof_output_tmp, 'wt') as oFYaml:
|
|
|
|
yaml.dump(aProofUri, indent=2, stream=oFYaml)
|
|
|
|
oFYaml.close()
|
|
|
|
continue
|
|
|
|
|
2022-11-07 05:40:00 +00:00
|
|
|
if ('Empty' in lConds and not relay.contact) or \
|
|
|
|
('NoEmail' in lConds and relay.contact and not b'@' in relay.contact):
|
2022-11-07 11:38:22 +00:00
|
|
|
exit_excludelist.append(relay.fingerprint)
|
2022-11-08 14:15:05 +00:00
|
|
|
|
|
|
|
exit_excludelist = list(set(exit_excludelist).difference(set(lGoodOverrideSet)))
|
|
|
|
|
|
|
|
if oArgs.proof_output and aProofUri:
|
|
|
|
with open(proof_output_tmp, 'wt') as oFYaml:
|
|
|
|
yaml.dump(aProofUri, indent=2, stream=oFYaml)
|
|
|
|
LOG.info(f"Wrote {len(list(aProofUri))} proof details to {oArgs.proof_output}")
|
|
|
|
oFYaml.close()
|
|
|
|
if os.path.exists(oArgs.proof_output):
|
|
|
|
bak = oArgs.proof_output +'.bak'
|
|
|
|
os.rename(oArgs.proof_output, bak)
|
|
|
|
os.rename(proof_output_tmp, oArgs.proof_output)
|
|
|
|
|
|
|
|
if oArgs.torrc_output and exit_excludelist:
|
|
|
|
with open(oArgs.torrc_output, 'wt') as oFTorrc:
|
|
|
|
oFTorrc.write(f"{sEXCLUDE_EXIT_KEY} {','.join(exit_excludelist)}\n")
|
|
|
|
oFTorrc.write(f"{sINCLUDE_EXIT_KEY} {','.join(lProofGoodFps)}\n")
|
|
|
|
oFTorrc.write(f"{sINCLUDE_GUARD_KEY} {','.join(o[oGOOD_ROOT]['GuardNodes'])}\n")
|
|
|
|
LOG.info(f"Wrote tor configuration to {oArgs.torrc_output}")
|
|
|
|
oFTorrc.close()
|
|
|
|
|
|
|
|
if oArgs.bad_contacts and aBadContacts:
|
|
|
|
# for later analysis
|
|
|
|
with open(oArgs.bad_contacts, 'wt') as oFYaml:
|
|
|
|
yaml.dump(aBadContacts, indent=2, stream=oFYaml)
|
|
|
|
oFYaml.close()
|
|
|
|
|
|
|
|
global oBAD_NODES
|
|
|
|
oBAD_NODES['BadNodes']['ExcludeNodes']['BadExit'] = exit_excludelist
|
2022-11-09 09:30:43 +00:00
|
|
|
global lKNOWN_NODNS
|
|
|
|
o[oBAD_ROOT]['ExcludeDomains'] = lKNOWN_NODNS
|
2022-11-08 14:15:05 +00:00
|
|
|
vwrite_badnodes(oArgs)
|
2022-11-09 09:30:43 +00:00
|
|
|
|
2022-11-09 05:43:26 +00:00
|
|
|
global oGOOD_NODES
|
|
|
|
oGOOD_NODES['GoodNodes']['Relays']['ExitNodes'] = lProofGoodFps
|
|
|
|
vwrite_goodnodes(oArgs)
|
2022-11-07 11:38:22 +00:00
|
|
|
|
2022-11-08 14:15:05 +00:00
|
|
|
retval = 0
|
|
|
|
try:
|
|
|
|
logging.getLogger('stem').setLevel(30)
|
2022-11-09 05:43:26 +00:00
|
|
|
try:
|
|
|
|
if exit_excludelist:
|
|
|
|
LOG.info(f"{sEXCLUDE_EXIT_KEY} {len(exit_excludelist)} net bad exit nodes")
|
|
|
|
controller.set_conf(sEXCLUDE_EXIT_KEY, exit_excludelist)
|
|
|
|
|
|
|
|
except stem.SocketClosed as e:
|
|
|
|
LOG.error(f"Failed setting {sEXCLUDE_EXIT_KEY} bad exit nodes in Tor")
|
|
|
|
retval += 1
|
|
|
|
|
|
|
|
try:
|
|
|
|
if lProofGoodFps:
|
|
|
|
LOG.info(f"{sINCLUDE_EXIT_KEY} {len(lProofGoodFps)} good nodes")
|
|
|
|
controller.set_conf(sINCLUDE_EXIT_KEY, lProofGoodFps)
|
|
|
|
except stem.SocketClosed as e:
|
|
|
|
LOG.error(f"Failed setting {sINCLUDE_EXIT_KEY} good exit nodes in Tor")
|
|
|
|
retval += 1
|
|
|
|
|
|
|
|
try:
|
|
|
|
o = oGOOD_NODES
|
|
|
|
if 'GuardNodes' in o[oGOOD_ROOT].keys():
|
|
|
|
LOG.info(f"{sINCLUDE_GUARD_KEY} {len(o[oGOOD_ROOT]['GuardNodes'])} guard nodes")
|
|
|
|
controller.set_conf(sINCLUDE_GUARD_KEY, o[oGOOD_ROOT]['GuardNodes'])
|
|
|
|
except stem.SocketClosed as e:
|
|
|
|
LOG.errro(f"Failed setting {sINCLUDE_EXIT_KEY} good exit nodes in Tor")
|
|
|
|
retval += 1
|
2022-11-09 09:30:43 +00:00
|
|
|
|
2022-11-08 14:15:05 +00:00
|
|
|
return retval
|
|
|
|
|
|
|
|
except InvalidRequest as e:
|
|
|
|
# Unacceptable option value: Invalid router list.
|
|
|
|
LOG.error(str(e))
|
|
|
|
LOG.warn(f"lProofGoodFps: {lProofGoodFps}")
|
|
|
|
LOG.warn(f"{sEXCLUDE_EXIT_KEY}: {exit_excludelist}")
|
|
|
|
retval = 1
|
|
|
|
return retval
|
|
|
|
except KeyboardInterrupt:
|
|
|
|
return 0
|
|
|
|
except Exception as e:
|
|
|
|
LOG.exception(str(e))
|
|
|
|
retval = 2
|
|
|
|
return retval
|
|
|
|
finally:
|
|
|
|
# wierd we are getting stem errors during the final return
|
|
|
|
# with a traceback that doesnt correspond to any real flow
|
|
|
|
# File "/usr/lib/python3.9/site-packages/stem/control.py", line 2474, in set_conf
|
|
|
|
# self.set_options({param: value}, False)
|
|
|
|
logging.getLogger('stem').setLevel(40)
|
|
|
|
try:
|
|
|
|
for elt in controller._event_listeners:
|
|
|
|
controller.remove_event_listener(elt)
|
|
|
|
controller.close()
|
|
|
|
except Exception as e:
|
|
|
|
LOG.warn(str(e))
|
2022-11-07 05:40:00 +00:00
|
|
|
|
|
|
|
if __name__ == '__main__':
    # Script entry point: run iMain and translate outcomes into exit codes.
    #   0 - success or operator interrupt (Ctrl-C)
    #   1 - bad Tor controller password
    #   2 - any other unexpected failure (logged with traceback)
    try:
        exit_code = iMain(sys.argv[1:])
    except IncorrectPassword as e:
        # Wrong controller password is a user/config error, not a crash.
        LOG.error(e)
        exit_code = 1
    except KeyboardInterrupt:
        # A deliberate interrupt is treated as a clean shutdown.
        exit_code = 0
    except Exception as e:
        # Anything else is unexpected; keep the traceback in the log.
        LOG.exception(e)
        exit_code = 2
    sys.exit(exit_code)