# -*- mode: python; indent-tabs-mode: nil; py-indent-offset: 4; coding: utf-8 -*-

# https://github.com/nusenu/noContactInfo_Exit_Excluder
# https://github.com/TheSmashy/TorExitRelayExclude
"""
This extends nusenu's basic idea of using the stem library to
dynamically exclude nodes that are likely to be bad by putting them
on the ExcludeNodes or ExcludeExitNodes setting of a running Tor.

* https://github.com/nusenu/noContactInfo_Exit_Excluder
* https://github.com/TheSmashy/TorExitRelayExclude

The basic cut is to exclude Exit nodes that do not have a contact.
That can be extended to nodes that do not have an email in the contact, etc.

But there's a problem, and your Tor notice.log will tell you about it:
you could exclude the nodes needed to access hidden services or
directories. So we need to add to the process the concept of a whitelist.
In addition, we may have our own blacklist of nodes we want to exclude,
or use these lists for other applications like selektor.

So we make two files that are structured in YAML:
```
/etc/tor/yaml/torrc-goodnodes.yaml
GoodNodes:
  Relays:
    IntroductionPoints:
      - NODEFINGERPRINT
...
```
By default all sections of the goodnodes.yaml are used as a whitelist.

```
/etc/tor/yaml/torrc-badnodes.yaml
BadNodes:
  ExcludeExitNodes:
    BadExit:
      # $0000000000000000000000000000000000000007
```

That part requires [PyYAML](https://pyyaml.org/wiki/PyYAML)
https://github.com/yaml/pyyaml/
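
For example, a minimal sketch (assuming PyYAML is installed and the files
live at the paths above) of how the badnodes file is read back in:
```
import yaml
with open('/etc/tor/yaml/torrc-badnodes.yaml', 'rt') as oFd:
    oBadNodes = yaml.safe_load(oFd)
# the BadExit fingerprints live under BadNodes/ExcludeNodes/BadExit
lBadExits = oBadNodes['BadNodes']['ExcludeNodes']['BadExit']
```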

Right now only the ExcludeExitNodes section is used, but we may add ExcludeNodes
later. By default all sub-sections of the badnodes.yaml are used as
ExcludeExitNodes, but that can be customized with the lWanted commandline arg.

The original idea has also been extended to add different conditions for
exclusion: the ```--contact``` commandline arg is a comma-separated list of conditions:
* Empty - no contact info
* NoEmail - no @ sign in the contact
More may be added later.
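
As a rough sketch, those conditions translate into checks on the relay's
ContactInfo string like the following (simplified from the main loop below):
```
lConds = oArgs.contact.split(',')   # e.g. ['Empty', 'NoEmail']
if (('Empty' in lConds and not relay.contact)
        or ('NoEmail' in lConds and relay.contact
            and 'email:' not in relay.contact)):
    texclude_set.add(relay.fingerprint)
```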

Because you don't want to exclude the introduction points to any onion
you want to connect to, ```--white_services``` should whitelist the
introduction points of a comma-separated list of onions, but this is
currently broken in stem 1.8.0: see:
* https://github.com/torproject/stem/issues/96
* https://gitlab.torproject.org/legacy/trac/-/issues/25417

```--torrc_output``` will write the torrc ExcludeNodes configuration to a file.

Now for the final part: we look up the Contact info of every relay
that is currently in our Tor, and check that its proof URL actually exists.
If it fails to provide the well-known URL, we assume it's a bogus
relay and add it to a list of nodes that goes on ExcludeNodes -
not just ExcludeExitNodes.

If the Contact info is good, we add the list of fingerprints to
ExitNodes, a whitelist of relays to use as exits.
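
The check itself boils down to fetching the contact's well-known file and
looking for the relay's fingerprint in it - roughly (a simplified sketch of
what aVerifyContact below does):
```
url = f"https://{domain}/.well-known/tor-relay/rsa-fingerprint.txt"
o = oDownloadUrl(url, https_cafile, timeout=timeout, host=host, port=port)
fps = [elt for elt in o.text.upper().strip().splitlines()
       if elt and len(elt) == 40 and not elt.startswith('#')]
verified = relay.fingerprint in fps
```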

```--proof_output``` will write the contact info as a ciiss dictionary
to a YAML file. If the proof is uri-rsa, the well-known file of fingerprints
is downloaded and the fingerprints are added in a 'fps' field we create
on that fingerprint's entry of the YAML dictionary. This file is read at the
beginning of the program to start with a trust database, and only new
contact info from new relays is added to the dictionary.
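
An entry in that trust database is keyed by fingerprint and looks roughly
like this (the field values are illustrative only):
```
"NODEFINGERPRINT":
  email: ops@example.org
  url: https://example.org
  proof: uri-rsa
  ciissversion: 2
  fps:
    - NODEFINGERPRINT
  modified: 1668000000
```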

You can expect it to take an hour or two the first time this is run:
there are >700 domains to check.

For usage, do ```python3 exclude_badExits.py --help```
"""

import sys
import os
import re
import socket
import time
import argparse
import string
from io import StringIO

import ipaddr
# list(ipaddress._find_address_range(ipaddress.IPv4Network('172.16.0.0/12'))

from urllib3.util.ssl_match_hostname import CertificateError

import stem
from stem import InvalidRequest
from stem.control import Controller
from stem.connection import IncorrectPassword
from stem.util.tor_tools import is_valid_fingerprint

try:
    import yaml
except ImportError:
    yaml = None

try:
    from unbound import ub_ctx, RR_TYPE_TXT, RR_CLASS_IN
except ImportError:
    ub_ctx = RR_TYPE_TXT = RR_CLASS_IN = None

try:
    if 'COLOREDLOGS_LEVEL_STYLES' not in os.environ:
        os.environ['COLOREDLOGS_LEVEL_STYLES'] = 'spam=22;debug=28;verbose=34;notice=220;warning=202;success=118,bold;error=124;critical=background=red'
    # https://pypi.org/project/coloredlogs/
    import coloredlogs
except ImportError as e:
    coloredlogs = False

import logging
import warnings

warnings.filterwarnings('ignore')
LOG = logging.getLogger()

import requests

from trustor_poc import oDownloadUrlUrllib3 as oDownloadUrl
from trustor_poc import idns_validate, TrustorError
from support_onions import icheck_torrc, bAreWeConnected, lIntroductionPoints, zResolveDomain, vwait_for_controller, yKNOWN_NODNS

LOG.info("imported the trustor_poc and support_onions helpers")

ETC_DIR = '/etc/tor/yaml'
aTRUST_DB = {}
aTRUST_DB_INDEX = {}
aFP_EMAIL = {}
sDETAILS_URL = "https://metrics.torproject.org/rs.html#details/"
# You can call this while bootstrapping
sEXCLUDE_EXIT_KEY = 'ExcludeNodes'
sINCLUDE_EXIT_KEY = 'ExitNodes'
sINCLUDE_GUARD_KEY = 'EntryNodes'


def oMakeController(sSock='', port=9051):
    import getpass
    if sSock and os.path.exists(sSock):
        controller = Controller.from_socket_file(path=sSock)
    else:
        controller = Controller.from_port(port=port)
    sys.stdout.flush()
    p = getpass.getpass(prompt='Controller Password: ', stream=sys.stderr)
    controller.authenticate(p)
    return controller
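
# A minimal usage sketch (mirroring how iMain calls it below): prefer the
# control socket when it exists, otherwise fall back to the control port.
#   controller = oMakeController(sSock='/run/tor/control')
#   controller = oMakeController(port=9051)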


oBAD_NODES = {}
oBAD_ROOT = 'BadNodes'
oBAD_NODES[oBAD_ROOT] = {}
oBAD_NODES[oBAD_ROOT]['ExcludeNodes'] = {}

lKNOWN_NODNS = []
lMAYBE_NODNS = []


def lYamlBadNodes(sFile,
                  section=sEXCLUDE_EXIT_KEY,
                  lWanted=['BadExit']):
    global oBAD_NODES
    global lKNOWN_NODNS
    global lMAYBE_NODNS

    l = []
    if not yaml: return l
    if os.path.exists(sFile):
        with open(sFile, 'rt') as oFd:
            oBAD_NODES = yaml.safe_load(oFd)

    # BROKEN - section/lWanted filtering is not implemented yet
    # root = 'ExcludeNodes'
    # for elt in o[oBAD_ROOT][root][section].keys():
    #     if lWanted and elt not in lWanted: continue
    #     # l += o[oBAD_ROOT][root][section][elt]

    l = (oBAD_NODES[oBAD_ROOT].get('ExcludeNodes') or {}).get('BadExit') or []

    root = 'ExcludeDomains'
    if root not in oBAD_NODES[oBAD_ROOT] or not oBAD_NODES[oBAD_ROOT][root]:
        lMAYBE_NODNS = yaml.safe_load(StringIO(yKNOWN_NODNS))
    else:
        lMAYBE_NODNS = oBAD_NODES[oBAD_ROOT][root]
    return l


oGOOD_NODES = {}
oGOOD_ROOT = 'GoodNodes'


def lYamlGoodNodes(sFile='/etc/tor/torrc-goodnodes.yaml'):
    global oGOOD_NODES
    root = oGOOD_ROOT
    l = []
    if not yaml: return l
    if os.path.exists(sFile):
        with open(sFile, 'rt') as oFd:
            o = yaml.safe_load(oFd)
        oGOOD_NODES = o
        if 'GuardNodes' in o[root].keys():
            l = o[root]['GuardNodes']
    # yq '.Nodes.IntroductionPoints|.[]' < /etc/tor/torrc-goodnodes.yaml
    return l


def bdomain_is_bad(domain, fp):
    global lKNOWN_NODNS
    if domain in lKNOWN_NODNS: return True
    if domain in lMAYBE_NODNS:
        ip = zResolveDomain(domain)
        if ip == '':
            LOG.debug(f"{fp} {domain} does not resolve")
            lKNOWN_NODNS.append(domain)
            lMAYBE_NODNS.remove(domain)
            return True

    for elt in '@(){}$!':
        if elt in domain:
            LOG.warn(f"{elt} in domain {domain}")
            return True
    return False
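
# A domain is considered bad if it is already known not to resolve, fails to
# resolve now, or contains characters that never belong in a hostname.
# For example (hypothetical values):
#   bdomain_is_bad('relay.example.org', fp)  # False for a resolvable host
#   bdomain_is_bad('not@a$domain', fp)       # True - suspicious characters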


tBAD_URLS = set()
lATS = ['abuse', 'email']
lINTS = ['ciissversion', 'uplinkbw', 'signingkeylifetime', 'memory']
lBOOLS = ['dnssec', 'dnsqname', 'aesni', 'autoupdate', 'dnslocalrootzone',
          'sandbox', 'offlinemasterkey']


def aVerifyContact(a, fp, https_cafile, timeout=20, host='127.0.0.1', port=9050):
    global tBAD_URLS
    global lKNOWN_NODNS

    # cleanups so the field values serialize sanely to YAML
    for elt in lINTS:
        if elt in a:
            a[elt] = int(a[elt])
    for elt in lBOOLS:
        if elt in a:
            if a[elt] in ['y', 'yes', 'true', 'True']:
                a[elt] = True
            else:
                a[elt] = False
    for elt in lATS:
        if elt in a:
            a[elt] = a[elt].replace('[]', '@')

    a.update({'fps': []})
    keys = list(a.keys())
    if 'email' not in keys:
        LOG.warn(f"{fp} 'email' not in {keys}")
        a['email'] = ''
    if 'ciissversion' not in keys:
        aFP_EMAIL[fp] = a['email']
        LOG.warn(f"{fp} 'ciissversion' not in {keys}")
        a['ciissversion'] = 2

    # test the url for fps and add it to the array
    if 'proof' not in keys:
        aFP_EMAIL[fp] = a['email']
        LOG.warn(f"{fp} 'proof' not in {keys}")
        return a

    if aTRUST_DB_INDEX and fp in aTRUST_DB_INDEX.keys():
        aCachedContact = aTRUST_DB_INDEX[fp]
        if aCachedContact['email'] == a['email']:
            LOG.info(f"{fp} in aTRUST_DB_INDEX")
            return aCachedContact

    if 'url' not in keys:
        if 'uri' not in keys:
            a['url'] = ''
            aFP_EMAIL[fp] = a['email']
            LOG.warn(f"{fp} url and uri not in {keys}")
            return a
        a['url'] = a['uri']
        aFP_EMAIL[fp] = a['email']
        LOG.debug(f"{fp} 'uri' but not 'url' in {keys}")
        # drop through

    # str.lstrip('https://') strips *characters*, not a prefix, and can eat
    # leading letters of the host - strip the scheme explicitly instead
    c = a['url']
    if c.startswith('https://'):
        c = c[len('https://'):]
    elif c.startswith('http://'):
        c = c[len('http://'):]
    c = c.strip('/')
    a['url'] = 'https://' + c

    # domain should be a unique key for contacts
    domain = a['url'][8:]
    if bdomain_is_bad(domain, fp):
        LOG.warn(f"{domain} is bad from {a['url']}")
        LOG.debug(f"{fp} is bad from {a}")
        return a

    ip = zResolveDomain(domain)
    if ip == '':
        aFP_EMAIL[fp] = a['email']
        LOG.debug(f"{fp} {domain} does not resolve")
        lKNOWN_NODNS.append(domain)
        return {}

    if a['proof'] not in ['uri-rsa']:
        # only support uri-rsa for now
        if False and ub_ctx:
            fp_domain = fp + '.' + domain
            if idns_validate(fp_domain,
                             libunbound_resolv_file='resolv.conf',
                             dnssec_DS_file='dnssec-root-trust',
                             ) == 0:
                pass
        LOG.warn(f"{fp} proof={a['proof']} not supported yet")
        return a

    LOG.debug(f"{len(keys)} contact fields for {fp}")
    url = "https://" + domain + "/.well-known/tor-relay/rsa-fingerprint.txt"
    try:
        LOG.debug(f"Downloading from {domain} for {fp}")
        o = oDownloadUrl(url, https_cafile,
                         timeout=timeout, host=host, port=port)
        # requests response: text "reason", "status_code"
    except AttributeError as e:
        LOG.exception(f"AttributeError downloading from {domain} {e}")
    except CertificateError as e:
        LOG.warn(f"CertificateError downloading from {domain} {e}")
        tBAD_URLS.add(a['url'])
    except TrustorError as e:
        if e.args and e.args[0] == "HTTP Errorcode 404":
            aFP_EMAIL[fp] = a['email']
            LOG.warn(f"TrustorError 404 from {domain} {e.args}")
        else:
            LOG.warn(f"TrustorError downloading from {domain} {e.args}")
        tBAD_URLS.add(a['url'])
    except BaseException as e:
        LOG.error(f"Exception {type(e)} downloading from {domain} {e}")
    else:
        if hasattr(o, 'status'):
            status_code = o.status
        else:
            status_code = o.status_code
        if status_code >= 300:
            aFP_EMAIL[fp] = a['email']
            LOG.warn(f"Error from {domain} {status_code} {o.reason}")
            # any reason to retry?
            tBAD_URLS.add(a['url'])
            return a

        if hasattr(o, 'text'):
            data = o.text
        else:
            data = str(o.data, 'UTF-8')
        l = data.upper().strip().split('\n')
        LOG.debug(f"Downloaded from {domain} {len(l)} lines {len(data)} bytes")

        a['modified'] = int(time.time())
        if not l:
            LOG.warn(f"Downloading from {domain} empty for {fp}")
        else:
            a['fps'] = [elt for elt in l if elt and len(elt) == 40
                        and not elt.startswith('#')]
            LOG.info(f"Downloaded from {domain} {len(a['fps'])} FPs")
    return a
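
# On success the returned dict carries the cleaned-up ContactInfo fields plus
# a 'fps' list of the fingerprints published at the contact's well-known URL
# and a 'modified' timestamp; callers treat an empty 'fps' as "not verified".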


def aParseContactYaml(contact, fp):
    """
    See the Tor ContactInfo Information Sharing Specification v2
    https://nusenu.github.io/ContactInfo-Information-Sharing-Specification/
    """
    lelts = contact.split()
    a = {}
    if len(lelts) % 2 != 0:
        LOG.warn(f"bad contact for {fp} odd number of components")
        LOG.debug(f"{fp} {a}")
        return a
    key = ''
    for elt in lelts:
        if key == '':
            key = elt
            continue
        a[key] = elt
        key = ''
    LOG.debug(f"{fp} {len(a.keys())} fields")
    return a


def aParseContact(contact, fp):
    """
    See the Tor ContactInfo Information Sharing Specification v2
    https://nusenu.github.io/ContactInfo-Information-Sharing-Specification/
    """
    l = [line for line in contact.strip().replace('"', '').split(' ')
         if ':' in line]
    LOG.debug(f"{fp} {len(l)} fields")
    s = f'"{fp}":\n'
    s += '\n'.join([f"  {line}\"".replace(':', ': "', 1)
                    for line in l])
    oFd = StringIO(s)
    a = yaml.safe_load(oFd)
    return a
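
# Example of the YAML round-trip above with a hypothetical ContactInfo string:
#   aParseContact('email:ops[]example.org url:https://example.org ciissversion:2',
#                 '0123456789ABCDEF0123456789ABCDEF01234567')
# yields roughly:
#   {'0123456789ABCDEF0123456789ABCDEF01234567':
#       {'email': 'ops[]example.org', 'url': 'https://example.org', 'ciissversion': '2'}}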


def vsetup_logging(log_level, logfile=''):
    global LOG
    add = True

    # stem repoints logging handlers, so quiet it down
    from stem.util import log
    logging.getLogger('stem').setLevel(30)

    logging._defaultFormatter = logging.Formatter(datefmt='%m-%d %H:%M:%S')
    logging._defaultFormatter.default_time_format = '%m-%d %H:%M:%S'
    logging._defaultFormatter.default_msec_format = ''

    kwargs = dict(level=log_level,
                  force=True,
                  format='%(levelname)s %(message)s')

    if logfile:
        add = logfile.startswith('+')
        sub = logfile.startswith('-')
        if add or sub:
            logfile = logfile[1:]
        kwargs['filename'] = logfile

    if coloredlogs:
        # https://pypi.org/project/coloredlogs/
        aKw = dict(level=log_level,
                   logger=LOG,
                   stream=sys.stdout if add else None,
                   fmt='%(levelname)s %(message)s'
                   )
        coloredlogs.install(**aKw)
        if logfile:
            oHandler = logging.FileHandler(logfile)
            LOG.addHandler(oHandler)
        LOG.info(f"Setting log_level to {log_level!s} (coloredlogs)")
    else:
        logging.basicConfig(**kwargs)
        if add and logfile:
            oHandler = logging.StreamHandler(sys.stdout)
            LOG.addHandler(oHandler)
        LOG.info(f"Setting log_level to {log_level!s}")

    logging._levelToName = {
        logging.CRITICAL: 'CRITICAL',
        logging.ERROR: 'ERROR',
        logging.WARNING: 'WARN',
        logging.INFO: 'INFO',
        logging.DEBUG: 'DEBUG',
        logging.NOTSET: 'NOTSET',
    }
    logging._nameToLevel = {
        'CRITICAL': logging.CRITICAL,
        'FATAL': logging.FATAL,
        'ERROR': logging.ERROR,
        'WARN': logging.WARNING,
        'WARNING': logging.WARNING,
        'INFO': logging.INFO,
        'DEBUG': logging.DEBUG,
        'NOTSET': logging.NOTSET,
    }
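
# A minimal usage sketch (hypothetical path): log at DEBUG, and a leading '+'
# on the logfile name means "also keep logging to stdout" besides the file.
#   vsetup_logging(10, logfile='+/var/log/exclude_badExits.log')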


def oMainArgparser(_=None):

    try:
        from OpenSSL import SSL
        lCAfs = SSL._CERTIFICATE_FILE_LOCATIONS
    except:
        lCAfs = []

    CAfs = []
    for elt in lCAfs:
        if os.path.exists(elt):
            CAfs.append(elt)
    if not CAfs:
        CAfs = ['']

    parser = argparse.ArgumentParser(add_help=True,
                                     epilog=__doc__)
    parser.add_argument('--https_cafile', type=str,
                        help="Certificate Authority file (in PEM)",
                        default=CAfs[0])
    parser.add_argument('--proxy_host', '--proxy-host', type=str,
                        default='127.0.0.1',
                        help='proxy host')
    parser.add_argument('--proxy_port', '--proxy-port', default=9050, type=int,
                        help='proxy SOCKS port')
    parser.add_argument('--proxy_ctl', '--proxy-ctl',
                        default='/run/tor/control' if os.path.exists('/run/tor/control') else 9051,
                        type=str,
                        help='control socket - or port')

    parser.add_argument('--torrc',
                        default='/etc/tor/torrc-defaults',
                        type=str,
                        help='torrc to check for suggestions')
    parser.add_argument('--timeout', default=60, type=int,
                        help='proxy download connect timeout')

    parser.add_argument('--good_nodes', type=str,
                        default=os.path.join(ETC_DIR, 'torrc-goodnodes.yaml'),
                        help="Yaml file of good info that should not be excluded")
    parser.add_argument('--bad_nodes', type=str,
                        default=os.path.join(ETC_DIR, 'torrc-badnodes.yaml'),
                        help="Yaml file of bad nodes that should also be excluded")
    parser.add_argument('--contact', type=str, default='Empty,NoEmail',
                        help="comma-separated list of conditions - Empty,NoEmail")
    parser.add_argument('--bad_contacts', type=str,
                        default=os.path.join(ETC_DIR, 'badcontacts.yaml'),
                        help="Yaml file of bad contacts that bad FPs are using")

    parser.add_argument('--strict_nodes', type=int, default=0,
                        choices=[0, 1],
                        help="Set StrictNodes: 1 is less anonymous but more secure, although some sites may be unreachable")
    parser.add_argument('--wait_boot', type=int, default=120,
                        help="Seconds to wait for Tor to bootstrap")
    parser.add_argument('--points_timeout', type=int, default=0,
                        help="Timeout for getting introduction points - must be long >120sec. 0 means disabled looking for IPs")
    parser.add_argument('--log_level', type=int, default=20,
                        help="10=debug 20=info 30=warn 40=error")
    parser.add_argument('--bad_sections', type=str,
                        default='MyBadExit',
                        help="sections of the badnodes.yaml to use, comma separated, '' BROKEN")
    parser.add_argument('--white_services', type=str,
                        default='',
                        help="comma sep. list of onions to whitelist their introduction points - BROKEN")
    parser.add_argument('--torrc_output', type=str, default='',
                        help="Write the torrc configuration to a file")
    parser.add_argument('--proof_output', type=str, default=os.path.join(ETC_DIR, 'proof.yaml'),
                        help="Write the proof data of the included nodes to a YAML file")
    return parser


def vwrite_badnodes(oArgs, oBAD_NODES, slen):
    if oArgs.bad_nodes:
        tmp = oArgs.bad_nodes + '.tmp'
        bak = oArgs.bad_nodes + '.bak'
        with open(tmp, 'wt') as oFYaml:
            yaml.dump(oBAD_NODES, indent=2, stream=oFYaml)
            LOG.info(f"Wrote {slen} bad nodes to {oArgs.bad_nodes}")
        if os.path.exists(oArgs.bad_nodes):
            os.rename(oArgs.bad_nodes, bak)
        os.rename(tmp, oArgs.bad_nodes)


def vwrite_goodnodes(oArgs, oGOOD_NODES, ilen):
    if oArgs.good_nodes:
        tmp = oArgs.good_nodes + '.tmp'
        bak = oArgs.good_nodes + '.bak'
        with open(tmp, 'wt') as oFYaml:
            yaml.dump(oGOOD_NODES, indent=2, stream=oFYaml)
            LOG.info(f"Wrote {ilen} good relays to {oArgs.good_nodes}")
        if os.path.exists(oArgs.good_nodes):
            os.rename(oArgs.good_nodes, bak)
        os.rename(tmp, oArgs.good_nodes)


def iMain(lArgs):
    global aTRUST_DB
    global aTRUST_DB_INDEX
    global oBAD_NODES
    global oGOOD_NODES
    global lKNOWN_NODNS

    parser = oMainArgparser()
    oArgs = parser.parse_args(lArgs)

    vsetup_logging(oArgs.log_level)
    if bAreWeConnected() is False:
        raise SystemExit("we are not connected")

    sFile = oArgs.torrc
    if sFile and os.path.exists(sFile):
        icheck_torrc(sFile, oArgs)

    twhitelist_set = set()
    sFile = oArgs.proof_output
    if sFile and os.path.exists(sFile):
        try:
            with open(sFile, 'rt') as oFd:
                aTRUST_DB = yaml.safe_load(oFd)
            assert type(aTRUST_DB) == dict
            LOG.info(f"{len(aTRUST_DB.keys())} trusted contacts from {sFile}")
            # build a reverse lookup from fingerprints to contacts
            for k, v in aTRUST_DB.items():
                if 'modified' not in v.keys():
                    v['modified'] = int(time.time())
                aTRUST_DB_INDEX[k] = v
                if 'fps' in aTRUST_DB[k].keys():
                    for fp in aTRUST_DB[k]['fps']:
                        if fp in aTRUST_DB_INDEX:
                            continue
                        aTRUST_DB_INDEX[fp] = v
            LOG.info(f"{len(aTRUST_DB_INDEX.keys())} good relays from {sFile}")
        except Exception as e:
            LOG.exception(f"Error reading YAML TrustDB {sFile} {e}")

    if os.path.exists(oArgs.proxy_ctl):
        controller = oMakeController(sSock=oArgs.proxy_ctl)
    else:
        port = int(oArgs.proxy_ctl)
        controller = oMakeController(port=port)

    vwait_for_controller(controller, oArgs.wait_boot)

    if oArgs.proof_output:
        proof_output_tmp = oArgs.proof_output + '.tmp'

    elt = controller.get_conf('UseMicrodescriptors')
    if elt != '0':
        LOG.error('"UseMicrodescriptors 0" is required in your /etc/tor/torrc. Exiting.')
        controller.set_conf('UseMicrodescriptors', 0)
        # does it work dynamically?
        return 2

    elt = controller.get_conf(sEXCLUDE_EXIT_KEY)
    if elt and elt != '{??}':
        LOG.warn(f"{sEXCLUDE_EXIT_KEY} is in use already")

    twhitelist_set.update(set(lYamlGoodNodes(oArgs.good_nodes)))
    LOG.info(f"lYamlGoodNodes {len(twhitelist_set)} GuardNodes from {oArgs.good_nodes}")

    t = set()
    if 'IntroductionPoints' in oGOOD_NODES[oGOOD_ROOT]['Relays'].keys():
        t = set(oGOOD_NODES[oGOOD_ROOT]['Relays']['IntroductionPoints'])
    # not working - maybe when stem is updated
    w = set(oGOOD_NODES[oGOOD_ROOT].get('Services') or [])
    if oArgs.white_services:
        w.update(oArgs.white_services.split(','))
    if oArgs.points_timeout > 0:
        LOG.info(f"{len(w)} services will be checked from IntroductionPoints")
        t.update(lIntroductionPoints(controller, w, itimeout=oArgs.points_timeout))
    if len(t) > 0:
        LOG.info(f"IntroductionPoints {len(t)} relays from {len(w)} services")
        twhitelist_set.update(t)

    texclude_set = set()
    if oArgs.bad_nodes and os.path.exists(oArgs.bad_nodes):
        if False and oArgs.bad_sections:
            # BROKEN - section filtering is not implemented in lYamlBadNodes yet
            sections = oArgs.bad_sections.split(',')
        else:
            sections = ['BadExit']
        texclude_set = set(lYamlBadNodes(oArgs.bad_nodes,
                                         lWanted=sections,
                                         section=sEXCLUDE_EXIT_KEY))
    LOG.info(f"Preloaded {len(texclude_set)} bad fps")

    ttrust_db_index = aTRUST_DB_INDEX.keys()
    tdns_urls = set()
    iFakeContact = 0
    iTotalContacts = 0
    aBadContacts = {}

    lConds = oArgs.contact.split(',')
    iR = 0

    relays = controller.get_server_descriptors()
    for relay in relays:
        iR += 1
        if not is_valid_fingerprint(relay.fingerprint):
            LOG.warn('Invalid Fingerprint: %s' % relay.fingerprint)
            continue
        relay.fingerprint = relay.fingerprint.upper()

        sofar = f"G:{len(aTRUST_DB.keys())} U:{len(tdns_urls)} F:{iFakeContact} BF:{len(texclude_set)} GF:{len(ttrust_db_index)} TC:{iTotalContacts} #{iR}"
        if not relay.exit_policy.is_exiting_allowed():
            if sEXCLUDE_EXIT_KEY == 'ExcludeNodes':
                pass # LOG.debug(f"{relay.fingerprint} not an exit {sofar}")
            else:
                pass # LOG.warn(f"{relay.fingerprint} not an exit {sofar}")
            # continue

        # a good contact had good fps and we are in them - use the cached entry
        if relay.fingerprint in aTRUST_DB_INDEX.keys():
            continue

        if type(relay.contact) == bytes:
            # dunno
            relay.contact = str(relay.contact, 'UTF-8')

        if ('Empty' in lConds and not relay.contact) or \
           ('NoEmail' in lConds and relay.contact and 'email:' not in relay.contact):
            texclude_set.add(relay.fingerprint)
            continue

        if not relay.contact or 'ciissversion:' not in relay.contact:
            # should be unreachable - 'Empty' should always be in lConds
            continue
        iTotalContacts += 1

        fp = relay.fingerprint
        if relay.contact and 'url:' not in relay.contact:
            LOG.info(f"{fp} skipping bad contact - no url: {sofar}")
            LOG.debug(f"{fp} {relay.contact} {sofar}")
            texclude_set.add(fp)
            continue

        c = relay.contact.lower()
        # first rough cut: pull the domain out of the url: field
        i = c.find('url:')
        if i >= 0:
            c = c[i + 4:]
        i = c.find(' ')
        if i >= 0: c = c[:i]
        # strip the scheme explicitly - lstrip('https://') strips characters
        if c.startswith('https://'):
            c = c[len('https://'):]
        elif c.startswith('http://'):
            c = c[len('http://'):]
        c = c.strip('/')
        i = c.find('/')
        if i >= 0: c = c[:i]
        domain = c
        if domain and bdomain_is_bad(domain, fp):
            LOG.info(f"{fp} skipping bad {domain} {sofar}")
            LOG.debug(f"{fp} {relay.contact} {sofar}")
            texclude_set.add(fp)
            continue

        if domain:
            ip = zResolveDomain(domain)
            if not ip:
                LOG.warn(f"{fp} {domain} did not resolve {sofar}")
                texclude_set.add(fp)
                lKNOWN_NODNS.append(domain)
                iFakeContact += 1
                continue

        if 'dns-rsa' in relay.contact.lower():
            target = f"{relay.fingerprint}.{domain}"
            LOG.info(f"skipping 'dns-rsa' {target} {sofar}")
            tdns_urls.add(target)

        elif 'proof:uri-rsa' in relay.contact.lower():
            a = aParseContact(relay.contact, relay.fingerprint)
            if not a:
                LOG.warn(f"{relay.fingerprint} did not parse {sofar}")
                texclude_set.add(relay.fingerprint)
                continue
            if 'url' in a and a['url']:
                if a['url'] in tBAD_URLS:
                    # The fp is using a contact with a URL we know is bad
                    LOG.info(f"{relay.fingerprint} skipping in tBAD_URLS {a['url']} {sofar}")
                    LOG.debug(f"{relay.fingerprint} {a} {sofar}")
                    iFakeContact += 1
                    texclude_set.add(relay.fingerprint)
                    continue
                domain = a['url'].replace('https://', '').replace('http://', '')
                if domain in lKNOWN_NODNS:
                    # The fp is using a contact with a URL we know is bogus
                    LOG.info(f"{relay.fingerprint} skipping in lKNOWN_NODNS {a['url']} {sofar}")
                    LOG.debug(f"{relay.fingerprint} {a} {sofar}")
                    iFakeContact += 1
                    texclude_set.add(relay.fingerprint)
                    continue

            b = aVerifyContact(list(a.values())[0],
                               relay.fingerprint,
                               oArgs.https_cafile,
                               timeout=oArgs.timeout,
                               host=oArgs.proxy_host,
                               port=oArgs.proxy_port)

            if not b or 'fps' not in b or not b['fps'] or not b['url']:
                LOG.warn(f"{relay.fingerprint} did NOT VERIFY {sofar}")
                LOG.debug(f"{relay.fingerprint} {b} {sofar}")
                # If it's giving contact info that doesn't check out,
                # it could be a bad exit with fake contact info
                texclude_set.add(relay.fingerprint)
                aBadContacts[relay.fingerprint] = b
                continue

            if relay.fingerprint not in b['fps']:
                LOG.warn(f"{relay.fingerprint} the FP IS NOT in the list of fps {sofar}")
                # assume a fp is using a bogus contact
                texclude_set.add(relay.fingerprint)
                iFakeContact += 1
                aBadContacts[relay.fingerprint] = b
                continue

            LOG.info(f"{relay.fingerprint} verified {b['url']} {sofar}")
            # add our contact info to the trustdb
            aTRUST_DB[relay.fingerprint] = b
            for elt in b['fps']:
                aTRUST_DB_INDEX[elt] = b
            if oArgs.proof_output and oArgs.log_level <= 20:
                # write the trustdb as we go along, then clobber it at the end
                with open(proof_output_tmp, 'wt') as oFYaml:
                    yaml.dump(aTRUST_DB, indent=2, stream=oFYaml)

    LOG.info(f"Filtered out {len(twhitelist_set)} whitelisted relays")
    texclude_set = texclude_set.difference(twhitelist_set)
    # accept the dns-rsa urls for now until we test them
    texclude_set = texclude_set.difference(tdns_urls)
    LOG.info(f"{len(list(aTRUST_DB.keys()))} good contacts out of {iTotalContacts}")

    if oArgs.proof_output and aTRUST_DB:
        with open(proof_output_tmp, 'wt') as oFYaml:
            yaml.dump(aTRUST_DB, indent=2, stream=oFYaml)
        if os.path.exists(oArgs.proof_output):
            bak = oArgs.proof_output + '.bak'
            os.rename(oArgs.proof_output, bak)
        os.rename(proof_output_tmp, oArgs.proof_output)
        LOG.info(f"Wrote {len(list(aTRUST_DB.keys()))} good contact details to {oArgs.proof_output}")

    if oArgs.torrc_output and texclude_set:
        with open(oArgs.torrc_output, 'wt') as oFTorrc:
            oFTorrc.write(f"{sEXCLUDE_EXIT_KEY} {','.join(texclude_set)}\n")
            oFTorrc.write(f"{sINCLUDE_EXIT_KEY} {','.join(aTRUST_DB_INDEX.keys())}\n")
            oFTorrc.write(f"{sINCLUDE_GUARD_KEY} {','.join(oGOOD_NODES[oGOOD_ROOT]['GuardNodes'])}\n")
            LOG.info(f"Wrote tor configuration to {oArgs.torrc_output}")

    if oArgs.bad_contacts and aBadContacts:
        # save the bad contacts for later analysis
        with open(oArgs.bad_contacts, 'wt') as oFYaml:
            yaml.dump(aBadContacts, indent=2, stream=oFYaml)

    oBAD_NODES[oBAD_ROOT]['ExcludeNodes']['BadExit'] = list(texclude_set)
    oBAD_NODES[oBAD_ROOT]['ExcludeDomains'] = lKNOWN_NODNS
    vwrite_badnodes(oArgs, oBAD_NODES, str(len(texclude_set)))

    oGOOD_NODES['GoodNodes']['Relays']['ExitNodes'] = list(aTRUST_DB_INDEX.keys())
    # GuardNodes are read-only
    vwrite_goodnodes(oArgs, oGOOD_NODES, len(aTRUST_DB_INDEX.keys()))

    retval = 0
    try:
        logging.getLogger('stem').setLevel(30)
        try:
            if texclude_set:
                LOG.info(f"{sEXCLUDE_EXIT_KEY} {len(texclude_set)} net bad exit relays")
                controller.set_conf(sEXCLUDE_EXIT_KEY, texclude_set)
        except stem.SocketClosed as e:
            LOG.error(f"Failed setting {sEXCLUDE_EXIT_KEY} bad exit relays in Tor")
            retval += 1

        try:
            if aTRUST_DB_INDEX.keys():
                LOG.info(f"{sINCLUDE_EXIT_KEY} {len(aTRUST_DB_INDEX.keys())} good relays")
                controller.set_conf(sINCLUDE_EXIT_KEY, aTRUST_DB_INDEX.keys())
        except stem.SocketClosed as e:
            LOG.error(f"Failed setting {sINCLUDE_EXIT_KEY} good exit nodes in Tor")
            retval += 1

        try:
            if 'GuardNodes' in oGOOD_NODES[oGOOD_ROOT].keys():
                LOG.info(f"{sINCLUDE_GUARD_KEY} {len(oGOOD_NODES[oGOOD_ROOT]['GuardNodes'])} guard nodes")
                # FixMe: for now override StrictNodes - it may be unusable otherwise
                controller.set_conf(sINCLUDE_GUARD_KEY,
                                    oGOOD_NODES[oGOOD_ROOT]['GuardNodes'])
                cur = controller.get_conf('StrictNodes')
                if oArgs.strict_nodes and int(cur) != oArgs.strict_nodes:
                    LOG.info(f"OVERRIDING StrictNodes to {oArgs.strict_nodes}")
                    controller.set_conf('StrictNodes', oArgs.strict_nodes)
                else:
                    LOG.info(f"StrictNodes is set to {cur}")
        except stem.SocketClosed as e:
            LOG.error(f"Failed setting {sINCLUDE_GUARD_KEY} guard nodes in Tor")
            retval += 1

        sys.stdout.write("dns-rsa domains:\n" + '\n'.join(tdns_urls) + '\n')
        return retval

    except InvalidRequest as e:
        # Unacceptable option value: Invalid router list.
        LOG.error(str(e))
        retval = 1
        return retval
    except KeyboardInterrupt:
        return 0
    except Exception as e:
        LOG.exception(str(e))
        retval = 2
        return retval
    finally:
        # weird - we are getting stem errors during the final return,
        # with a traceback that doesn't correspond to any real flow:
        # File "/usr/lib/python3.9/site-packages/stem/control.py", line 2474, in set_conf
        #   self.set_options({param: value}, False)
        logging.getLogger('stem').setLevel(40)
        try:
            for elt in controller._event_listeners:
                controller.remove_event_listener(elt)
            controller.close()
        except Exception as e:
            LOG.warn(str(e))


if __name__ == '__main__':
    try:
        i = iMain(sys.argv[1:])
    except IncorrectPassword as e:
        LOG.error(e)
        i = 1
    except KeyboardInterrupt:
        i = 0
    except Exception as e:
        LOG.exception(e)
        i = 2
    sys.exit(i)