improved network stability by having automatic tor restarts

Kevin Froman 2019-08-09 20:04:56 -05:00
parent daff149acc
commit 75ec108496
11 changed files with 48 additions and 16 deletions

View File

@@ -138,7 +138,7 @@ class OnionrCommunicatorDaemon:
        deniableBlockTimer.count = (deniableBlockTimer.frequency - 175)

        # Timer to check for connectivity, through Tor to various high-profile onion services
-        netCheckTimer = OnionrCommunicatorTimers(self, netcheck.net_check, 600, myArgs=[self])
+        netCheckTimer = OnionrCommunicatorTimers(self, netcheck.net_check, 500, myArgs=[self], maxThreads=1)

        # Announce the public API server transport address to other nodes if security level allows
        if config.get('general.security_level', 1) == 0 and config.get('general.announce_node', True):
@@ -185,7 +185,10 @@ class OnionrCommunicatorDaemon:
        for server in self.service_greenlets:
            server.stop()
        localcommand.local_command('shutdown') # shutdown the api
-        time.sleep(0.5)
+        try:
+            time.sleep(0.5)
+        except KeyboardInterrupt:
+            pass

    def lookupAdders(self):
        '''Lookup new peer addresses'''
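The netcheck timer now fires every 500 seconds instead of 600 and is capped at a single concurrent thread, so a slow Tor check cannot pile up. A minimal sketch of that frequency/maxThreads pattern (hypothetical and simplified; this is not the project's OnionrCommunicatorTimers implementation):

    import threading
    import time

    def run_periodically(func, frequency, max_threads=1):
        """Call func every `frequency` seconds, allowing at most max_threads overlapping runs."""
        slots = threading.Semaphore(max_threads)

        def worker():
            try:
                func()
            finally:
                slots.release()

        def loop():
            while True:
                # skip this tick if the previous run(s) are still going
                if slots.acquire(blocking=False):
                    threading.Thread(target=worker, daemon=True).start()
                time.sleep(frequency)

        threading.Thread(target=loop, daemon=True).start()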

View File

@@ -22,6 +22,7 @@ import onionrevents as events
from onionrutils import localcommand
from coredb import daemonqueue
import filepaths
+from . import restarttor
def handle_daemon_commands(comm_inst):
    cmd = daemonqueue.daemon_queue()
    response = ''
@@ -43,6 +44,11 @@ def handle_daemon_commands(comm_inst):
        response = 'none'
    elif cmd[0] == 'localCommand':
        response = localcommand.local_command(cmd[1])
+    elif cmd[0] == 'clearOffline':
+        comm_inst.offlinePeers = []
+    elif cmd[0] == 'restartTor':
+        restarttor.restart(comm_inst)
+        comm_inst.offlinePeers = []
    elif cmd[0] == 'pex':
        for i in comm_inst.timers:
            if i.timerFunction.__name__ == 'lookupAdders':
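These new branches are driven by the shared daemon queue: another module enqueues the command name and handle_daemon_commands dispatches it on its next pass. A minimal usage sketch with the coredb.daemonqueue API already imported above:

    from coredb import daemonqueue

    # Ask the running communicator to restart Tor and forget which peers were offline
    daemonqueue.daemon_queue_add('restartTor')

    # Or only clear the offline-peer list, leaving Tor alone
    daemonqueue.daemon_queue_add('clearOffline')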

View File

@@ -21,6 +21,7 @@
import logger
from utils import netutils
from onionrutils import localcommand, epoch
+from . import restarttor
def net_check(comm_inst):
    '''Check if we are connected to the internet or not when we can't connect to any peers'''
    rec = False # for detecting if we have received incoming connections recently
@@ -33,7 +34,9 @@ def net_check(comm_inst):
        pass
    if not rec and not netutils.checkNetwork(torPort=comm_inst.proxyPort):
        if not comm_inst.shutdown:
-            logger.warn('Network check failed, are you connected to the Internet, and is Tor working?')
+            logger.warn('Network check failed, are you connected to the Internet, and is Tor working?', terminal=True)
+            restarttor.restart(comm_inst)
+            comm_inst.offlinePeers = []
        comm_inst.isOnline = False
    else:
        comm_inst.isOnline = True

View File

@@ -0,0 +1,5 @@
+import netcontroller
+def restart(comm_inst):
+    net = comm_inst.shared_state.get(netcontroller.NetController)
+    net.killTor()
+    net.startTor()

View File

@@ -26,8 +26,8 @@ ONIONR_VERSION_TUPLE = tuple(ONIONR_VERSION.split('.')) # (MAJOR, MINOR, VERSION
API_VERSION = '0' # increments of 1; only change when something fundamental about how the API works changes. This way other nodes know how to communicate without learning too much information about you.
MIN_PY_VERSION = 6
DEVELOPMENT_MODE = True
MAX_BLOCK_TYPE_LENGTH = 15
+MAX_BLOCK_CLOCK_SKEW = 120

# Begin OnionrValues migrated values
ANNOUNCE_POW = 5
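MAX_BLOCK_CLOCK_SKEW centralizes the 120-second limit that block metadata validation previously hard-coded (see the validate_metadata changes below). A simplified sketch of the check it feeds, assuming an integer block timestamp:

    from etc import onionrvalues
    from onionrutils import epoch

    def timestamp_not_too_far_in_future(block_time: int) -> bool:
        # Reject blocks stamped more than MAX_BLOCK_CLOCK_SKEW seconds ahead of our clock
        return (block_time - epoch.get_epoch()) <= onionrvalues.MAX_BLOCK_CLOCK_SKEW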

View File

@@ -88,12 +88,12 @@ HiddenServicePort 80 ''' + self.apiServerIP + ''':''' + str(self.hsPort)
        torrc.close()
        return

-    def startTor(self):
+    def startTor(self, gen_torrc=True):
        '''
            Start Tor with onion service on port 80 & socks proxy on random port
        '''
-        self.generateTorrc()
+        if gen_torrc:
+            self.generateTorrc()
        if os.path.exists('./tor'):
            self.torBinary = './tor'
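The new gen_torrc flag makes it possible to bounce the Tor process without rewriting the torrc. The restarttor helper added in this commit keeps the default (regenerating the torrc); a caller that wants to preserve the existing configuration could instead do roughly:

    net = comm_inst.shared_state.get(netcontroller.NetController)
    net.killTor()
    net.startTor(gen_torrc=False)  # reuse the torrc written when Tor was first started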

View File

@@ -0,0 +1,5 @@
+import time
+from coredb import daemonqueue
+
+def rebuild():
+    daemonqueue.daemon_queue_add('restartTor')

View File

@@ -0,0 +1,8 @@
+from stem.control import Controller
+import config
+
+def get_controller():
+    c = Controller.from_port(port=config.get('tor.controlPort'))
+    c.authenticate(config.get('tor.controlpassword'))
+    return c
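This helper only returns an authenticated stem Controller for the configured control port; callers decide what to do with it. A minimal usage sketch (the NEWNYM signal here is just an illustration of the stem API, not something this commit sends):

    from stem import Signal

    controller = get_controller()
    try:
        controller.signal(Signal.NEWNYM)  # ask Tor to switch to fresh circuits
    finally:
        controller.close()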

View File

@@ -22,11 +22,11 @@ import logger, onionrexceptions
from etc import onionrvalues
from onionrutils import stringvalidators, epoch, bytesconverter
import config, filepaths, onionrcrypto

-def validate_metadata(metadata, blockData):
+def validate_metadata(metadata, block_data) -> bool:
    '''Validate metadata meets onionr spec (does not validate proof value computation), take in either dictionary or json string'''
    ret_data = False
-    maxClockDifference = 120
+    max_clock_difference = onionrvalues.MAX_BLOCK_CLOCK_SKEW

    # convert to dict if it is json string
    if type(metadata) is str:
@@ -36,7 +36,7 @@ def validate_metadata(metadata, blockData):
        pass

    # Validate metadata dict for invalid keys to sizes that are too large
-    maxAge = config.get("general.max_block_age", onionrvalues.DEFAULT_EXPIRE)
+    maxAge = min(config.get("general.max_block_age", onionrvalues.DEFAULT_EXPIRE), onionrvalues.DEFAULT_EXPIRE)
    if type(metadata) is dict:
        for i in metadata:
            try:
@@ -58,8 +58,8 @@ def validate_metadata(metadata, blockData):
                    logger.warn('Block metadata time stamp is not integer string or int')
                    break
                isFuture = (metadata[i] - epoch.get_epoch())
-                if isFuture > maxClockDifference:
-                    logger.warn('Block timestamp is skewed to the future over the max %s: %s' (maxClockDifference, isFuture))
+                if isFuture > max_clock_difference:
+                    logger.warn('Block timestamp is skewed to the future over the max %s: %s' (max_clock_difference, isFuture))
                    break
                if (epoch.get_epoch() - metadata[i]) > maxAge:
                    logger.warn('Block is outdated: %s' % (metadata[i],))
@@ -79,7 +79,7 @@ def validate_metadata(metadata, blockData):
    else:
        # if metadata loop gets no errors, it does not break, therefore metadata is valid
        # make sure we do not have another block with the same data content (prevent data duplication and replay attacks)
-        nonce = bytesconverter.bytes_to_str(onionrcrypto.hashers.sha3_hash(blockData))
+        nonce = bytesconverter.bytes_to_str(onionrcrypto.hashers.sha3_hash(block_data))
        try:
            with open(filepaths.data_nonce_file, 'r') as nonceFile:
                if nonce in nonceFile.read():

View File

@@ -18,13 +18,14 @@
    along with this program. If not, see <https://www.gnu.org/licenses/>.
'''
from onionrutils import basicrequests
+from . import readstatic
+from onionrcrypto import cryptoutils

def checkNetwork(torPort=0):
    '''Check if we are connected to the internet (through Tor)'''
    retData = False
    connectURLs = []
    try:
-        with open('static-data/connect-check.txt', 'r') as connectTest:
-            connectURLs = connectTest.read().split(',')
+        connectURLs = cryptoutils.random_shuffle(readstatic.read_static('connect-check.txt').split(','))
        for url in connectURLs:
            if basicrequests.do_get_request(url, port=torPort, ignoreAPI=True) != False:
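checkNetwork now loads the probe URLs through readstatic and shuffles them with cryptoutils.random_shuffle, so the same onion service is not always contacted first. Outside of Onionr's own request helpers, the equivalent probe can be sketched with plain requests over Tor's SOCKS port (assumes requests[socks] is installed; the URL list is whatever connect-check.txt contains):

    import random
    import requests

    def tor_reachable(urls, tor_socks_port):
        proxies = {'http': 'socks5h://127.0.0.1:%d' % tor_socks_port,
                   'https': 'socks5h://127.0.0.1:%d' % tor_socks_port}
        for url in random.sample(urls, len(urls)):  # randomized order, like the shuffled connect list
            try:
                if requests.get(url, proxies=proxies, timeout=30).ok:
                    return True
            except requests.RequestException:
                continue
        return False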

View File

@@ -9,4 +9,5 @@ def read_static(file, ret_bin=False):
    else:
        mode = 'r'
    with open(static_file, mode) as f:
-        return f.read()
+        data = f.read()
+    return data