sync and onionrservices fixes

Kevin Froman 2019-06-29 13:18:31 -05:00
parent d70afbf92b
commit cff38cb7c2
6 changed files with 24 additions and 17 deletions


@@ -33,13 +33,19 @@ from onionrutils import bytesconverter, stringvalidators, epoch, mnemonickeys
 config.reload()
 class FDSafeHandler(WSGIHandler):
     '''Our WSGI handler. Doesn't do much non-default except timeouts'''
+    def __init__(self, sock, address, server, rfile=None):
+        self.socket = sock
+        self.address = address
+        self.server = server
+        self.rfile = rfile
     def handle(self):
-        timeout = Timeout(60, exception=Exception)
-        timeout.start()
-        try:
-            WSGIHandler.handle(self)
-        except Timeout as ex:
-            raise
+        while True:
+            timeout = Timeout(120, exception=Exception)
+            try:
+                FDSafeHandler.handle(self)
+                timeout.start()
+            except Timeout as ex:
+                raise

 def setBindIP(filePath=''):
     '''Set a random localhost IP to a specified file (intended for private or public API localhost IPs)'''
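
Note: the handler above wraps each request in a gevent Timeout so a stalled client cannot pin a worker indefinitely. Below is a minimal, self-contained sketch of that Timeout pattern; the function name and the 5/10 second values are invented for illustration and are not part of this commit.

# Illustrative gevent.Timeout guard around a blocking call (not commit code).
from gevent import Timeout, sleep

def guarded_request():
    timeout = Timeout(5, exception=Exception)  # raise Exception in 5 seconds
    timeout.start()
    try:
        sleep(10)  # stand-in for a blocking WSGIHandler.handle() call
    except Exception:
        print('request timed out')
    finally:
        timeout.cancel()  # always disarm the timer once the request is done

guarded_request()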


@@ -80,19 +80,21 @@ def download_blocks_from_communicator(comm_inst):
                     logger.info('Attempting to save block %s...' % blockHash[:12])
                     try:
                         comm_inst._core.setData(content)
+                    except onionrexceptions.DataExists:
+                        logger.warn('Data is already set for %s ' % (blockHash,))
                     except onionrexceptions.DiskAllocationReached:
-                        logger.error('Reached disk allocation allowance, cannot save block %s.' % blockHash)
+                        logger.error('Reached disk allocation allowance, cannot save block %s.' % (blockHash,))
                         removeFromQueue = False
                     else:
                         comm_inst._core.addToBlockDB(blockHash, dataSaved=True)
                         blockmetadata.process_block_metadata(comm_inst._core, blockHash) # caches block metadata values to block database
                 else:
-                    logger.warn('POW failed for block %s.' % blockHash)
+                    logger.warn('POW failed for block %s.' % (blockHash,))
             else:
                 if comm_inst._core._blacklist.inBlacklist(realHash):
                     logger.warn('Block %s is blacklisted.' % (realHash,))
                 else:
-                    logger.warn('Metadata for block %s is invalid.' % blockHash)
+                    logger.warn('Metadata for block %s is invalid.' % (blockHash,))
                     comm_inst._core._blacklist.addToDB(blockHash)
         else:
             # if block didn't meet expected hash
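
Note: the new DataExists branch means a block that is already stored gets logged and skipped instead of aborting the download loop. A rough, self-contained sketch of the try/except/else flow used here; the stub classes and the save_block helper below are stand-ins, not the real onionrexceptions or Onionr storage code.

# Stand-in exceptions; the real ones live in onionrexceptions (not shown here).
class DataExists(Exception): pass
class DiskAllocationReached(Exception): pass

def save_block(block_hash, content, store, disk_limit=2):
    remove_from_queue = True
    try:
        if block_hash in store:
            raise DataExists
        if len(store) >= disk_limit:
            raise DiskAllocationReached
        store[block_hash] = content
    except DataExists:
        print('Data is already set for %s' % (block_hash,))   # harmless, skip it
    except DiskAllocationReached:
        print('Disk allocation reached, cannot save %s' % (block_hash,))
        remove_from_queue = False                              # leave queued, retry later
    else:
        print('Saved and indexed %s' % (block_hash,))          # runs only if nothing raised
    return remove_from_queue

store = {}
save_block('abc123', b'data', store)
save_block('abc123', b'data', store)  # second call hits the DataExists branch the commit adds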


@@ -21,7 +21,7 @@ import logger
 from onionrutils import stringvalidators

 def lookup_new_peer_transports_with_communicator(comm_inst):
-    logger.info('Looking up new addresses...')
+    logger.info('Looking up new addresses...', terminal=True)
     tryAmount = 1
     newPeers = []
     for i in range(tryAmount):


@@ -18,12 +18,11 @@
     along with this program. If not, see <https://www.gnu.org/licenses/>.
 '''
 import communicator, onionrblockapi
-from onionrutils import stringvalidators
+from onionrutils import stringvalidators, bytesconverter

 def service_creator(daemon):
     assert isinstance(daemon, communicator.OnionrCommunicatorDaemon)
     core = daemon._core
-    utils = core._utils

     # Find socket connection blocks
     # TODO cache blocks and only look at recently received ones
@@ -31,9 +30,9 @@ def service_creator(daemon):
     for b in con_blocks:
         if not b in daemon.active_services:
             bl = onionrblockapi.Block(b, core=core, decrypt=True)
-            bs = utils.bytesToStr(bl.bcontent) + '.onion'
+            bs = bytesconverter.bytes_to_str(bl.bcontent) + '.onion'
             if stringvalidators.validate_pub_key(bl.signer) and stringvalidators.validate_transport(bs):
-                signer = utils.bytesToStr(bl.signer)
+                signer = bytesconverter.bytes_to_str(bl.signer)
                 daemon.active_services.append(b)
                 daemon.active_services.append(signer)
                 daemon.services.create_server(signer, bs)
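
Note: both hunks in this file replace the core-bound utils.bytesToStr helper with the standalone onionrutils.bytesconverter.bytes_to_str, removing the need to hold a core._utils reference. The helper below is only a guess at what such a converter typically does; the real bytesconverter implementation is not part of this commit.

def bytes_to_str(data):
    '''Return data as str, decoding UTF-8 when it arrives as bytes (illustrative stand-in).'''
    if isinstance(data, bytes):
        return data.decode('utf-8')
    return str(data)

# e.g. turning decrypted block content into an onion address string
bs = bytes_to_str(b'exampleonionaddress') + '.onion'  # example value, not from the commit
print(bs)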


@@ -24,7 +24,7 @@ import core, logger, httpapi
 import onionrexceptions
 from netcontroller import getOpenPort
 import api
-from onionrutils import stringvalidators, basicrequests
+from onionrutils import stringvalidators, basicrequests, bytesconverter
 from . import httpheaders

 class ConnectionServer:
@@ -82,7 +82,7 @@ class ConnectionServer:
             raise ConnectionError('Could not reach %s bootstrap address %s' % (peer, address))
         else:
             # If no connection error, create the service and save it to local global key store
-            self.core_inst.keyStore.put('dc-' + response.service_id, self.core_inst._utils.bytesToStr(peer))
+            self.core_inst.keyStore.put('dc-' + response.service_id, bytesconverter.bytes_to_str(peer))
             logger.info('hosting on %s with %s' % (response.service_id, peer))
             http_server.serve_forever()
             http_server.stop()
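
Note: the keyStore.put call above records which peer an ephemeral direct-connection service belongs to, under a 'dc-' prefixed key, so later code can look the peer up from the service id alone. A dict-backed sketch of that pattern; SimpleKeyStore and the example values are invented for illustration and are not the real Onionr key store.

class SimpleKeyStore:
    '''Toy stand-in for Onionr's persistent key/value store.'''
    def __init__(self):
        self._data = {}
    def put(self, key, value):
        self._data[key] = value
    def get(self, key):
        return self._data.get(key)

key_store = SimpleKeyStore()
service_id = 'exampleserviceid'   # hypothetical onion service id
peer = 'examplepeerpubkey'        # hypothetical peer public key

key_store.put('dc-' + service_id, peer)
print(key_store.get('dc-' + service_id))  # -> examplepeerpubkey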


@@ -31,6 +31,6 @@ def set_data(core_inst, data):
         else:
             raise onionrexceptions.DiskAllocationReached
     else:
-        raise Exception("Data is already set for " + dataHash)
+        raise onionrexceptions.DataExists("Data is already set for " + dataHash)

     return dataHash
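
Note: raising the typed onionrexceptions.DataExists instead of a bare Exception is what lets the block download code earlier in this commit catch the duplicate case specifically. A small, self-contained sketch of that round trip; the DataExists class and set_data below are simplified stand-ins for the real modules.

class DataExists(Exception): pass  # stand-in for onionrexceptions.DataExists

def set_data(store, data_hash, data):
    if data_hash not in store:
        store[data_hash] = data
    else:
        raise DataExists('Data is already set for ' + data_hash)
    return data_hash

store = {}
set_data(store, 'abc123', b'block bytes')
try:
    set_data(store, 'abc123', b'block bytes')
except DataExists as e:
    print(e)  # callers can now treat duplicates as a no-op instead of a crash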