bug fixes

Kevin Froman 2018-08-23 14:46:23 -05:00
parent 1faae80aaf
commit e346c09228
GPG Key ID: 0D414D0FE405B63B
5 changed files with 17 additions and 8 deletions

@@ -89,7 +89,7 @@ class OnionrCommunicatorDaemon:
         OnionrCommunicatorTimers(self, self.lookupBlocks, 7, requiresPeer=True, maxThreads=1)
         OnionrCommunicatorTimers(self, self.getBlocks, 10, requiresPeer=True)
         OnionrCommunicatorTimers(self, self.clearOfflinePeer, 58)
-        OnionrCommunicatorTimers(self, self.daemonTools.cleanOldBlocks, 650)
+        OnionrCommunicatorTimers(self, self.daemonTools.cleanOldBlocks, 65)
         OnionrCommunicatorTimers(self, self.lookupKeys, 60, requiresPeer=True)
         OnionrCommunicatorTimers(self, self.lookupAdders, 60, requiresPeer=True)
         netCheckTimer = OnionrCommunicatorTimers(self, self.daemonTools.netCheck, 600)
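
The numeric argument in these registrations is the timer frequency, so this hunk makes cleanOldBlocks run roughly ten times as often (every 65 seconds instead of every 650). A minimal sketch of the pattern such a timer wrapper follows, using hypothetical names rather than the project's actual OnionrCommunicatorTimers class:

import time

class SimpleTimer:
    '''Hypothetical, simplified stand-in for a communicator timer.'''
    def __init__(self, timerFunction, frequency, requiresPeer=False, maxThreads=1):
        self.timerFunction = timerFunction
        self.frequency = frequency        # seconds between runs
        self.requiresPeer = requiresPeer  # skip the run if no peers are online
        self.maxThreads = maxThreads      # cap on concurrent runs (unused in this sketch)
        self.lastRun = 0

    def processTimer(self, onlinePeerCount=0):
        # Called repeatedly from the daemon's main loop
        if self.requiresPeer and onlinePeerCount == 0:
            return
        if time.time() - self.lastRun >= self.frequency:
            self.lastRun = time.time()
            self.timerFunction()
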
@@ -152,7 +152,7 @@ class OnionrCommunicatorDaemon:
             if not self.isOnline:
                 break
             if self._core._utils.storageCounter.isFull():
-                logger.warn('Not looking up new blocks due to maximum amount of allowed disk space used')
+                logger.debug('Not looking up new blocks due to maximum amount of allowed disk space used')
                 break
             peer = self.pickOnlinePeer() # select random online peer
             # if we've already tried all the online peers this time around, stop
@@ -187,6 +187,7 @@ class OnionrCommunicatorDaemon:
     def getBlocks(self):
         '''download new blocks in queue'''
         for blockHash in self.blockQueue:
+            removeFromQueue = True
             if self.shutdown or not self.isOnline:
                 # Exit loop if shutting down or offline
                 break
@@ -198,6 +199,8 @@ class OnionrCommunicatorDaemon:
                 logger.debug('%s is already saved' % (blockHash,))
                 self.blockQueue.remove(blockHash)
                 continue
+            if self._core._utils.storageCounter.isFull():
+                break
             self.currentDownloading.append(blockHash) # So we can avoid concurrent downloading in other threads of same block
             logger.info("Attempting to download %s..." % blockHash)
             peerUsed = self.pickOnlinePeer()
@@ -225,6 +228,7 @@ class OnionrCommunicatorDaemon:
                     self._core.setData(content)
                 except onionrexceptions.DiskAllocationReached:
                     logger.error("Reached disk allocation allowance, cannot save this block.")
+                    removeFromQueue = False
                 else:
                     self._core.addToBlockDB(blockHash, dataSaved=True)
                     self._core._utils.processBlockMetadata(blockHash) # caches block metadata values to block database
@@ -246,7 +250,8 @@ class OnionrCommunicatorDaemon:
                     # Punish peer for sharing invalid block (not always malicious, but is bad regardless)
                     onionrpeers.PeerProfiles(peerUsed, self._core).addScore(-50)
                     logger.warn('Block hash validation failed for ' + blockHash + ' got ' + tempHash)
-            self.blockQueue.remove(blockHash) # remove from block queue both if success or false
+            if removeFromQueue:
+                self.blockQueue.remove(blockHash) # remove from block queue both if success or false
             self.currentDownloading.remove(blockHash)
         self.decrementThreadCount('getBlocks')
         return
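
Taken together, the getBlocks() changes make the download queue retry-friendly: a hash is only dropped from blockQueue when its download was actually processed, the loop bails out early once the disk allocation is full, and a DiskAllocationReached failure leaves the hash queued for a later attempt. A rough sketch of that control flow, with hypothetical isFull() and downloadAndSave() helpers standing in for storageCounter and the real block-saving code:

class DiskAllocationReached(Exception):
    '''Stand-in for onionrexceptions.DiskAllocationReached.'''

def drainQueue(blockQueue, isFull, downloadAndSave):
    # Iterate over a copy so removal during the loop is safe
    for blockHash in list(blockQueue):
        removeFromQueue = True
        if isFull():
            break  # out of allowed disk space; leave remaining hashes queued
        try:
            downloadAndSave(blockHash)
        except DiskAllocationReached:
            removeFromQueue = False  # keep the hash so it can be retried later
        if removeFromQueue:
            blockQueue.remove(blockHash)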

@@ -58,7 +58,7 @@ class DaemonTools:
     def netCheck(self):
         '''Check if we are connected to the internet or not when we can't connect to any peers'''
         if len(self.daemon.onlinePeers) != 0:
-            if not self.daemon._core._utils.checkNetwork():
+            if not self.daemon._core._utils.checkNetwork(torPort=self.daemon.proxyPort):
                 logger.warn('Network check failed, are you connected to the internet?')
                 self.daemon.isOnline = False
         self.daemon.decrementThreadCount('netCheck')

@@ -90,7 +90,11 @@ def peerCleanup(coreInst):
         if PeerProfiles(address, coreInst).score < minScore:
             coreInst.removeAddress(address)
             try:
-                coreInst._blacklist.addToDB(address, dataType=1, expire=300)
+                if (coreInst._utils.getEpoch() - coreInst.getPeerInfo(address, 4)) >= 600:
+                    expireTime = 600
+                else:
+                    expireTime = 86400
+                coreInst._blacklist.addToDB(address, dataType=1, expire=expireTime)
             except sqlite3.IntegrityError: #TODO just make sure its not a unique constraint issue
                 pass
             logger.warn('Removed address ' + address + '.')
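
The new expire value in peerCleanup() scales the blacklist entry with how recently the peer was seen: if the stored last-seen epoch (getPeerInfo(address, 4)) is at least 600 seconds old, the entry expires after 600 seconds, otherwise it lasts 86400 seconds (a day). Assuming getEpoch() returns the current Unix time, the selection reduces to:

def blacklistExpireTime(nowEpoch, lastSeenEpoch):
    '''Sketch of the expiry choice above (hypothetical helper name).'''
    if (nowEpoch - lastSeenEpoch) >= 600:
        return 600    # peer already stale: short blacklist entry
    return 86400      # peer seen recently: blacklist for a full day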

@@ -631,7 +631,7 @@ class OnionrUtils:
                 pass
         return data

-    def checkNetwork(self):
+    def checkNetwork(self, torPort=0):
         '''Check if we are connected to the internet (through Tor)'''
         retData = False
         connectURLs = []
@@ -640,7 +640,7 @@ class OnionrUtils:
                 connectURLs = connectTest.read().split(',')
             for url in connectURLs:
-                if self.doGetRequest(url) != False:
+                if self.doGetRequest(url, port=torPort) != False:
                     retData = True
                     break
         except FileNotFoundError:
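
With the new torPort parameter, checkNetwork() can route its probe through the daemon's running Tor SOCKS port instead of whatever default doGetRequest() assumes. A rough sketch of that kind of probe using the requests library with SOCKS support (PySocks) in place of the project's own doGetRequest(); the URL is only an example:

import requests

def checkNetwork(torPort=0, urls=('https://check.torproject.org',)):
    '''Sketch of a Tor-routed connectivity probe, not Onionr's actual helper.'''
    proxies = {}
    if torPort:
        proxies = {'http': 'socks5h://127.0.0.1:%s' % torPort,
                   'https': 'socks5h://127.0.0.1:%s' % torPort}
    for url in urls:
        try:
            if requests.get(url, proxies=proxies, timeout=30).ok:
                return True
        except requests.RequestException:
            continue
    return False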

@@ -51,7 +51,7 @@
     },
     "allocations":{
-        "disk": 800,
+        "disk": 10000000000,
         "netTotal": 1000000000,
         "blockCache": 5000000,
         "blockCacheTotal": 50000000