downloads now start much sooner when blocks are discovered
commit 5d1d49368e
parent 575c927541
@@ -107,7 +107,8 @@ class OnionrCommunicatorDaemon:
         # Timers to periodically lookup new blocks and download them
         OnionrCommunicatorTimers(self, lookupblocks.lookup_blocks_from_communicator, config.get('timers.lookupBlocks', 25), myArgs=[self], requiresPeer=True, maxThreads=1)
-        OnionrCommunicatorTimers(self, self.getBlocks, config.get('timers.getBlocks', 30), requiresPeer=True, maxThreads=2)
+        # The block download timer is accessed by the block lookup function to trigger faster download starts
+        self.download_blocks_timer = OnionrCommunicatorTimers(self, self.getBlocks, config.get('timers.getBlocks', 30), requiresPeer=True, maxThreads=2)
 
         # Timer to reset the longest offline peer so contact can be attempted again
         OnionrCommunicatorTimers(self, onlinepeers.clear_offline_peer, 58, myArgs=[self])
 
@@ -25,10 +25,10 @@ from utils import reconstructhash
 import onionrblacklist
 blacklist = onionrblacklist.OnionrBlackList()
 def lookup_blocks_from_communicator(comm_inst):
-    logger.info('Looking up new blocks...')
+    logger.info('Looking up new blocks')
     tryAmount = 2
     newBlocks = ''
-    existingBlocks = blockmetadb.get_block_list()
+    existingBlocks = blockmetadb.get_block_list() # List of existing saved blocks
     triedPeers = [] # list of peers we've tried this time around
     maxBacklog = 1560 # Max amount of *new* block hashes to have already in queue, to avoid memory exhaustion
     lastLookupTime = 0 # Last time we looked up a particular peer's list
@@ -89,5 +89,6 @@ def lookup_blocks_from_communicator(comm_inst):
     if new_block_count > 1:
         block_string = "s"
     logger.info('Discovered %s new block%s' % (new_block_count, block_string), terminal=True)
+    comm_inst.download_blocks_timer.count = int(comm_inst.download_blocks_timer.frequency * 0.99)
     comm_inst.decrementThreadCount('lookup_blocks_from_communicator')
     return
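For context on the speed-up: the lookup function nudges the download timer's counter almost to its firing threshold, so the download runs on the next daemon loop instead of waiting out the full getBlocks interval. Below is a minimal sketch, assuming a count-based timer in the spirit of OnionrCommunicatorTimers (where count advances each daemon loop and the callback runs once it reaches frequency); the class and function names here are hypothetical, not Onionr's API.

    # Sketch only: illustrates the count/frequency interaction the diff relies on.
    import threading

    class CountedTimer:
        def __init__(self, func, frequency):
            self.func = func            # callback to run, e.g. a block downloader
            self.frequency = frequency  # daemon loop ticks between runs
            self.count = 0              # ticks elapsed since the last run

        def process_tick(self):
            """Called once per daemon loop iteration."""
            self.count += 1
            if self.count >= self.frequency:
                self.count = 0
                threading.Thread(target=self.func, daemon=True).start()

    def download_blocks():
        print('downloading newly discovered blocks')

    download_timer = CountedTimer(download_blocks, frequency=30)

    # What the lookup step does after discovering new blocks: jump the counter
    # to 99% of the interval so the very next tick fires the download.
    download_timer.count = int(download_timer.frequency * 0.99)
    download_timer.process_tick()  # fires now instead of ~30 ticks later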