More decoupling; removed an unnecessary announce-progress check (NOTE: the visible hunk removes the `announceProgress` membership test while keeping `announceCache` — the original message's mention of announceCache may refer to changes outside this view)

This commit is contained in:
Kevin 2020-07-26 21:45:17 -05:00
parent fad5e8547e
commit 080f33bf1f
5 changed files with 10 additions and 12 deletions

View File

@ -71,6 +71,7 @@ class OnionrCommunicatorDaemon:
self.kv.put('dbTimestamps', {})
self.kv.put('blocksToUpload', [])
self.kv.put('cooldownPeer', {})
self.kv.put('generating_blocks', [])
if config.get('general.offline_mode', False):
self.isOnline = False
@ -93,10 +94,6 @@ class OnionrCommunicatorDaemon:
# list of peer's profiles (onionrpeers.PeerProfile instances)
self.peerProfiles = []
self.announceProgress = {}
self.generating_blocks = []
# amount of threads running by name, used to prevent too many
self.threadCounts = {}

View File

@ -4,6 +4,7 @@ Remove block hash from daemon's upload list.
"""
from typing import TYPE_CHECKING
if TYPE_CHECKING:
from deadsimplekv import DeadSimpleKV
from communicator import OnionrCommunicatorDaemon
from onionrtypes import BlockHash
"""
@ -25,7 +26,8 @@ if TYPE_CHECKING:
def remove_from_insert_queue(comm_inst: "OnionrCommunicatorDaemon",
                             b_hash: "BlockHash"):
    """Remove block hash from the daemon's block-generation (insert) queue.

    Args:
        comm_inst: daemon whose shared-state KV store holds the queue.
        b_hash: hash of the block to drop from 'generating_blocks'.

    The hash may legitimately be absent (e.g. already removed), so a
    ValueError from list.remove is deliberately swallowed.
    """
    kv: "DeadSimpleKV" = comm_inst.shared_state.get_by_string("DeadSimpleKV")
    try:
        # Post-refactor the queue lives in the shared KV store, not on the
        # daemon instance (comm_inst.generating_blocks no longer exists —
        # the diff artifact calling it was removed as it would raise
        # AttributeError, which the except clause below would not catch).
        kv.get('generating_blocks').remove(b_hash)
    except ValueError:
        pass

View File

@ -25,8 +25,9 @@ if TYPE_CHECKING:
def clear_offline_peer(comm_inst: 'OnionrCommunicatorDaemon'):
"""Remove the longest offline peer to retry later."""
kv: "DeadSimpleKV" = comm_inst.shared_state.get_by_string("DeadSimpleKV")
try:
removed = comm_inst..pop(0)
removed = kv.get('offlinePeers').pop(0)
except IndexError:
pass
else:

View File

@ -44,8 +44,7 @@ def announce_node(daemon):
if daemon.config.get('general.security_level', 0) == 0:
# Announce to random online peers
for i in kv.get('onlinePeers'):
if i not in kv.get('announceCache') and\
i not in daemon.announceProgress:
if i not in kv.get('announceCache'):
peer = i
break
else:

View File

@ -35,18 +35,17 @@ def client_api_insert_block():
insert_data: JSONSerializable = request.get_json(force=True)
message = insert_data['message']
message_hash = bytesconverter.bytes_to_str(hashers.sha3_hash(message))
kv: 'DeadSimpleKV' = g.too_many.get_by_string('DeadSimpleKV')
# Detect if message (block body) is not specified
if type(message) is None:
return 'failure due to unspecified message', 400
# Detect if block with same message is already being inserted
if message_hash in g.too_many.get_by_string(
"OnionrCommunicatorDaemon").generating_blocks:
if message_hash in kv.get('generating_blocks'):
return 'failure due to duplicate insert', 400
else:
g.too_many.get_by_string(
"OnionrCommunicatorDaemon").generating_blocks.append(message_hash)
kv.get('generating_blocks').append(message_hash)
encrypt_type = ''
sign = True