2022-03-02 13:29:59 +00:00
|
|
|
from threading import Thread
|
|
|
|
from time import sleep
|
2022-05-20 15:13:12 +00:00
|
|
|
from traceback import format_exc
|
2022-03-02 13:29:59 +00:00
|
|
|
from typing import TYPE_CHECKING
|
|
|
|
if TYPE_CHECKING:
|
|
|
|
from socket import socket
|
|
|
|
|
|
|
|
from onionrplugins import onionrevents
|
2022-05-20 15:13:12 +00:00
|
|
|
import logger
|
2022-03-02 13:29:59 +00:00
|
|
|
|
2022-06-05 20:11:53 +00:00
|
|
|
from socks import GeneralProxyError
|
|
|
|
|
2022-03-02 13:29:59 +00:00
|
|
|
from ..peer import Peer
|
|
|
|
from ..commands import GossipCommands, command_to_byte
|
|
|
|
from ..constants import PEER_AMOUNT_TO_ASK, TRANSPORT_SIZE_BYTES
|
|
|
|
from .. import connectpeer
|
2022-03-21 06:03:53 +00:00
|
|
|
from ..peerset import gossip_peer_set
|
2022-03-02 13:29:59 +00:00
|
|
|
|
|
|
|
MAX_PEERS = 10
|
|
|
|
|
|
|
|
|
2022-05-20 15:13:12 +00:00
|
|
|
def _do_ask_peer(peer):
    """Ask one peer for new peers, converting expected failures into logs.

    Thread target wrapper around _ask_peer: it must never raise, because
    an uncaught exception in a Thread would only print to stderr and the
    caller (get_new_peers) joins these threads without checking results.

    Args:
        peer: a Peer object providing get_socket() (consumed by _ask_peer).
    """
    try:
        _ask_peer(peer)
    except TimeoutError:
        # Expected on slow/unreachable peers; not worth a full traceback
        logger.debug("Timed out when asking for new peers")
    except GeneralProxyError:
        # SOCKS proxy (e.g. Tor) failure; keep traceback at debug level
        logger.debug("Proxy error")
        logger.debug(format_exc(), terminal=True)
    except Exception:
        # Boundary catch-all: log unexpected errors instead of killing the thread
        logger.error(format_exc(), terminal=True)
|
|
|
|
|
2022-03-21 06:03:53 +00:00
|
|
|
def _ask_peer(peer):
    """Perform a PEER_EXCHANGE with a single peer and announce the results.

    Connects to the peer, sends the PEER_EXCHANGE command, then reads up
    to MAX_PEERS newline-terminated transport addresses, firing an
    'announce_rec' event for each address received.

    Args:
        peer: a Peer object; get_socket(12) must return a connected socket.

    Raises:
        OverflowError: if a received address exceeds TRANSPORT_SIZE_BYTES.
        TimeoutError, GeneralProxyError: propagated from the socket layer
            (handled by the _do_ask_peer wrapper).
    """
    s: 'socket' = peer.get_socket(12)
    try:
        s.sendall(command_to_byte(GossipCommands.PEER_EXCHANGE))
        # The peer sends at most MAX_PEERS addresses, one per line.
        # Use a name distinct from the `peer` parameter (the original
        # shadowed it with the received address bytes).
        for _ in range(MAX_PEERS):
            address = b''
            c = b''
            while c != b'\n':
                if len(address) > TRANSPORT_SIZE_BYTES:
                    raise OverflowError
                c = s.recv(1)
                if not c:
                    # Remote closed the connection: recv() returns b''
                    # forever, so bail out. (The original code would spin
                    # in this loop indefinitely appending empty bytes.)
                    break
                address += c
            if not address:
                break
            connect_data = {
                # NOTE(review): address keeps its trailing b'\n' exactly as
                # the original did — connect_peer presumably normalizes it.
                'address': address,
                'callback': connectpeer.connect_peer
            }
            logger.info(
                "Got new peer from exchange " + address.decode('utf-8'),
                terminal=True)
            onionrevents.event(
                'announce_rec', data=connect_data, threaded=True)
    finally:
        # Always release the socket, even when recv/decode/OverflowError
        # raises (the original leaked it on any exception).
        s.close()
|
|
|
|
|
|
|
|
|
2022-03-21 06:03:53 +00:00
|
|
|
def get_new_peers():
    """Ask up to PEER_AMOUNT_TO_ASK known peers for more peers.

    Spawns one daemon thread per asked peer (each running _do_ask_peer)
    and joins them all before returning, because this function runs on a
    timer and must not overlap with its next invocation.

    Returns:
        None. Returns early (without raising) when no peers are known.

    Raises:
        ValueError: if the sampled peer list is empty despite the pool
            guard (kept for parity with the original behavior).
    """
    if not len(gossip_peer_set):
        logger.debug("Peer set empty, cannot get new peers")
        return

    # Snapshot the shared peer set so concurrent mutation can't bite us.
    # (This is a shallow copy — the original comment called it "deep" and
    # mis-annotated both lists as Peer.)
    peer_list: list = list(gossip_peer_set)

    # Take at most PEER_AMOUNT_TO_ASK peers; set iteration order is
    # arbitrary, so slicing is equivalent to the original pop() loop.
    peers_we_ask: list = peer_list[:PEER_AMOUNT_TO_ASK]

    if not peers_we_ask:
        raise ValueError("No peers present in pool during get_new_peers")

    peer_list.clear()  # Drop the snapshot so it doesn't occupy memory

    # Start threads to ask the peers for more peers
    threads = []
    for peer in peers_we_ask:
        t = Thread(target=_do_ask_peer, args=[peer], daemon=True)
        t.start()
        threads.append(t)
    peers_we_ask.clear()

    # Wait for the threads to finish because this function is on a timer
    for thread in threads:
        thread.join()
|
|
|
|
|