Onionr/onionr/communicator.py

#!/usr/bin/env python3
'''
Onionr - P2P Microblogging Platform & Social network.
This file contains both the OnionrCommunicate class for communicating with peers
and code to operate as a daemon, getting commands from the command queue database (see core.Core.daemonQueue)
'''
'''
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <https://www.gnu.org/licenses/>.
'''
import sqlite3, requests, hmac, hashlib, time, sys, os, math, logger, urllib.parse
import core, onionrutils, onionrcrypto
class OnionrCommunicate:
def __init__(self, debug, developmentMode):
'''
OnionrCommunicate
This class handles communication with nodes in the Onionr network.
'''
self._core = core.Core()
self._utils = onionrutils.OnionrUtils(self._core)
self._crypto = onionrcrypto.OnionrCrypto(self._core)
blockProcessTimer = 0
blockProcessAmount = 5
heartBeatTimer = 0
heartBeatRate = 10
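# The loop below ticks roughly once per second (time.sleep(1) at the bottom).
# blockProcessTimer triggers a lookup/process pass every blockProcessAmount ticks,
# while heartBeatTimer logs a heartbeat every heartBeatRate ticks.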
logger.debug('Communicator debugging enabled.')
torID = open('data/hs/hostname').read()
self.peerData = {} # Session data for peers (recent reachability, speed, etc)
if os.path.exists(self._core.queueDB):
self._core.clearDaemonQueue()
while True:
command = self._core.daemonQueue()
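# daemonQueue() returns the next pending command from the command queue database
# (see core.Core.daemonQueue), or False when there is nothing queued.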
# Process blocks based on a timer
blockProcessTimer += 1
heartBeatTimer += 1
if heartBeatRate == heartBeatTimer:
logger.debug('Communicator heartbeat')
heartBeatTimer = 0
if blockProcessTimer == blockProcessAmount:
self.lookupBlocks()
self.processBlocks()
blockProcessTimer = 0
#logger.debug('Communicator daemon heartbeat')
if command != False:
if command[0] == 'shutdown':
logger.warn('Daemon received exit command.')
break
time.sleep(1)
return
def lookupBlocks(self):
'''
Lookup blocks and merge new ones
'''
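# Ask every known peer for the hash of its block database; only when that hash
# differs from the one recorded last time do we pull the peer's full hash list.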
peerList = self._core.listPeers()
blocks = ''
for i in peerList:
lastDB = self._core.getPeerInfo(i, 'blockDBHash')
if lastDB is None:
logger.debug('Fetching hash from ' + i + ', no previous hash known.')
else:
logger.debug('Fetching hash from ' + i + ', ' + lastDB + ' last known')
currentDB = self.performGet('getDBHash', i)
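# getDBHash is assumed to return a digest of the peer's whole block database,
# so any change to the peer's block list changes this value.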
if currentDB != False:
logger.debug(i + " hash db (from request): " + currentDB)
else:
logger.warn("Error getting hash db status for " + i)
if currentDB != False:
if lastDB != currentDB:
logger.debug('Fetching hash from ' + i + ' - ' + currentDB + ' current hash.')
blocks += self.performGet('getBlockHashes', i)
if self._utils.validateHash(currentDB):
self._core.setPeerInfo(i, "blockDBHash", currentDB)
logger.debug('BLOCKS: \n' + blocks)
blockList = blocks.split('\n')
for i in blockList:
if len(i.strip()) == 0:
continue
if self._utils.hasBlock(i):
continue
logger.debug('Exchanged block (blockList): ' + i)
if not self._utils.validateHash(i):
# skip hash if it isn't valid
logger.warn('Hash ' + i + ' is not valid')
continue
else:
logger.debug('Adding ' + i + ' to hash database...')
self._core.addToBlockDB(i)
return
def processBlocks(self):
'''
Work with the block database and download any missing blocks
This is meant to be called from the communicator daemon on its timer.
'''
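# getBlockList(True) is assumed to return only hashes that are known but not yet
# saved locally, one per line; each of those blocks is fetched with downloadBlock().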
for i in self._core.getBlockList(True).split("\n"):
if i != "":
logger.warn('UNSAVED BLOCK: ' + i)
data = self.downloadBlock(i)
return
def downloadBlock(self, hash):
'''
Download a block from peers in random order
'''
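# Peers are tried in the order returned by listPeers(); data is only stored once
# its sha3-256 digest matches the hash that was requested.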
peerList = self._core.listPeers()
blocks = ''
for i in peerList:
hasher = hashlib.sha3_256()
data = self.performGet('getData', i, hash)
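# Skip failed requests and responses larger than ~10 MB before verifying the digest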
if data == False or len(data) > 10000000:
continue
hasher.update(data.encode())
digest = hasher.hexdigest()
if type(digest) is bytes:
digest = digest.decode()
if digest == hash.strip():
self._core.setData(data)
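# Blocks that begin with the '-txt-' marker are recorded as text-type blocks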
if data.startswith('-txt-'):
self._core.setBlockType(hash, 'txt')
logger.info('Successfully obtained data for ' + hash)
if len(data) < 120:
logger.debug('Block text:\n' + data)
else:
logger.warn("Failed to validate " + hash)
return
def urlencode(self, data):
'''
URL encodes the data
'''
return urllib.parse.quote_plus(data)
def performGet(self, action, peer, data=None, type='tor'):
'''
Performs a request to a peer through Tor or i2p (currently only Tor)
'''
if not peer.endswith('.onion') and not peer.endswith('.onion/'):
raise PeerError('Currently only Tor .onion peers are supported. You must manually specify .onion')
# Store peer in peerData dictionary (non permanent)
if peer not in self.peerData:
self.peerData[peer] = {'connectCount': 0, 'failCount': 0, 'lastConnectTime': math.floor(time.time())}
socksPort = sys.argv[2]
'''We use socks5h so that DNS resolution is also done through Tor'''
proxies = {'http': 'socks5h://127.0.0.1:' + str(socksPort), 'https': 'socks5h://127.0.0.1:' + str(socksPort)}
headers = {'user-agent': 'PyOnionr'}
url = 'http://' + peer + '/public/?action=' + self.urlencode(action)
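# Example resulting URL: http://<peer>.onion/public/?action=getDBHash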
if data is not None:
url = url + '&data=' + self.urlencode(data)
try:
r = requests.get(url, headers=headers, proxies=proxies, timeout=(15, 30))
retData = r.text
except requests.exceptions.RequestException as e:
logger.warn(action + " failed with peer " + peer + ": " + str(e))
retData = False
if not retData:
self.peerData[peer]['failCount'] += 1
else:
self.peerData[peer]['connectCount'] += 1
self.peerData[peer]['lastConnectTime'] = math.floor(time.time())
return retData
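
# Daemon entry point: when this file is executed (presumably by the Onionr client),
# argv[1] must be 'run' to start the communicator and argv[2] must be the Tor SOCKS
# port that performGet() uses for outgoing requests.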
shouldRun = False
debug = True
developmentMode = False
if os.path.exists('dev-enabled'):
developmentMode = True
try:
if sys.argv[1] == 'run':
shouldRun = True
except IndexError:
pass
if shouldRun:
try:
OnionrCommunicate(debug, developmentMode)
except KeyboardInterrupt:
pass