Onionr/onionr/communicator.py

#!/usr/bin/env python3
'''
Onionr - P2P Microblogging Platform & Social network.
This file contains both the OnionrCommunicate class for communicating with peers
and code to operate as a daemon, getting commands from the command queue database (see core.Core.daemonQueue)
'''
'''
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <https://www.gnu.org/licenses/>.
'''
import sqlite3, requests, hmac, hashlib, time, sys, os, logger
import core, onionrutils
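
# performGet() below raises PeerError for unsupported peer addresses; a minimal
# definition is assumed here, since none is defined or imported in this file.
class PeerError(Exception):
    pass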

class OnionrCommunicate:
    def __init__(self, debug, developmentMode):
        '''OnionrCommunicate

        This class handles communication with nodes in the Onionr network.
        '''
        self._core = core.Core()
        self._utils = onionrutils.OnionrUtils(self._core)
        blockProcessTimer = 0
        blockProcessAmount = 5
        heartBeatTimer = 0
        heartBeatRate = 10
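        # blockProcessAmount and heartBeatRate are counted in main-loop iterations
        # (roughly one per second, given the sleep at the bottom of the loop)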
        logger.debug('Communicator debugging enabled.')
        torID = open('data/hs/hostname').read()

        # get our own PGP fingerprint
        fingerprintFile = 'data/own-fingerprint.txt'
        if not os.path.exists(fingerprintFile):
            self._core.generateMainPGP(torID)
        with open(fingerprintFile, 'r') as f:
            self.pgpOwnFingerprint = f.read()
        logger.info('My PGP fingerprint is ' + logger.colors.underline + self.pgpOwnFingerprint + logger.colors.reset + logger.colors.fg.green + '.')

        if os.path.exists(self._core.queueDB):
            self._core.clearDaemonQueue()
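
        # Main daemon loop: poll the command queue once per iteration and, on a
        # timer, look up and process blocks from known peers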
        while True:
            command = self._core.daemonQueue()
            # Process blocks based on a timer
            blockProcessTimer += 1
            heartBeatTimer += 1
            if heartBeatRate == heartBeatTimer:
                logger.debug('Communicator heartbeat')
                heartBeatTimer = 0
            if blockProcessTimer == blockProcessAmount:
                self.lookupBlocks()
                self.processBlocks()
                blockProcessTimer = 0
            #logger.debug('Communicator daemon heartbeat')
            if command != False:
                if command[0] == 'shutdown':
                    logger.warn('Daemon received exit command.')
                    break
            time.sleep(1)
        return

    def getRemotePeerKey(self, peerID):
        '''This function contacts a peer and gets their main PGP key.

        This is safe because Tor or I2P is used, but it does not ensure that the peer is who they claim to be.
        '''
        headers = {'user-agent': 'PyOnionr'}
        # note: unlike performGet, this request does not go through the Tor SOCKS proxy
        url = 'http://' + peerID + '/public/?action=getPGP'
        r = requests.get(url, headers=headers)
        response = r.text
        return response
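
    # The following peer-handshake helpers are placeholders and currently return
    # without contacting the peer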
    def shareHMAC(self, peerID, key):
        '''This function shares an HMAC key with a peer'''
        return

    def getPeerProof(self, peerID):
        '''This function gets the current peer proof requirement'''
        return

    def sendPeerProof(self, peerID, data):
        '''This function sends the proof result to a peer previously fetched with getPeerProof'''
        return

    def lookupBlocks(self):
        '''Lookup blocks and merge new ones'''
        peerList = self._core.listPeers()
        blocks = ''
        for i in peerList:
            lastDB = self._core.getPeerInfo(i, 'blockDBHash')
            if lastDB == None:
                logger.debug('Fetching hash from ' + i + ', no previous hash known.')
            else:
                logger.debug('Fetching hash from ' + i + ', ' + lastDB + ' last known')
            currentDB = self.performGet('getDBHash', i)
            if currentDB != False:
                logger.debug(i + " hash db (from request): " + currentDB)
            else:
                logger.warn("Error getting hash db status for " + i)
            if currentDB != False:
                if lastDB != currentDB:
                    logger.debug('Fetching hash from ' + i + ' - ' + currentDB + ' current hash.')
                    blocks += self.performGet('getBlockHashes', i)
                if self._utils.validateHash(currentDB):
                    self._core.setPeerInfo(i, "blockDBHash", currentDB)
        logger.debug('BLOCKS: \n' + blocks)
        blockList = blocks.split('\n')
        for i in blockList:
            logger.debug('Exchanged block (blockList): ' + i)
            if not self._utils.validateHash(i):
                # skip hash if it isn't valid
                if len(i.strip()) != 0:
                    logger.warn('Hash ' + i + ' is not valid')
                continue
            else:
                logger.debug('Adding ' + i + ' to hash database...')
                self._core.addToBlockDB(i)
        return

    def processBlocks(self):
        '''
        Work with the block database and download any missing blocks

        This is meant to be called from the communicator daemon on its timer.
        '''
        for i in self._core.getBlockList(True).split("\n"):
            if i != "":
                logger.warn('UNSAVED BLOCK: ' + i)
                data = self.downloadBlock(i)
        return

    def downloadBlock(self, hash):
        peerList = self._core.listPeers()
        blocks = ''
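        # Ask each known peer for the block; only store data whose sha3-256 digest
        # matches the requested block hash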
        for i in peerList:
            hasher = hashlib.sha3_256()
            data = self.performGet('getData', i, hash)
            if data == False or len(data) > 10000000:
                continue
            hasher.update(data.encode())
            digest = hasher.hexdigest()
            if type(digest) is bytes:
                digest = digest.decode()
            if digest == hash.strip():
                self._core.setData(data)
                logger.info('Successfully obtained data for ' + hash)
            else:
                logger.warn("Failed to validate " + hash)

    def performGet(self, action, peer, data=None, type='tor'):
        '''Performs a request to a peer through Tor or I2P (currently only Tor)'''
        if not peer.endswith('.onion') and not peer.endswith('.onion/'):
            raise PeerError('Currently only Tor .onion peers are supported. You must manually specify .onion')
        socksPort = sys.argv[2]
        # socks5h is used so that DNS resolution also goes through Tor
        proxies = {'http': 'socks5h://127.0.0.1:' + str(socksPort), 'https': 'socks5h://127.0.0.1:' + str(socksPort)}
        headers = {'user-agent': 'PyOnionr'}
        url = 'http://' + peer + '/public/?action=' + action
        if data != None:
            url = url + '&data=' + data
        try:
            r = requests.get(url, headers=headers, proxies=proxies)
        except requests.exceptions.RequestException as e:
            logger.warn(action + " failed with peer " + peer + ": " + str(e))
            return False
        return r.text
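
# Daemon startup: the communicator is launched with 'run' as the first command
# line argument; the second argument is the Tor SOCKS port, read in performGet()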
shouldRun = False
debug = True
developmentMode = False
if os.path.exists('dev-enabled'):
    developmentMode = True
try:
    if sys.argv[1] == 'run':
        shouldRun = True
except IndexError:
    pass
if shouldRun:
    try:
        OnionrCommunicate(debug, developmentMode)
    except KeyboardInterrupt:
        pass
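
# Example invocation (9050 is assumed here as a typical Tor SOCKS port):
#   python3 communicator.py run 9050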