code cleanup, defunct code removal, and some module splitting
parent b09dae276c
commit 944c76d2e9
@@ -25,6 +25,8 @@ import onionrdaemontools, onionrsockets, onionr, onionrproofs, proofofmemory
 import binascii
+from dependencies import secrets
 from defusedxml import minidom
+from utils import networkmerger

 config.reload()
 class OnionrCommunicatorDaemon:
     def __init__(self, onionrInst, proxyPort, developmentMode=config.get('general.dev_mode', False)):
@@ -130,7 +132,6 @@
         self.socketServer.start()
         self.socketClient = onionrsockets.OnionrSocketClient(self._core)

-
         # Main daemon loop, mainly for calling timers, don't do any complex operations here to avoid locking
         try:
             while not self.shutdown:
@@ -159,7 +160,7 @@
         # Download new peer address list from random online peers
         peer = self.pickOnlinePeer()
         newAdders = self.peerAction(peer, action='pex')
-        self._core._utils.mergeAdders(newAdders)
+        networkmerger.mergeAdders(newAdders, self._core)
         self.decrementThreadCount('lookupAdders')

     def lookupBlocks(self):
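The call-site change above is the module split in action: the bound helper self._core._utils.mergeAdders(newAdders) becomes a call into the new utils/networkmerger module, with the core object passed in explicitly. A minimal sketch of that pattern (stand-in class and names are illustrative, not Onionr code):

class FakeCore:
    # Stand-in for the Onionr core object, for illustration only.
    def addAddress(self, adder):
        print('adding', adder)
        return True

def mergeAdders(newAdderList, coreInst):
    # Free function: the owning instance is passed in instead of captured by self.
    for adder in newAdderList.split(','):
        adder = adder.strip()
        if adder:
            coreInst.addAddress(adder)

mergeAdders('example1.onion, example2.onion', FakeCore())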
@@ -125,6 +125,7 @@ class Core:
         '''
             Adds a public key to the key database (misleading function name)
         '''
+        assert peerID not in self.listPeers()

         # This function simply adds a peer to the DB
         if not self._utils.validatePubKey(peerID):
@@ -1,7 +1,9 @@
 #!/usr/bin/env python
 # -*- coding: utf-8 -*- (because 0xFF, even : "Yucatán")

-import os, re, sys, binascii, base64
+'''This file is adapted from https://github.com/thblt/pgp-words by github user 'thblt' ('Thibault Polge), GPL v3 license'''
+
+import os, re, sys, binascii

 _words = [
     ["aardvark", "adroitness"],
@@ -275,7 +277,7 @@ def wordify(seq):

     ret = []
     for i in range(0, len(seq), 2):
-        ret.append(_words[int(seq[i:i+2], 16)][(i//2)%2])
+        ret.append(_words[int(seq[i:i+2], 16)][(i//2)%2].lower())
     return ret

 def hexify(seq, delim=' '):
@@ -292,40 +294,4 @@
             if word in wordPair:
                 ret += bytes([(count)])
             count += 1
-    return binascii.hexlify(ret)
-
-def usage():
-    print("Usage:")
-    print(" {0} [fingerprint...]".format(os.path.basename(sys.argv[0])))
-    print("")
-    print("If called with multiple arguments, they will be concatenated")
-    print("and treated as a single fingerprint.")
-    print("")
-    print("If called with no arguments, input is read from stdin,")
-    print("and each line is treated as a single fingerprint. In this")
-    print("mode, invalid values are silently ignored.")
-    exit(1)
-
-if __name__ == '__main__':
-    if 1 == len(sys.argv):
-        fps = sys.stdin.readlines()
-    else:
-        fps = [" ".join(sys.argv[1:])]
-    for fp in fps:
-        try:
-            words = wordify(fp)
-            print("\n{0}: ".format(fp.strip()))
-            sys.stdout.write("\t")
-            for i in range(0, len(words)):
-                sys.stdout.write(words[i] + " ")
-                if (not (i+1) % 4) and not i == len(words)-1:
-                    sys.stdout.write("\n\t")
-            print("")
-
-        except Exception as e:
-            if len(fps) == 1:
-                print (e)
-            usage()
-
-    print("")
-
+    return binascii.hexlify(ret)
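The wordify() change above only lowercases the emitted PGP word list; the old command-line usage()/__main__ harness is dropped. A small usage sketch, assuming the module is importable as pgpwords (as convertHumanReadableID elsewhere in this commit does) and that hexify() splits its input on the default space delimiter; the sample hex string is made up:

import pgpwords   # assumption: run from the onionr/ source directory

fingerprint = 'E58294F2E9A227486E8B'      # made-up even-length hex string
words = pgpwords.wordify(fingerprint)
print(' '.join(words))                    # one word per byte, now all lowercase
print(pgpwords.hexify(' '.join(words)))   # should map the words back to hex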
@@ -18,7 +18,9 @@
     along with this program. If not, see <https://www.gnu.org/licenses/>.
 '''

-import re, sys, time, traceback
+import re, sys, time, traceback, os
+
+MAX_LOG_SIZE = 100000000

 class colors:
     '''
@@ -132,11 +134,12 @@ def raw(data, fd = sys.stdout, sensitive = False):
     if get_settings() & OUTPUT_TO_CONSOLE:
         ts = fd.write('%s\n' % data)
     if get_settings() & OUTPUT_TO_FILE and not sensitive:
-        try:
-            with open(_outputfile, "a+") as f:
-                f.write(colors.filter(data) + '\n')
-        except OSError:
-            pass
+        if os.path.getsize(_outputfile) < MAX_LOG_SIZE:
+            try:
+                with open(_outputfile, "a+") as f:
+                    f.write(colors.filter(data) + '\n')
+            except OSError:
+                pass

 def log(prefix, data, color = '', timestamp=True, fd = sys.stdout, prompt = True, sensitive = False):
     '''
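The raw() change above stops appending to the log file once it reaches MAX_LOG_SIZE. A self-contained sketch of the same size-capped append pattern (a standalone rewrite, not the project's logger module); it also guards the getsize() call for a not-yet-created file:

import os

MAX_LOG_SIZE = 100000000   # ~100 MB, matching the constant added above

def append_log(path, line):
    # Skip writing once the log reaches the cap; tolerate a missing file.
    if os.path.exists(path) and os.path.getsize(path) >= MAX_LOG_SIZE:
        return
    try:
        with open(path, "a+") as f:
            f.write(line + "\n")
    except OSError:
        pass

append_log("output.log", "hello")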
@@ -563,24 +563,12 @@ class Onionr:
         if self.onionrUtils.hasKey(newPeer):
             logger.info('We already have that key')
             return
-        if not '-' in newPeer:
-            logger.info('Since no POW token was supplied for that key, one is being generated')
-            proof = onionrproofs.DataPOW(newPeer)
-            while True:
-                result = proof.getResult()
-                if result == False:
-                    time.sleep(0.5)
-                else:
-                    break
-            newPeer += '-' + base64.b64encode(result[1]).decode()
-            logger.info(newPeer)

         logger.info("Adding peer: " + logger.colors.underline + newPeer)
-        if self.onionrUtils.mergeKeys(newPeer):
-            logger.info('Successfully added key')
-        else:
+        try:
+            if self.onionrCore.addPeer(newPeer):
+                logger.info('Successfully added key')
+        except AssertionError:
             logger.error('Failed to add key')

         return

     def addAddress(self):
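The removed block above polled a DataPOW proof object every half second until getResult() returned something other than False. A generic sketch of that poll-until-ready loop, written with a timeout so it cannot spin forever; the proof object's getResult() interface is taken from the removed code, nothing else here is Onionr's:

import time

def wait_for_result(proof, interval=0.5, timeout=60):
    # Poll until the proof reports a result, or give up after `timeout` seconds.
    deadline = time.time() + timeout
    while time.time() < deadline:
        result = proof.getResult()
        if result is not False:
            return result
        time.sleep(interval)
    raise TimeoutError('proof of work did not finish in time')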
@@ -21,6 +21,7 @@
 import onionrexceptions, onionrpeers, onionrproofs, logger, onionrusers
 import base64, sqlite3, os
 from dependencies import secrets
+from utils import netutils

 class DaemonTools:
     '''
@@ -86,7 +87,7 @@ class DaemonTools:
     def netCheck(self):
         '''Check if we are connected to the internet or not when we can't connect to any peers'''
         if len(self.daemon.onlinePeers) == 0:
-            if not self.daemon._core._utils.checkNetwork(torPort=self.daemon.proxyPort):
+            if not netutils.checkNetwork(self.daemon._core._utils, torPort=self.daemon.proxyPort):
                 logger.warn('Network check failed, are you connected to the internet?')
                 self.daemon.isOnline = False
         else:
@@ -192,10 +193,11 @@ class DaemonTools:

     def insertDeniableBlock(self):
         '''Insert a fake block in order to make it more difficult to track real blocks'''
-        fakePeer = self.daemon._core._crypto.generatePubKey()[0]
+        fakePeer = ''
         chance = 10
         if secrets.randbelow(chance) == (chance - 1):
+            fakePeer = self.daemon._core._crypto.generatePubKey()[0]
             data = secrets.token_hex(secrets.randbelow(500) + 1)
-            self.daemon._core.insertBlock(data, header='pm', encryptType='asym', asymPeer=fakePeer)
+            self.daemon._core.insertBlock(data, header='pm', encryptType='asym', asymPeer=fakePeer, meta={'subject': 'foo'})
         self.daemon.decrementThreadCount('insertDeniableBlock')
         return
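With the change above, the throwaway key is only generated on the roughly one-in-ten runs that actually insert a deniable block. A sketch of the same 1-in-N decision with the standard library secrets module (the diff imports a local dependencies.secrets backport; plain import secrets needs Python 3.6+):

import secrets

chance = 10
if secrets.randbelow(chance) == (chance - 1):            # true about 1 time in 10
    payload = secrets.token_hex(secrets.randbelow(500) + 1)   # 1-500 random bytes, hex encoded
    print('would insert a deniable block of', len(payload) // 2, 'bytes')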
@@ -67,90 +67,6 @@ class OnionrUtils:
         '''
         epoch = self.getEpoch()
         return epoch - (epoch % roundS)

-    def mergeKeys(self, newKeyList):
-        '''
-            Merge ed25519 key list to our database, comma seperated string
-        '''
-        try:
-            retVal = False
-            if newKeyList != False:
-                for key in newKeyList.split(','):
-                    key = key.split('-')
-                    # Test if key is valid
-                    try:
-                        if len(key[0]) > 60 or len(key[1]) > 1000:
-                            logger.warn('%s or its pow value is too large.' % key[0])
-                            continue
-                    except IndexError:
-                        logger.warn('No pow token')
-                        continue
-                    try:
-                        value = base64.b64decode(key[1])
-                    except binascii.Error:
-                        continue
-                    # Load the pow token
-                    hashedKey = self._core._crypto.blake2bHash(key[0])
-                    powHash = self._core._crypto.blake2bHash(value + hashedKey)
-                    try:
-                        powHash = powHash.encode()
-                    except AttributeError:
-                        pass
-                    # if POW meets required difficulty, TODO make configurable/dynamic
-                    if powHash.startswith(b'0000'):
-                        # if we don't already have the key and its not our key, add it.
-                        if not key[0] in self._core.listPeers(randomOrder=False) and type(key) != None and key[0] != self._core._crypto.pubKey:
-                            if self._core.addPeer(key[0], key[1]):
-                                # Check if the peer has a set username already
-                                onionrusers.OnionrUser(self._core, key[0]).findAndSetID()
-                                retVal = True
-                            else:
-                                logger.warn("Failed to add key")
-                    else:
-                        pass
-                        #logger.debug('%s pow failed' % key[0])
-            return retVal
-        except Exception as error:
-            logger.error('Failed to merge keys.', error=error)
-            return False
-
-
-    def mergeAdders(self, newAdderList):
-        '''
-            Merge peer adders list to our database
-        '''
-        try:
-            retVal = False
-            if newAdderList != False:
-                for adder in newAdderList.split(','):
-                    adder = adder.strip()
-                    if not adder in self._core.listAdders(randomOrder = False) and adder != self.getMyAddress() and not self._core._blacklist.inBlacklist(adder):
-                        if not config.get('tor.v3onions') and len(adder) == 62:
-                            continue
-                        if self._core.addAddress(adder):
-                            # Check if we have the maxmium amount of allowed stored peers
-                            if config.get('peers.max_stored_peers') > len(self._core.listAdders()):
-                                logger.info('Added %s to db.' % adder, timestamp = True)
-                                retVal = True
-                            else:
-                                logger.warn('Reached the maximum amount of peers in the net database as allowed by your config.')
-                    else:
-                        pass
-                        #logger.debug('%s is either our address or already in our DB' % adder)
-            return retVal
-        except Exception as error:
-            logger.error('Failed to merge adders.', error = error)
-            return False
-
-    def getMyAddress(self):
-        try:
-            with open('./' + self._core.dataDir + 'hs/hostname', 'r') as hostname:
-                return hostname.read().strip()
-        except FileNotFoundError:
-            return ""
-        except Exception as error:
-            logger.error('Failed to read my address.', error = error)
-            return None
-
     def getClientAPIServer(self):
         retData = ''
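For reference, the proof-of-work check that the removed mergeKeys() performed: blake2b(pow_value + blake2b(pubkey)) had to start with b'0000'. A sketch using hashlib.blake2b as a stand-in for self._core._crypto.blake2bHash, whose exact digest size and encoding are an assumption here:

import base64, hashlib

def pow_ok(pubkey: str, pow_b64: str) -> bool:
    # Decode the base64 PoW token, hash it together with the hashed key,
    # and require the leading-zero prefix the removed code checked for.
    value = base64.b64decode(pow_b64)
    hashed_key = hashlib.blake2b(pubkey.encode()).hexdigest().encode()
    pow_hash = hashlib.blake2b(value + hashed_key).hexdigest().encode()
    return pow_hash.startswith(b'0000')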
@@ -195,27 +111,6 @@ class OnionrUtils:

         return retData

-    def getPassword(self, message='Enter password: ', confirm = True):
-        '''
-            Get a password without showing the users typing and confirm the input
-        '''
-        # Get a password safely with confirmation and return it
-        while True:
-            print(message)
-            pass1 = getpass.getpass()
-            if confirm:
-                print('Confirm password: ')
-                pass2 = getpass.getpass()
-                if pass1 != pass2:
-                    logger.error("Passwords do not match.")
-                    logger.readline()
-                else:
-                    break
-            else:
-                break
-
-        return pass1
-
     def getHumanReadableID(self, pub=''):
         '''gets a human readable ID from a public key'''
         if pub == '':
@@ -225,12 +120,14 @@ class OnionrUtils:

     def convertHumanReadableID(self, pub):
         '''Convert a human readable pubkey id to base32'''
+        pub = pub.lower()
         return self.bytesToStr(base64.b32encode(binascii.unhexlify(pgpwords.hexify(pub.strip()))))

     def getBlockMetadataFromData(self, blockData):
         '''
-            accepts block contents as string, returns a tuple of metadata, meta (meta being internal metadata, which will be returned as an encrypted base64 string if it is encrypted, dict if not).
-
+            accepts block contents as string, returns a tuple of
+            metadata, meta (meta being internal metadata, which will be
+            returned as an encrypted base64 string if it is encrypted, dict if not).
         '''
         meta = {}
         metadata = {}
@@ -255,34 +152,6 @@ class OnionrUtils:
             meta = metadata['meta']
         return (metadata, meta, data)

-    def checkPort(self, port, host=''):
-        '''
-            Checks if a port is available, returns bool
-        '''
-        # inspired by https://www.reddit.com/r/learnpython/comments/2i4qrj/how_to_write_a_python_script_that_checks_to_see/ckzarux/
-        sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
-        retVal = False
-        try:
-            sock.bind((host, port))
-        except OSError as e:
-            if e.errno is 98:
-                retVal = True
-        finally:
-            sock.close()
-
-        return retVal
-
-    def checkIsIP(self, ip):
-        '''
-            Check if a string is a valid IPv4 address
-        '''
-        try:
-            socket.inet_aton(ip)
-        except:
-            return False
-        else:
-            return True
-
     def processBlockMetadata(self, blockHash):
         '''
             Read metadata from a block and cache it to the block database
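The removed checkPort() bound a socket and treated errno 98 as "port in use". A sketch of the same check written with errno.EADDRINUSE and ==, since the original's `e.errno is 98` relies on CPython small-int identity:

import errno, socket

def port_in_use(port, host=''):
    # Try to bind; EADDRINUSE means something else already owns the port.
    sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    try:
        sock.bind((host, port))
    except OSError as e:
        return e.errno == errno.EADDRINUSE
    finally:
        sock.close()
    return False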
@@ -366,7 +235,7 @@ class OnionrUtils:

     def validateHash(self, data, length=64):
         '''
-            Validate if a string is a valid hex formatted hash
+            Validate if a string is a valid hash hex digest (does not compare, just checks length and charset)
         '''
         retVal = True
         if data == False or data == True:
@@ -533,22 +402,6 @@ class OnionrUtils:
         except:
             return False

-    def getPeerByHashId(self, hash):
-        '''
-            Return the pubkey of the user if known from the hash
-        '''
-        if self._core._crypto.pubKeyHashID() == hash:
-            retData = self._core._crypto.pubKey
-            return retData
-        conn = sqlite3.connect(self._core.peerDB)
-        c = conn.cursor()
-        command = (hash,)
-        retData = ''
-        for row in c.execute('SELECT id FROM peers WHERE hashID = ?', command):
-            if row[0] != '':
-                retData = row[0]
-        return retData
-
     def isCommunicatorRunning(self, timeout = 5, interval = 0.1):
         try:
             runcheck_file = self._core.dataDir + '.runcheck'
@@ -569,13 +422,6 @@ class OnionrUtils:
         except:
             return False

-    def token(self, size = 32):
-        '''
-            Generates a secure random hex encoded token
-        '''
-
-        return binascii.hexlify(os.urandom(size))
-
     def importNewBlocks(self, scanDir=''):
         '''
             This function is intended to scan for new blocks ON THE DISK and import them
@@ -697,22 +543,6 @@ class OnionrUtils:
             pass
         return data

-    def checkNetwork(self, torPort=0):
-        '''Check if we are connected to the internet (through Tor)'''
-        retData = False
-        connectURLs = []
-        try:
-            with open('static-data/connect-check.txt', 'r') as connectTest:
-                connectURLs = connectTest.read().split(',')
-
-            for url in connectURLs:
-                if self.doGetRequest(url, port=torPort, ignoreAPI=True) != False:
-                    retData = True
-                    break
-        except FileNotFoundError:
-            pass
-        return retData
-
 def size(path='.'):
     '''
         Returns the size of a folder's contents in bytes
@@ -737,4 +567,4 @@ def humanSize(num, suffix='B'):
         if abs(num) < 1024.0:
             return "%.1f %s%s" % (num, unit, suffix)
         num /= 1024.0
-    return "%.1f %s%s" % (num, 'Yi', suffix)
+    return "%.1f %s%s" % (num, 'Yi', suffix)
@@ -41,7 +41,7 @@ def _processForwardKey(api, myBlock):
     else:
         raise onionrexceptions.InvalidPubkey("%s is not a valid pubkey key" % (key,))

-def on_processblocks(api):
+def on_processblocks(api, data=None):
     # Generally fired by utils.
     myBlock = api.data['block']
     blockType = api.data['type']
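The hook above gains an optional data parameter, so existing dispatch code that passes only api keeps working while newer callers can hand over extra context. A small sketch of both call styles; the SimpleNamespace stand-in for the plugin API object is an assumption for illustration:

from types import SimpleNamespace

def on_processblocks(api, data=None):
    block_type = api.data['type']
    return (block_type, data)

api = SimpleNamespace(data={'block': object(), 'type': 'pm'})
print(on_processblocks(api))                        # old-style call still works
print(on_processblocks(api, data={'hash': 'abc'}))  # new-style call with context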
onionr/utils/netutils.py (new file, 34 lines added)
@@ -0,0 +1,34 @@
+'''
+    Onionr - P2P Microblogging Platform & Social network
+
+    OnionrUtils offers various useful functions to Onionr networking.
+'''
+'''
+    This program is free software: you can redistribute it and/or modify
+    it under the terms of the GNU General Public License as published by
+    the Free Software Foundation, either version 3 of the License, or
+    (at your option) any later version.
+
+    This program is distributed in the hope that it will be useful,
+    but WITHOUT ANY WARRANTY; without even the implied warranty of
+    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+    GNU General Public License for more details.
+
+    You should have received a copy of the GNU General Public License
+    along with this program. If not, see <https://www.gnu.org/licenses/>.
+'''
+def checkNetwork(utilsInst, torPort=0):
+    '''Check if we are connected to the internet (through Tor)'''
+    retData = False
+    connectURLs = []
+    try:
+        with open('static-data/connect-check.txt', 'r') as connectTest:
+            connectURLs = connectTest.read().split(',')
+
+        for url in connectURLs:
+            if utilsInst.doGetRequest(url, port=torPort, ignoreAPI=True) != False:
+                retData = True
+                break
+    except FileNotFoundError:
+        pass
+    return retData
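A usage sketch for the new utils/netutils.py, matching the DaemonTools.netCheck call site above. The stub utils object is an assumption; in Onionr the real OnionrUtils instance is passed in, and the module expects to find static-data/connect-check.txt relative to the working directory:

from utils import netutils   # assumes running from the onionr/ source directory

class FakeUtils:
    def doGetRequest(self, url, port=0, ignoreAPI=True):
        return False          # pretend every check URL is unreachable

print('online:', netutils.checkNetwork(FakeUtils(), torPort=9050))   # -> False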
onionr/utils/networkmerger.py (new file, 46 lines added)
@@ -0,0 +1,46 @@
+'''
+    Onionr - P2P Microblogging Platform & Social network
+
+    Merges peer and block lists
+'''
+'''
+    This program is free software: you can redistribute it and/or modify
+    it under the terms of the GNU General Public License as published by
+    the Free Software Foundation, either version 3 of the License, or
+    (at your option) any later version.
+
+    This program is distributed in the hope that it will be useful,
+    but WITHOUT ANY WARRANTY; without even the implied warranty of
+    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+    GNU General Public License for more details.
+
+    You should have received a copy of the GNU General Public License
+    along with this program. If not, see <https://www.gnu.org/licenses/>.
+'''
+import logger
+def mergeAdders(newAdderList, coreInst):
+    '''
+        Merge peer adders list to our database
+    '''
+    try:
+        retVal = False
+        if newAdderList != False:
+            for adder in newAdderList.split(','):
+                adder = adder.strip()
+                if not adder in coreInst.listAdders(randomOrder = False) and adder != coreInst.hsAddress and not coreInst._blacklist.inBlacklist(adder):
+                    if not config.get('tor.v3onions') and len(adder) == 62:
+                        continue
+                    if coreInst.addAddress(adder):
+                        # Check if we have the maximum amount of allowed stored peers
+                        if config.get('peers.max_stored_peers') > len(coreInst.listAdders()):
+                            logger.info('Added %s to db.' % adder, timestamp = True)
+                            retVal = True
+                        else:
+                            logger.warn('Reached the maximum amount of peers in the net database as allowed by your config.')
+                else:
+                    pass
+                    #logger.debug('%s is either our address or already in our DB' % adder)
+        return retVal
+    except Exception as error:
+        logger.error('Failed to merge adders.', error = error)
+        return False
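A usage sketch for the new utils/networkmerger.py, mirroring the communicator call site above (networkmerger.mergeAdders(newAdders, self._core)). The stub core object is an assumption for illustration; note that, as shown, the module calls config.get(...) while importing only logger, so it appears to depend on config being importable and initialised elsewhere:

from utils import networkmerger   # assumes running from the onionr/ source directory

class FakeBlacklist:
    def inBlacklist(self, adder):
        return False

class FakeCore:
    # Minimal stand-in exposing only what mergeAdders() touches.
    hsAddress = 'ownaddressxxxxxx.onion'
    _blacklist = FakeBlacklist()
    def listAdders(self, randomOrder=False):
        return []
    def addAddress(self, adder):
        print('stored', adder)
        return True

networkmerger.mergeAdders('example1.onion,example2.onion', FakeCore())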