diff --git a/.gitignore b/.gitignore
index 0ee81636..3c70da06 100644
--- a/.gitignore
+++ b/.gitignore
@@ -11,3 +11,4 @@ onionr/gnupg/*
run.sh
onionr/data-encrypted.dat
onionr/.onionr-lock
+core
diff --git a/Makefile b/Makefile
index 78ff7939..23b32ccc 100644
--- a/Makefile
+++ b/Makefile
@@ -16,6 +16,8 @@ uninstall:
sudo rm -f /usr/bin/onionr
test:
+ @./RUN-LINUX.sh stop
+ @sleep 1
@rm -rf onionr/data-backup
@mv onionr/data onionr/data-backup | true > /dev/null 2>&1
-@cd onionr; ./tests.py; ./cryptotests.py;
@@ -30,4 +32,9 @@ soft-reset:
reset:
@echo "Hard-resetting Onionr..."
rm -rf onionr/data/ | true > /dev/null 2>&1
+ #@./RUN-LINUX.sh version | grep -v "Failed" --color=always
+
+plugins-reset:
+ @echo "Resetting plugins..."
+ rm -rf onionr/data/plugins/ | true > /dev/null 2>&1
@./RUN-LINUX.sh version | grep -v "Failed" --color=always
diff --git a/onionr/api.py b/onionr/api.py
index b6b115a5..d3d86d9f 100755
--- a/onionr/api.py
+++ b/onionr/api.py
@@ -50,7 +50,7 @@ class API:
'''
config.reload()
-
+
if config.get('devmode', True):
self._developmentMode = True
logger.set_level(logger.LEVEL_DEBUG)
@@ -227,8 +227,8 @@ class API:
response = 'none'
resp = Response(response)
elif action == 'kex':
- peers = self._core.listPeers()
- response = ','.join(self._core.listPeers())
+ peers = self._core.listPeers(getPow=True)
+ response = ','.join(peers)
resp = Response(response)
else:
resp = Response("")
diff --git a/onionr/bitpeer b/onionr/bitpeer
deleted file mode 160000
index a74e826e..00000000
--- a/onionr/bitpeer
+++ /dev/null
@@ -1 +0,0 @@
-Subproject commit a74e826e9c69e643ead7950f9f76a05ab8664ddc
diff --git a/onionr/btc.py b/onionr/btc.py
deleted file mode 100644
index f09ec622..00000000
--- a/onionr/btc.py
+++ /dev/null
@@ -1,44 +0,0 @@
-'''
- Onionr - P2P Microblogging Platform & Social network
-
- Handle bitcoin operations
-'''
-'''
- This program is free software: you can redistribute it and/or modify
- it under the terms of the GNU General Public License as published by
- the Free Software Foundation, either version 3 of the License, or
- (at your option) any later version.
-
- This program is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- GNU General Public License for more details.
-
- You should have received a copy of the GNU General Public License
- along with this program. If not, see .
-'''
-from bitpeer.node import *
-from bitpeer.storage.shelve import ShelveStorage
-import logging, time
-import socks, sys
-class OnionrBTC:
- def __init__(self, lastBlock='00000000000000000021ee6242d08e3797764c9258e54e686bc2afff51baf599', lastHeight=510613, torP=9050):
- stream = logging.StreamHandler()
- logger = logging.getLogger('halfnode')
- logger.addHandler(stream)
- logger.setLevel (10)
-
- LASTBLOCK = lastBlock
- LASTBLOCKINDEX = lastHeight
- self.node = Node ('BTC', ShelveStorage ('data/btc-blocks.db'), lastblockhash=LASTBLOCK, lastblockheight=LASTBLOCKINDEX, torPort=torP)
-
- self.node.bootstrap ()
- self.node.connect ()
- self.node.loop ()
-
-if __name__ == "__main__":
- torPort = int(sys.argv[1])
- bitcoin = OnionrBTC(torPort)
- while True:
- print(bitcoin.node.getBlockHash(bitcoin.node.getLastBlockHeight())) # Using print on purpose, do not change to logger
- time.sleep(5)
diff --git a/onionr/communicator.py b/onionr/communicator.py
index 71ec4a29..777660ad 100755
--- a/onionr/communicator.py
+++ b/onionr/communicator.py
@@ -1,9 +1,9 @@
#!/usr/bin/env python3
'''
-Onionr - P2P Microblogging Platform & Social network.
+ Onionr - P2P Microblogging Platform & Social network.
-This file contains both the OnionrCommunicate class for communcating with peers
-and code to operate as a daemon, getting commands from the command queue database (see core.Core.daemonQueue)
+ This file contains both the OnionrCommunicate class for communcating with peers
+ and code to operate as a daemon, getting commands from the command queue database (see core.Core.daemonQueue)
'''
'''
This program is free software: you can redistribute it and/or modify
@@ -19,8 +19,8 @@ and code to operate as a daemon, getting commands from the command queue databas
You should have received a copy of the GNU General Public License
along with this program. If not, see .
'''
-import sqlite3, requests, hmac, hashlib, time, sys, os, math, logger, urllib.parse, base64, binascii, random, json
-import core, onionrutils, onionrcrypto, netcontroller, onionrproofs, btc, config, onionrplugins as plugins
+import sqlite3, requests, hmac, hashlib, time, sys, os, math, logger, urllib.parse, base64, binascii, random, json, threading
+import core, onionrutils, onionrcrypto, netcontroller, onionrproofs, config, onionrplugins as plugins
class OnionrCommunicate:
def __init__(self, debug, developmentMode):
@@ -40,28 +40,27 @@ class OnionrCommunicate:
self.ignoredHashes = []
self.highFailureAmount = 7
- '''
- logger.info('Starting Bitcoin Node... with Tor socks port:' + str(sys.argv[2]), timestamp=True)
- try:
- self.bitcoin = btc.OnionrBTC(torP=int(sys.argv[2]))
- except _gdbm.error:
- pass
- logger.info('Bitcoin Node started, on block: ' + self.bitcoin.node.getBlockHash(self.bitcoin.node.getLastBlockHeight()), timestamp=True)
- '''
- #except:
- #logger.fatal('Failed to start Bitcoin Node, exiting...')
- #exit(1)
- blockProcessTimer = 0
- blockProcessAmount = 5
- highFailureTimer = 0
- highFailureRate = 10
- heartBeatTimer = 0
- heartBeatRate = 0
- pexTimer = 25 # How often we should check for new peers
- pexCount = 0
+ self.communicatorThreads = 0
+ self.maxThreads = 75
+ self.processBlocksThreads = 0
+ self.lookupBlocksThreads = 0
+
+ self.blocksProcessing = [] # list of blocks currently processing, to avoid trying a block twice at once in 2 seperate threads
+ self.peerStatus = {} # network actions (active requests) for peers used mainly to prevent conflicting actions in threads
+
+ self.communicatorTimers = {} # communicator timers, name: rate (in seconds)
+ self.communicatorTimerCounts = {}
+ self.communicatorTimerFuncs = {}
+
+ self.registerTimer('blockProcess', 20)
+ self.registerTimer('highFailure', 10)
+ self.registerTimer('heartBeat', 10)
+ self.registerTimer('pex', 120)
logger.debug('Communicator debugging enabled.')
- torID = open('data/hs/hostname').read()
+
+ with open('data/hs/hostname', 'r') as torID:
+            torID = torID.read()
apiRunningCheckRate = 10
apiRunningCheckCount = 0
@@ -77,24 +76,44 @@ class OnionrCommunicate:
while True:
command = self._core.daemonQueue()
# Process blocks based on a timer
- blockProcessTimer += 1
- heartBeatTimer += 1
- pexCount += 1
- if highFailureTimer == highFailureRate:
- highFailureTimer = 0
+ self.timerTick()
+ # TODO: migrate below if statements to be own functions which are called in the above timerTick() function
+ if self.communicatorTimers['highFailure'] == self.communicatorTimerCounts['highFailure']:
+ self.communicatorTimerCounts['highFailure'] = 0
for i in self.peerData:
if self.peerData[i]['failCount'] >= self.highFailureAmount:
self.peerData[i]['failCount'] -= 1
- if pexTimer == pexCount:
- self.getNewPeers()
- pexCount = 0 # TODO: do not reset timer if low peer count
- if heartBeatRate == heartBeatTimer:
+ if self.communicatorTimers['pex'] == self.communicatorTimerCounts['pex']:
+ pT1 = threading.Thread(target=self.getNewPeers, name="pT1")
+ pT1.start()
+ pT2 = threading.Thread(target=self.getNewPeers, name="pT2")
+ pT2.start()
+ self.communicatorTimerCounts['pex'] = 0# TODO: do not reset timer if low peer count
+ if self.communicatorTimers['heartBeat'] == self.communicatorTimerCounts['heartBeat']:
logger.debug('Communicator heartbeat')
- heartBeatTimer = 0
- if blockProcessTimer == blockProcessAmount:
- self.lookupBlocks()
- self.processBlocks()
- blockProcessTimer = 0
+ self.communicatorTimerCounts['heartBeat'] = 0
+ if self.communicatorTimers['blockProcess'] == self.communicatorTimerCounts['blockProcess']:
+ lT1 = threading.Thread(target=self.lookupBlocks, name="lt1", args=(True,))
+ lT2 = threading.Thread(target=self.lookupBlocks, name="lt2", args=(True,))
+ lT3 = threading.Thread(target=self.lookupBlocks, name="lt3", args=(True,))
+ lT4 = threading.Thread(target=self.lookupBlocks, name="lt4", args=(True,))
+ pbT1 = threading.Thread(target=self.processBlocks, name='pbT1', args=(True,))
+ pbT2 = threading.Thread(target=self.processBlocks, name='pbT2', args=(True,))
+ pbT3 = threading.Thread(target=self.processBlocks, name='pbT3', args=(True,))
+ pbT4 = threading.Thread(target=self.processBlocks, name='pbT4', args=(True,))
+ if (self.maxThreads - 8) >= threading.active_count():
+ lT1.start()
+ lT2.start()
+ lT3.start()
+ lT4.start()
+ pbT1.start()
+ pbT2.start()
+ pbT3.start()
+ pbT4.start()
+ self.communicatorTimerCounts['blockProcess'] = 0
+ else:
+ logger.debug(threading.active_count())
+ logger.debug('Too many threads.')
if command != False:
if command[0] == 'shutdown':
logger.info('Daemon received exit command.', timestamp=True)
@@ -114,6 +133,8 @@ class OnionrCommunicate:
elif command[0] == 'runCheck':
logger.info('Status check; looks good.')
open('data/.runcheck', 'w+').close()
+ elif command[0] == 'kex':
+                    self.communicatorTimerCounts['pex'] = self.communicatorTimers['pex'] - 1
elif command[0] == 'event':
# todo
pass
@@ -165,6 +186,28 @@ class OnionrCommunicate:
connection_handlers = {}
id_peer_cache = {}
+ def registerTimer(self, timerName, rate, timerFunc=None):
+ '''Register a communicator timer'''
+ self.communicatorTimers[timerName] = rate
+ self.communicatorTimerCounts[timerName] = 0
+ self.communicatorTimerFuncs[timerName] = timerFunc
+
+ def timerTick(self):
+ '''Increments timers "ticks" and calls funcs if applicable'''
+ tName = ''
+ for i in self.communicatorTimers.items():
+ tName = i[0]
+ self.communicatorTimerCounts[tName] += 1
+
+ if self.communicatorTimerCounts[tName] == self.communicatorTimers[tName]:
+ try:
+ self.communicatorTimerFuncs[tName]()
+ except TypeError:
+ pass
+ else:
+ self.communicatorTimerCounts[tName] = 0
+
+
def get_connection_handlers(self, name = None):
'''
Returns a list of callback handlers by name, or, if name is None, it returns all handlers.
@@ -174,7 +217,7 @@ class OnionrCommunicate:
return self.connection_handlers
elif name in self.connection_handlers:
return self.connection_handlers[name]
- else
+ else:
return list()
def add_connection_handler(self, name, handler):
@@ -274,7 +317,7 @@ class OnionrCommunicate:
events.event('outgoing_direct_connection', data = {'callback' : True, 'communicator' : self, 'data' : data, 'id' : identifier, 'token' : token, 'peer' : peer, 'callback' : callback, 'log' : log})
- logger.debug('Direct connection (identifier: "%s"): %s' + (identifier, data_str))
+ logger.debug('Direct connection (identifier: "%s"): %s' % (identifier, data_str))
try:
self.performGet('directMessage', peer, data_str)
except:
@@ -360,10 +403,10 @@ class OnionrCommunicate:
def getNewPeers(self):
'''
- Get new peers and keys
+ Get new peers and ed25519 keys
'''
- peersCheck = 5 # Amount of peers to ask for new peers + keys
+ peersCheck = 1 # Amount of peers to ask for new peers + keys
peersChecked = 0
peerList = list(self._core.listAdders()) # random ordered list of peers
newKeys = []
@@ -380,40 +423,49 @@ class OnionrCommunicate:
while peersCheck > peersChecked:
#i = secrets.randbelow(maxN) # cant use prior to 3.6
i = random.randint(0, maxN)
- logger.info('Using ' + peerList[i] + ' to find new peers', timestamp=True)
+
+ try:
+ if self.peerStatusTaken(peerList[i], 'pex') or self.peerStatusTaken(peerList[i], 'kex'):
+ continue
+ except IndexError:
+ pass
+
+ logger.info('Using %s to find new peers...' % peerList[i], timestamp=True)
try:
newAdders = self.performGet('pex', peerList[i], skipHighFailureAddress=True)
- logger.debug('Attempting to merge address: ')
- logger.debug(newAdders)
- self._utils.mergeAdders(newAdders)
+ if not newAdders is False: # keep the is False thing in there, it might not be bool
+ logger.debug('Attempting to merge address: %s' % str(newAdders))
+ self._utils.mergeAdders(newAdders)
except requests.exceptions.ConnectionError:
- logger.info(peerList[i] + ' connection failed', timestamp=True)
+ logger.info('%s connection failed' % peerList[i], timestamp=True)
continue
else:
try:
- logger.info('Using ' + peerList[i] + ' to find new keys')
+ logger.info('Using %s to find new keys...' % peerList[i])
newKeys = self.performGet('kex', peerList[i], skipHighFailureAddress=True)
- logger.debug('Attempting to merge pubkey: ')
- logger.debug(newKeys)
+ logger.debug('Attempting to merge pubkey: %s' % str(newKeys))
# TODO: Require keys to come with POW token (very large amount of POW)
self._utils.mergeKeys(newKeys)
except requests.exceptions.ConnectionError:
- logger.info(peerList[i] + ' connection failed', timestamp=True)
+ logger.info('%s connection failed' % peerList[i], timestamp=True)
continue
else:
peersChecked += 1
return
- def lookupBlocks(self):
+ def lookupBlocks(self, isThread=False):
'''
Lookup blocks and merge new ones
'''
-
+ if isThread:
+ self.lookupBlocksThreads += 1
peerList = self._core.listAdders()
- blocks = ''
+ blockList = list()
for i in peerList:
+ if self.peerStatusTaken(i, 'getBlockHashes') or self.peerStatusTaken(i, 'getDBHash'):
+ continue
try:
if self.peerData[i]['failCount'] >= self.highFailureAmount:
continue
@@ -423,66 +475,70 @@ class OnionrCommunicate:
lastDB = self._core.getAddressInfo(i, 'DBHash')
if lastDB == None:
- logger.debug('Fetching hash from ' + str(i) + ', no previous known.')
+ logger.debug('Fetching hash from %s, no previous known.' % str(i))
else:
- logger.debug('Fetching hash from ' + str(i) + ', ' + str(lastDB) + ' last known')
+ logger.debug('Fetching hash from %s, %s last known' % (str(i), str(lastDB)))
currentDB = self.performGet('getDBHash', i)
if currentDB != False:
- logger.debug(i + " hash db (from request): " + currentDB)
+ logger.debug('%s hash db (from request): %s' % (str(i), str(currentDB)))
else:
- logger.warn("Error getting hash db status for " + i)
+ logger.warn('Failed to get hash db status for %s' % str(i))
if currentDB != False:
if lastDB != currentDB:
- logger.debug('Fetching hash from ' + i + ' - ' + currentDB + ' current hash.')
+ logger.debug('Fetching hash from %s - %s current hash.' % (str(i), currentDB))
try:
- blocks += self.performGet('getBlockHashes', i)
+                        blockList.extend(self.performGet('getBlockHashes', i).split('\n'))
except TypeError:
- logger.warn('Failed to get data hash from ' + i)
+ logger.warn('Failed to get data hash from %s' % str(i))
self.peerData[i]['failCount'] -= 1
if self._utils.validateHash(currentDB):
self._core.setAddressInfo(i, "DBHash", currentDB)
- if len(blocks.strip()) != 0:
+ if len(blockList) != 0:
pass
- #logger.debug('BLOCKS:' + blocks)
-
- blockList = blocks.split('\n')
for i in blockList:
if len(i.strip()) == 0:
continue
- if self._utils.hasBlock(i):
- continue
+ try:
+ if self._utils.hasBlock(i):
+ continue
+ except:
+ logger.warn('Invalid hash') # TODO: move below validate hash check below
+ pass
if i in self.ignoredHashes:
continue
#logger.debug('Exchanged block (blockList): ' + i)
if not self._utils.validateHash(i):
# skip hash if it isn't valid
- logger.warn('Hash ' + i + ' is not valid')
+ logger.warn('Hash %s is not valid' % str(i))
continue
else:
self.newHashes[i] = 0
- logger.debug('Adding ' + i + ' to hash database...')
+ logger.debug('Adding %s to hash database...' % str(i))
self._core.addToBlockDB(i)
-
+        if isThread: self.lookupBlocksThreads -= 1
return
- def processBlocks(self):
+ def processBlocks(self, isThread=False):
'''
Work with the block database and download any missing blocks
This is meant to be called from the communicator daemon on its timer.
'''
-
- for i in self._core.getBlockList(unsaved=True).split("\n"):
+ if isThread:
+ self.processBlocksThreads += 1
+ for i in self._core.getBlockList(unsaved = True):
if i != "":
- if i in self.ignoredHashes:
+ if i in self.blocksProcessing or i in self.ignoredHashes:
+ #logger.debug('already processing ' + i)
continue
-
+ else:
+ self.blocksProcessing.append(i)
try:
self.newHashes[i]
except KeyError:
@@ -490,61 +546,78 @@ class OnionrCommunicate:
# check if a new hash has been around too long, delete it from database and add it to ignore list
if self.newHashes[i] >= self.keepNewHash:
- logger.warn('Ignoring block ' + i + ' because it took to long to get valid data.')
+ logger.warn('Ignoring block %s because it took to long to get valid data.' % str(i))
del self.newHashes[i]
self._core.removeBlock(i)
self.ignoredHashes.append(i)
continue
self.newHashes[i] += 1
- logger.warn('UNSAVED BLOCK: ' + i)
+ logger.warn('Block is unsaved: %s' % str(i))
data = self.downloadBlock(i)
- # if block was successfull gotten (hash already verified)
+ # if block was successfully gotten (hash already verified)
if data:
del self.newHashes[i] # remove from probation list
# deal with block metadata
blockContent = self._core.getData(i)
+ try:
+ blockContent = blockContent.encode()
+ except AttributeError:
+ pass
try:
#blockMetadata = json.loads(self._core.getData(i)).split('}')[0] + '}'
- blockMetadata = self._core.getData(i).split(b'}')[0]
+ blockMetadata = json.loads(blockContent[:blockContent.find(b'\n')].decode())
try:
- blockMetadata = blockMetadata.decode()
+ blockMeta2 = json.loads(blockMetadata['meta'])
+ except KeyError:
+ blockMeta2 = {'type': ''}
+ pass
+ blockContent = blockContent[blockContent.find(b'\n') + 1:]
+ try:
+ blockContent = blockContent.decode()
except AttributeError:
pass
- blockMetadata = json.loads(blockMetadata + '}')
-
- try:
- blockMetadata['sig']
- blockMetadata['id']
- except KeyError:
- pass
-
+ if not self._crypto.verifyPow(blockContent, blockMeta2):
+ logger.warn("%s has invalid or insufficient proof of work token, deleting..." % str(i))
+ self._core.removeBlock(i)
+ continue
else:
- creator = self._utils.getPeerByHashId(blockMetadata['id'])
- try:
- creator = creator.decode()
- except AttributeError:
- pass
+ if (('sig' in blockMetadata) and ('id' in blockMeta2)): # id doesn't exist in blockMeta2, so this won't workin the first place
- if self._core._crypto.edVerify(blockContent.split(b'}')[1], creator, blockMetadata['sig'], encodedData=True):
- self._core.updateBlockInfo(i, 'sig', 'true')
- else:
- self._core.updateBlockInfo(i, 'sig', 'false')
+ #blockData = json.dumps(blockMetadata['meta']) + blockMetadata[blockMetadata.rfind(b'}') + 1:]
+
+ creator = self._utils.getPeerByHashId(blockMeta2['id'])
+ try:
+ creator = creator.decode()
+ except AttributeError:
+ pass
+
+ if self._core._crypto.edVerify(blockMetadata['meta'] + blockContent, creator, blockMetadata['sig'], encodedData=True):
+ logger.info('%s was signed' % str(i))
+ self._core.updateBlockInfo(i, 'sig', 'true')
+ else:
+ logger.warn('%s has an invalid signature' % str(i))
+ self._core.updateBlockInfo(i, 'sig', 'false')
try:
- logger.info('Block type is ' + blockMetadata['type'])
- self._core.updateBlockInfo(i, 'dataType', blockMetadata['type'])
+ logger.info('Block type is %s' % str(blockMeta2['type']))
+ self._core.updateBlockInfo(i, 'dataType', blockMeta2['type'])
+ self.removeBlockFromProcessingList(i)
+                        # NOTE: removed duplicate removeBlockFromProcessingList(i) call
except KeyError:
logger.warn('Block has no type')
pass
except json.decoder.JSONDecodeError:
logger.warn('Could not decode block metadata')
- pass
-
+ self.removeBlockFromProcessingList(i)
+            if isThread: self.processBlocksThreads -= 1
return
+    def removeBlockFromProcessingList(self, block):
+        self.blocksProcessing = [b for b in self.blocksProcessing if b != block]
+
def downloadBlock(self, hash, peerTries=3):
'''
Download a block from random order of peers
@@ -556,8 +629,11 @@ class OnionrCommunicate:
peerTryCount = 0
for i in peerList:
- if self.peerData[i]['failCount'] >= self.highFailureAmount:
- continue
+ try:
+ if self.peerData[i]['failCount'] >= self.highFailureAmount:
+ continue
+ except KeyError:
+ pass
if peerTryCount >= peerTries:
break
@@ -581,17 +657,11 @@ class OnionrCommunicate:
if digest == hash.strip():
self._core.setData(data)
- logger.info('Successfully obtained data for ' + hash, timestamp=True)
+ logger.info('Successfully obtained data for %s' % str(hash), timestamp=True)
retVal = True
break
- '''
- if data.startswith(b'-txt-'):
- self._core.setBlockType(hash, 'txt')
- if len(data) < 120:
- logger.debug('Block text:\n' + data.decode())
- '''
else:
- logger.warn("Failed to validate " + hash + " " + " hash calculated was " + digest)
+ logger.warn("Failed to validate %s -- hash calculated was %s" % (hash, digest))
peerTryCount += 1
return retVal
@@ -614,12 +684,12 @@ class OnionrCommunicate:
raise Exception("Could not perform self address check in performGet due to not knowing our address")
if selfCheck:
if peer.replace('/', '') == self._core.hsAdder:
- logger.warn('Tried to performget to own hidden service, but selfCheck was not set to false')
+ logger.warn('Tried to performGet to own hidden service, but selfCheck was not set to false')
return
# Store peer in peerData dictionary (non permanent)
if not peer in self.peerData:
- self.peerData[peer] = {'connectCount': 0, 'failCount': 0, 'lastConnectTime': math.floor(time.time())}
+ self.peerData[peer] = {'connectCount': 0, 'failCount': 0, 'lastConnectTime': self._utils.getEpoch()}
socksPort = sys.argv[2]
'''We use socks5h to use tor as DNS'''
proxies = {'http': 'socks5://127.0.0.1:' + str(socksPort), 'https': 'socks5://127.0.0.1:' + str(socksPort)}
@@ -630,13 +700,14 @@ class OnionrCommunicate:
try:
if skipHighFailureAddress and self.peerData[peer]['failCount'] > self.highFailureAmount:
retData = False
- logger.debug('Skipping ' + peer + ' because of high failure rate')
+ logger.debug('Skipping %s because of high failure rate.' % peer)
else:
- logger.debug('Contacting ' + peer + ' on port ' + socksPort)
+ self.peerStatus[peer] = action
+ logger.debug('Contacting %s on port %s' % (peer, str(socksPort)))
r = requests.get(url, headers=headers, proxies=proxies, timeout=(15, 30))
retData = r.text
except requests.exceptions.RequestException as e:
- logger.warn(action + " failed with peer " + peer + ": " + str(e))
+ logger.debug("%s failed with peer %s" % (action, peer))
retData = False
if not retData:
@@ -644,9 +715,20 @@ class OnionrCommunicate:
else:
self.peerData[peer]['connectCount'] += 1
self.peerData[peer]['failCount'] -= 1
- self.peerData[peer]['lastConnectTime'] = math.floor(time.time())
+ self.peerData[peer]['lastConnectTime'] = self._utils.getEpoch()
+ self._core.setAddressInfo(peer, 'lastConnect', self._utils.getEpoch())
return retData
+ def peerStatusTaken(self, peer, status):
+ '''
+ Returns if we are currently performing a specific action with a peer.
+ '''
+ try:
+ if self.peerStatus[peer] == status:
+ return True
+ except KeyError:
+ pass
+ return False
shouldRun = False
debug = True
diff --git a/onionr/core.py b/onionr/core.py
index 1cdbf574..d2ab303a 100644
--- a/onionr/core.py
+++ b/onionr/core.py
@@ -17,11 +17,11 @@
You should have received a copy of the GNU General Public License
along with this program. If not, see .
'''
-import sqlite3, os, sys, time, math, base64, tarfile, getpass, simplecrypt, hashlib, nacl, logger, json, netcontroller
+import sqlite3, os, sys, time, math, base64, tarfile, getpass, simplecrypt, hashlib, nacl, logger, json, netcontroller, math
#from Crypto.Cipher import AES
#from Crypto import Random
-import onionrutils, onionrcrypto, btc, onionrevents as events
+import onionrutils, onionrcrypto, onionrproofs, onionrevents as events
if sys.version_info < (3, 6):
try:
@@ -73,22 +73,24 @@ class Core:
except Exception as error:
logger.error('Failed to initialize core Onionr library.', error=error)
logger.fatal('Cannot recover from error.')
- exit(1)
+ sys.exit(1)
return
- def addPeer(self, peerID, name=''):
+ def addPeer(self, peerID, powID, name=''):
'''
Adds a public key to the key database (misleading function name)
-
- DOES NO SAFETY CHECKS if the ID is valid, but prepares the insertion
'''
# This function simply adds a peer to the DB
if not self._utils.validatePubKey(peerID):
return False
+ if sys.getsizeof(powID) > 60:
+ logger.warn("POW token for pubkey base64 representation exceeded 60 bytes")
+ return False
+
conn = sqlite3.connect(self.peerDB)
hashID = self._crypto.pubKeyHashID(peerID)
c = conn.cursor()
- t = (peerID, name, 'unknown', hashID)
+ t = (peerID, name, 'unknown', hashID, powID)
for i in c.execute("SELECT * FROM PEERS where id = '" + peerID + "';"):
try:
@@ -99,7 +101,7 @@ class Core:
pass
except IndexError:
pass
- c.execute('INSERT INTO peers (id, name, dateSeen, hashID) VALUES(?, ?, ?, ?);', t)
+ c.execute('INSERT INTO peers (id, name, dateSeen, pow, hashID) VALUES(?, ?, ?, ?, ?);', t)
conn.commit()
conn.close()
@@ -189,7 +191,8 @@ class Core:
speed int,
success int,
DBHash text,
- failure int
+ failure int,
+ lastConnect int
);
''')
conn.commit()
@@ -212,7 +215,8 @@ class Core:
bytesStored int,
trust int,
pubkeyExchanged int,
- hashID);
+ hashID text,
+ pow text not null);
''')
conn.commit()
conn.close()
@@ -251,7 +255,7 @@ class Core:
return
- def addToBlockDB(self, newHash, selfInsert=False):
+ def addToBlockDB(self, newHash, selfInsert=False, dataSaved=False):
'''
Add a hash value to the block db
@@ -263,8 +267,8 @@ class Core:
return
conn = sqlite3.connect(self.blockDB)
c = conn.cursor()
- currentTime = math.floor(time.time())
- if selfInsert:
+ currentTime = self._utils.getEpoch()
+ if selfInsert or dataSaved:
selfInsert = 1
else:
selfInsert = 0
@@ -275,11 +279,12 @@ class Core:
return
- def getData(self,hash):
+ def getData(self, hash):
'''
Simply return the data associated to a hash
'''
try:
+ # logger.debug('Opening %s' % (str(self.blockDataLocation) + str(hash) + '.dat'))
dataFile = open(self.blockDataLocation + hash + '.dat', 'rb')
data = dataFile.read()
dataFile.close()
@@ -387,7 +392,7 @@ class Core:
Add a command to the daemon queue, used by the communication daemon (communicator.py)
'''
# Intended to be used by the web server
- date = math.floor(time.time())
+ date = self._utils.getEpoch()
conn = sqlite3.connect(self.queueDB)
c = conn.cursor()
t = (command, data, date)
@@ -431,7 +436,7 @@ class Core:
conn.close()
return addressList
- def listPeers(self, randomOrder=True):
+ def listPeers(self, randomOrder=True, getPow=False):
'''
Return a list of public keys (misleading function name)
@@ -448,10 +453,19 @@ class Core:
for i in c.execute(payload):
try:
if len(i[0]) != 0:
- peerList.append(i[0])
+ if getPow:
+ peerList.append(i[0] + '-' + i[1])
+ else:
+ peerList.append(i[0])
except TypeError:
pass
- peerList.append(self._crypto.pubKey)
+ if getPow:
+ try:
+ peerList.append(self._crypto.pubKey + '-' + self._crypto.pubKeyPowToken)
+ except TypeError:
+ pass
+ else:
+ peerList.append(self._crypto.pubKey)
conn.close()
return peerList
@@ -513,11 +527,12 @@ class Core:
success int, 4
DBHash text, 5
failure int 6
+ lastConnect 7
'''
conn = sqlite3.connect(self.addressDB)
c = conn.cursor()
command = (address,)
- infoNumbers = {'address': 0, 'type': 1, 'knownPeer': 2, 'speed': 3, 'success': 4, 'DBHash': 5, 'failure': 6}
+ infoNumbers = {'address': 0, 'type': 1, 'knownPeer': 2, 'speed': 3, 'success': 4, 'DBHash': 5, 'failure': 6, 'lastConnect': 7}
info = infoNumbers[info]
iterCount = 0
retVal = ''
@@ -539,7 +554,7 @@ class Core:
c = conn.cursor()
command = (data, address)
# TODO: validate key on whitelist
- if key not in ('address', 'type', 'knownPeer', 'speed', 'success', 'DBHash', 'failure'):
+ if key not in ('address', 'type', 'knownPeer', 'speed', 'success', 'DBHash', 'failure', 'lastConnect'):
raise Exception("Got invalid database key when setting address info")
c.execute('UPDATE adders SET ' + key + ' = ? WHERE address=?', command)
conn.commit()
@@ -565,22 +580,36 @@ class Core:
return
- def getBlockList(self, unsaved = False):
+ def getBlockList(self, unsaved = False): # TODO: Use unsaved
'''
Get list of our blocks
'''
conn = sqlite3.connect(self.blockDB)
c = conn.cursor()
- retData = ''
if unsaved:
execute = 'SELECT hash FROM hashes WHERE dataSaved != 1 ORDER BY RANDOM();'
else:
execute = 'SELECT hash FROM hashes ORDER BY RANDOM();'
+ rows = list()
for row in c.execute(execute):
for i in row:
- retData += i + "\n"
+ rows.append(i)
- return retData
+ return rows
+
+ def getBlockDate(self, blockHash):
+ '''
+ Returns the date a block was received
+ '''
+ conn = sqlite3.connect(self.blockDB)
+ c = conn.cursor()
+ execute = 'SELECT dateReceived FROM hashes WHERE hash=?;'
+ args = (blockHash,)
+ for row in c.execute(execute, args):
+ for i in row:
+ return int(i)
+
+ return None
def getBlocksByType(self, blockType):
'''
@@ -588,14 +617,14 @@ class Core:
'''
conn = sqlite3.connect(self.blockDB)
c = conn.cursor()
- retData = ''
execute = 'SELECT hash FROM hashes WHERE dataType=?;'
args = (blockType,)
+ rows = list()
for row in c.execute(execute, args):
for i in row:
- retData += i + "\n"
+ rows.append(i)
- return retData.split('\n')
+ return rows
def setBlockType(self, hash, blockType):
'''
@@ -630,32 +659,57 @@ class Core:
Inserts a block into the network
'''
+ powProof = onionrproofs.POW(data)
+ powToken = ''
+ # wait for proof to complete
+ try:
+ while True:
+ powToken = powProof.getResult()
+ if powToken == False:
+ time.sleep(0.3)
+ continue
+ powHash = powToken[0]
+ powToken = base64.b64encode(powToken[1])
+ try:
+ powToken = powToken.decode()
+ except AttributeError:
+ pass
+ finally:
+ break
+ except KeyboardInterrupt:
+ logger.warn("Got keyboard interrupt while working on inserting block, stopping.")
+ powProof.shutdown()
+ return ''
+
try:
data.decode()
except AttributeError:
data = data.encode()
retData = ''
- metadata = {'type': header}
+ metadata = {'type': header, 'powHash': powHash, 'powToken': powToken}
+ sig = {}
+
+ metadata = json.dumps(metadata)
+ metadata = metadata.encode()
+ signature = ''
if sign:
- signature = self._crypto.edSign(data, self._crypto.privKey, encodeResult=True)
+ signature = self._crypto.edSign(metadata + b'\n' + data, self._crypto.privKey, encodeResult=True)
ourID = self._crypto.pubKeyHashID()
# Convert from bytes on some py versions?
try:
ourID = ourID.decode()
except AttributeError:
pass
- metadata['id'] = ourID
- metadata['sig'] = signature
-
+ metadata = {'sig': signature, 'meta': metadata.decode()}
metadata = json.dumps(metadata)
metadata = metadata.encode()
if len(data) == 0:
logger.error('Will not insert empty block')
else:
- addedHash = self.setData(metadata + data)
+ addedHash = self.setData(metadata + b'\n' + data)
self.addToBlockDB(addedHash, selfInsert=True)
self.setBlockType(addedHash, header)
retData = addedHash
diff --git a/onionr/default-plugins/pluginmanager/main.py b/onionr/default-plugins/pluginmanager/main.py
deleted file mode 100644
index cfcf1911..00000000
--- a/onionr/default-plugins/pluginmanager/main.py
+++ /dev/null
@@ -1,141 +0,0 @@
-'''
- This is the future Onionr plugin manager. TODO: Add better description.
-'''
-
-# useful libraries
-import logger, config
-import os, sys, json
-
-plugin_name = 'pluginmanager'
-
-keys_data = {'keys' : {}}
-
-# key functions
-
-def writeKeys():
- '''
- Serializes and writes the keystore in memory to file
- '''
-
- file = open(keys_file, 'w')
- file.write(json.dumps(keys_data, indent=4, sort_keys=True))
- file.close()
-
-def readKeys():
- '''
- Loads the keystore into memory
- '''
-
- global keys_data
- keys_data = json.loads(open(keys_file).read())
- return keys_data
-
-def getKey(plugin):
- '''
- Returns the public key for a given plugin
- '''
-
- readKeys()
- return (keys_data['keys'][plugin] if plugin in keys_data['keys'] else None)
-
-def saveKey(plugin, key):
- '''
- Saves the public key for a plugin to keystore
- '''
-
- keys_data['keys'][plugin] = key
- writeKeys()
-
-def check():
- '''
- Checks to make sure the keystore file still exists
- '''
-
- global keys_file
- keys_file = pluginapi.plugins.get_data_folder(plugin_name) + 'keystore.json'
- if not os.path.isfile(keys_file):
- writeKeys()
-
-# command handlers
-
-def help():
- logger.info(sys.argv[0] + ' ' + sys.argv[1] + ' [public key/block hash]')
-
-def commandInstallPlugin():
- logger.warn('This feature is not functional or is still in development.')
- if len(sys.argv) >= 3:
- check()
-
- pluginname = sys.argv[2]
- pkobh = None # public key or block hash
-
- if len(sys.argv) >= 4:
- # public key or block hash specified
- pkobh = sys.argv[3]
- else:
- # none specified, check if in config file
- pkobh = getKey(pluginname)
-
- if pkobh is None:
- logger.error('No key for this plugin found in keystore, please specify.')
- help()
- return True
-
- valid_hash = pluginapi.get_utils().validateHash(pkobh)
- real_block = False
- valid_key = pluginapi.get_utils().validatePubKey(pkobh)
- real_key = False
-
- if valid_hash:
- real_block = pluginapi.get_utils().hasBlock(pkobh)
- elif valid_key:
- real_key = pluginapi.get_utils().hasKey(pkobh)
-
- blockhash = None
-
- if valid_hash and not real_block:
- logger.error('Block hash not found. Perhaps it has not been synced yet?')
- logger.debug('Is valid hash, but does not belong to a known block.')
- return True
- elif valid_hash and real_block:
- blockhash = str(pkobh)
- logger.debug('Using block %s...' % blockhash)
- elif valid_key and not real_key:
- logger.error('Public key not found. Try adding the node by address manually, if possible.')
- logger.debug('Is valid key, but the key is not a known one.')
- elif valid_key and real_key:
- publickey = str(pkobh)
- logger.debug('Using public key %s...' % publickey)
-
- saveKey(pluginname, pkobh)
- else:
- logger.error('Unknown data "%s"; must be public key or block hash.' % str(pkobh))
- return
- else:
- help()
-
- return True
-
-def commandUninstallPlugin():
- logger.info('This feature has not been created yet. Please check back later.')
- return
-
-def commandSearchPlugin():
- logger.info('This feature has not been created yet. Please check back later.')
- return
-
-# event listeners
-
-def on_init(api, data = None):
- global pluginapi
- pluginapi = api
- check()
-
- # register some commands
- api.commands.register(['install-plugin', 'installplugin', 'plugin-install', 'install', 'plugininstall'], commandInstallPlugin)
- api.commands.register(['remove-plugin', 'removeplugin', 'plugin-remove', 'uninstall-plugin', 'uninstallplugin', 'plugin-uninstall', 'uninstall', 'remove', 'pluginremove'], commandUninstallPlugin)
- api.commands.register(['search', 'filter-plugins', 'search-plugins', 'searchplugins', 'search-plugin', 'searchplugin', 'findplugin', 'find-plugin', 'filterplugin', 'plugin-search', 'pluginsearch'], commandSearchPlugin)
-
- # add help menus once the features are actually implemented
-
- return
diff --git a/onionr/logger.py b/onionr/logger.py
index 990a8b95..c915f2f9 100644
--- a/onionr/logger.py
+++ b/onionr/logger.py
@@ -222,7 +222,7 @@ def error(data, error=None, timestamp=True):
if not error is None:
debug('Error: ' + str(error) + parse_error())
-# fatal: when the something so bad has happened that the prorgam must stop
+# fatal: when something so bad has happened that the program must stop
def fatal(data, timestamp=True):
if get_level() <= LEVEL_FATAL:
log('#', data, colors.bg.red + colors.fg.green + colors.bold, timestamp=timestamp)
diff --git a/onionr/netcontroller.py b/onionr/netcontroller.py
index 77149477..4c25a7fc 100644
--- a/onionr/netcontroller.py
+++ b/onionr/netcontroller.py
@@ -89,13 +89,17 @@ DataDirectory data/tordata/
torVersion.kill()
# wait for tor to get to 100% bootstrap
- for line in iter(tor.stdout.readline, b''):
- if 'Bootstrapped 100%: Done' in line.decode():
- break
- elif 'Opening Socks listener' in line.decode():
- logger.debug(line.decode().replace('\n', ''))
- else:
- logger.fatal('Failed to start Tor. Try killing any other Tor processes owned by this user.')
+ try:
+ for line in iter(tor.stdout.readline, b''):
+ if 'Bootstrapped 100%: Done' in line.decode():
+ break
+ elif 'Opening Socks listener' in line.decode():
+ logger.debug(line.decode().replace('\n', ''))
+ else:
+ logger.fatal('Failed to start Tor. Try killing any other Tor processes owned by this user.')
+ return False
+ except KeyboardInterrupt:
+ logger.fatal("Got keyboard interrupt")
return False
logger.info('Finished starting Tor', timestamp=True)
diff --git a/onionr/onionr.py b/onionr/onionr.py
index 213d5d6e..d55664a8 100755
--- a/onionr/onionr.py
+++ b/onionr/onionr.py
@@ -25,9 +25,10 @@ import sys
if sys.version_info[0] == 2 or sys.version_info[1] < 5:
print('Error, Onionr requires Python 3.4+')
sys.exit(1)
-import os, base64, random, getpass, shutil, subprocess, requests, time, platform, datetime, re
+import os, base64, random, getpass, shutil, subprocess, requests, time, platform, datetime, re, json, getpass
from threading import Thread
import api, core, config, logger, onionrplugins as plugins, onionrevents as events
+import onionrutils
from onionrutils import OnionrUtils
from netcontroller import NetController
@@ -64,7 +65,7 @@ class Onionr:
else:
# the default config file doesn't exist, try hardcoded config
config.set_config({'devmode': True, 'log': {'file': {'output': True, 'path': 'data/output.log'}, 'console': {'output': True, 'color': True}}})
- if not exists:
+ if not data_exists:
config.save()
config.reload() # this will read the configuration file into memory
@@ -107,12 +108,11 @@ class Onionr:
if not os.path.exists('data/blocks/'):
os.mkdir('data/blocks/')
- # Copy default plugins into plugins folder
-
+ # Copy default plugins into plugins folder
if not os.path.exists(plugins.get_plugins_folder()):
- if os.path.exists('default-plugins/'):
- names = [f for f in os.listdir("default-plugins/") if not os.path.isfile(f)]
- shutil.copytree('default-plugins/', plugins.get_plugins_folder())
+ if os.path.exists('static-data/default-plugins/'):
+ names = [f for f in os.listdir("static-data/default-plugins/") if not os.path.isfile(f)]
+ shutil.copytree('static-data/default-plugins/', plugins.get_plugins_folder())
# Enable plugins
for name in names:
@@ -134,7 +134,7 @@ class Onionr:
# Get configuration
- if not exists:
+ if not data_exists:
# Generate default config
# Hostname should only be set if different from 127.x.x.x. Important for DNS rebinding attack prevention.
if self.debug:
@@ -153,6 +153,8 @@ class Onionr:
'config': self.configure,
'start': self.start,
'stop': self.killDaemon,
+ 'status': self.showStats,
+ 'statistics': self.showStats,
'stats': self.showStats,
'enable-plugin': self.enablePlugin,
@@ -191,6 +193,8 @@ class Onionr:
'addaddress': self.addAddress,
'addfile': self.addFile,
+ 'importblocks': self.onionrUtils.importNewBlocks,
+
'introduce': self.onionrCore.introduceNode,
'connect': self.addAddress
}
@@ -212,11 +216,12 @@ class Onionr:
'pm': 'Adds a private message to block',
'get-pms': 'Shows private messages sent to you',
'addfile': 'Create an Onionr block from a file',
+ 'importblocks': 'Import blocks from the disk (Onionr is transport-agnostic!)',
'introduce': 'Introduce your node to the public Onionr network',
}
# initialize plugins
- events.event('init', onionr = self)
+ events.event('init', onionr = self, threaded = False)
command = ''
try:
@@ -389,7 +394,8 @@ class Onionr:
addedHash = self.onionrCore.insertBlock(messageToAdd, header='txt')
#self.onionrCore.addToBlockDB(addedHash, selfInsert=True)
#self.onionrCore.setBlockType(addedHash, 'txt')
- logger.info("Message inserted as as block %s" % addedHash)
+ if addedHash != '':
+ logger.info("Message inserted as block %s" % addedHash)
return
def getPMs(self):
@@ -457,7 +463,10 @@ class Onionr:
os.makedirs(plugins.get_plugins_folder(plugin_name))
with open(plugins.get_plugins_folder(plugin_name) + '/main.py', 'a') as main:
- main.write(open('static-data/default_plugin.py').read().replace('$user', os.getlogin()).replace('$date', datetime.datetime.now().strftime('%Y-%m-%d')))
+ main.write(open('static-data/default_plugin.py').read().replace('$user', os.getlogin()).replace('$date', datetime.datetime.now().strftime('%Y-%m-%d')).replace('$name', plugin_name))
+
+ with open(plugins.get_plugins_folder(plugin_name) + '/info.json', 'a') as main:
+ main.write(json.dumps({'author' : 'anonymous', 'description' : 'the default description of the plugin', 'version' : '1.0'}))
logger.info('Enabling plugin "%s"...' % plugin_name)
plugins.enable(plugin_name, self)
@@ -550,8 +559,54 @@ class Onionr:
Displays statistics and exits
'''
- logger.info('Our pubkey: ' + self.onionrCore._crypto.pubKey)
- logger.info('Our address: ' + self.get_hostname())
+ try:
+ # define stats messages here
+ messages = {
+ # info about local client
+ 'Onionr Daemon Status' : ((logger.colors.fg.green + 'Online') if self.onionrUtils.isCommunicatorRunning(timeout = 2) else logger.colors.fg.red + 'Offline'),
+ 'Public Key' : self.onionrCore._crypto.pubKey,
+ 'Address' : self.get_hostname(),
+
+ # file and folder size stats
+ 'div1' : True, # this creates a solid line across the screen, a div
+ 'Total Block Size' : onionrutils.humanSize(onionrutils.size('data/blocks/')),
+ 'Total Plugin Size' : onionrutils.humanSize(onionrutils.size('data/plugins/')),
+ 'Log File Size' : onionrutils.humanSize(onionrutils.size('data/output.log')),
+
+ # count stats
+ 'div2' : True,
+ 'Known Peers Count' : str(len(self.onionrCore.listPeers())),
+ 'Enabled Plugins Count' : str(len(config.get('plugins')['enabled'])) + ' / ' + str(len(os.listdir('data/plugins/')))
+ }
+
+ # color configuration
+ colors = {
+ 'title' : logger.colors.bold,
+ 'key' : logger.colors.fg.lightgreen,
+ 'val' : logger.colors.fg.green,
+ 'border' : logger.colors.fg.lightblue,
+
+ 'reset' : logger.colors.reset
+ }
+
+ # pre-processing
+ maxlength = 0
+ for key, val in messages.items():
+ if not (type(val) is bool and val is True):
+ maxlength = max(len(key), maxlength)
+
+ # generate stats table
+ logger.info(colors['title'] + 'Onionr v%s Statistics' % ONIONR_VERSION + colors['reset'])
+ logger.info(colors['border'] + '─' * (maxlength + 1) + '┐' + colors['reset'])
+ for key, val in messages.items():
+ if not (type(val) is bool and val is True):
+ logger.info(colors['key'] + str(key).rjust(maxlength) + colors['reset'] + colors['border'] + ' │ ' + colors['reset'] + colors['val'] + str(val) + colors['reset'])
+ else:
+ logger.info(colors['border'] + '─' * (maxlength + 1) + '┤' + colors['reset'])
+ logger.info(colors['border'] + '─' * (maxlength + 1) + '┘' + colors['reset'])
+ except Exception as e:
+ logger.error('Failed to generate statistics table.', error = e, timestamp = False)
+
return
def showHelp(self, command = None):
diff --git a/onionr/onionrblockapi.py b/onionr/onionrblockapi.py
new file mode 100644
index 00000000..a20a4853
--- /dev/null
+++ b/onionr/onionrblockapi.py
@@ -0,0 +1,443 @@
+'''
+ Onionr - P2P Microblogging Platform & Social network.
+
+ This file contains the Block class, which is used for working with Onionr blocks
+'''
+'''
+ This program is free software: you can redistribute it and/or modify
+ it under the terms of the GNU General Public License as published by
+ the Free Software Foundation, either version 3 of the License, or
+ (at your option) any later version.
+
+ This program is distributed in the hope that it will be useful,
+ but WITHOUT ANY WARRANTY; without even the implied warranty of
+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ GNU General Public License for more details.
+
+ You should have received a copy of the GNU General Public License
+ along with this program. If not, see <https://www.gnu.org/licenses/>.
+'''
+
+import core as onionrcore, logger
+import json, os, datetime
+
+class Block:
+ def __init__(self, hash = None, core = None):
+ '''
+ Initializes a Block instance
+
+ Inputs:
+ - hash (str): the hash of the block to be imported, if any
+ - core (Core/str):
+ - if (Core): this is the Core instance to be used, don't create a new one
+ - if (str): treat `core` as the block content, and instead, treat `hash` as the block type
+
+ Outputs:
+ - (Block): the new Block instance
+ '''
+
+ # input from arguments
+ if (type(hash) == str) and (type(core) == str):
+ self.btype = hash
+ self.bcontent = core
+ self.hash = None
+ self.core = None
+ else:
+ self.btype = ''
+ self.bcontent = ''
+ self.hash = hash
+ self.core = core
+
+ # initialize variables
+ self.valid = True
+ self.raw = None
+ self.powHash = None
+ self.powToken = None
+ self.signed = False
+ self.signature = None
+ self.signedData = None
+ self.blockFile = None
+ self.bheader = {}
+ self.bmetadata = {}
+
+ # handle arguments
+ if self.getCore() is None:
+ self.core = onionrcore.Core()
+ if not self.getHash() is None:
+ self.update()
+
+ # logic
+
+ def update(self, data = None, file = None):
+ '''
+ Loads data from a block in to the current object.
+
+ Inputs:
+ - data (str):
+ - if None: will load from file by hash
+ - else: will load from `data` string
+ - file (str):
+ - if None: will load from file specified in this parameter
+ - else: will load from wherever block is stored by hash
+
+ Outputs:
+ - (bool): indicates whether or not the operation was successful
+ '''
+
+ try:
+ # import from string
+ blockdata = data
+
+ # import from file
+ if blockdata is None:
+ filelocation = file
+
+ if filelocation is None:
+ if self.getHash() is None:
+ return False
+
+ filelocation = 'data/blocks/%s.dat' % self.getHash()
+
+ blockdata = open(filelocation, 'rb').read().decode('utf-8')
+
+ self.blockFile = filelocation
+ else:
+ self.blockFile = None
+
+ # parse block
+ self.raw = str(blockdata)
+ self.bheader = json.loads(self.getRaw()[:self.getRaw().index('\n')])
+ self.bcontent = self.getRaw()[self.getRaw().index('\n') + 1:]
+ self.bmetadata = json.loads(self.getHeader('meta'))
+ self.btype = self.getMetadata('type')
+ self.powHash = self.getMetadata('powHash')
+ self.powToken = self.getMetadata('powToken')
+ self.signed = ('sig' in self.getHeader() and self.getHeader('sig') != '')
+ self.signature = (None if not self.isSigned() else self.getHeader('sig'))
+ self.signedData = (None if not self.isSigned() else self.getHeader('meta') + '\n' + self.getContent())
+ self.date = self.getCore().getBlockDate(self.getHash())
+
+ if not self.getDate() is None:
+ self.date = datetime.datetime.fromtimestamp(self.getDate())
+
+ self.valid = True
+ return True
+ except Exception as e:
+ logger.error('Failed to update block data.', error = e, timestamp = False)
+
+ self.valid = False
+ return False
+
+ def delete(self):
+ '''
+ Deletes the block's file and records, if they exist
+
+ Outputs:
+ - (bool): whether or not the operation was successful
+ '''
+
+ if self.exists():
+ os.remove(self.getBlockFile())
+ removeBlock(self.getHash())
+ return True
+ return False
+
+ def save(self, sign = False, recreate = True):
+ '''
+ Saves a block to file and imports it into Onionr
+
+ Inputs:
+ - sign (bool): whether or not to sign the block before saving
+ - recreate (bool): if the block already exists, whether or not to recreate the block and save under a new hash
+
+ Outputs:
+ - (bool): whether or not the operation was successful
+ '''
+
+ try:
+ if self.isValid() is True:
+ if (not self.getBlockFile() is None) and (recreate is True):
+ with open(self.getBlockFile(), 'wb') as blockFile:
+ blockFile.write(self.getRaw().encode())
+ self.update()
+ else:
+ self.hash = self.getCore().insertBlock(self.getContent(), header = self.getType(), sign = sign)
+ self.update()
+ return True
+ else:
+ logger.warn('Not writing block; it is invalid.')
+ except Exception as e:
+ logger.error('Failed to save block.', error = e, timestamp = False)
+ return False
+
+ # getters
+
+ def getHash(self):
+ '''
+ Returns the hash of the block if saved to file
+
+ Outputs:
+ - (str): the hash of the block, or None
+ '''
+
+ return self.hash
+
+ def getCore(self):
+ '''
+ Returns the Core instance being used by the Block
+
+ Outputs:
+ - (Core): the Core instance
+ '''
+
+ return self.core
+
+ def getType(self):
+ '''
+ Returns the type of the block
+
+ Outputs:
+ - (str): the type of the block
+ '''
+
+ return self.btype
+
+ def getRaw(self):
+ '''
+ Returns the raw contents of the block, if saved to file
+
+ Outputs:
+ - (str): the raw contents of the block, or None
+ '''
+
+ return str(self.raw)
+
+ def getHeader(self, key = None):
+ '''
+ Returns the header information
+
+ Inputs:
+ - key (str): only returns the value of the key in the header
+
+ Outputs:
+ - (dict/str): either the whole header as a dict, or one value
+ '''
+
+ if not key is None:
+ return self.getHeader()[key]
+ else:
+ return self.bheader
+
+ def getMetadata(self, key = None):
+ '''
+ Returns the metadata information
+
+ Inputs:
+ - key (str): only returns the value of the key in the metadata
+
+ Outputs:
+ - (dict/str): either the whole metadata as a dict, or one value
+ '''
+
+ if not key is None:
+ return self.getMetadata()[key]
+ else:
+ return self.bmetadata
+
+ def getContent(self):
+ '''
+ Returns the contents of the block
+
+ Outputs:
+ - (str): the contents of the block
+ '''
+
+ return str(self.bcontent)
+
+ def getDate(self):
+ '''
+ Returns the date that the block was received, if loaded from file
+
+ Outputs:
+ - (datetime): the date that the block was received
+ '''
+
+ return self.date
+
+ def getBlockFile(self):
+ '''
+ Returns the location of the block file if it is saved
+
+ Outputs:
+ - (str): the location of the block file, or None
+ '''
+
+ return self.blockFile
+
+ def isValid(self):
+ '''
+ Checks if the block is valid
+
+ Outputs:
+ - (bool): whether or not the block is valid
+ '''
+
+ return self.valid
+
+ def isSigned(self):
+ '''
+ Checks if the block was signed
+
+ Outputs:
+ - (bool): whether or not the block is signed
+ '''
+
+ return self.signed
+
+ def getSignature(self):
+ '''
+ Returns the base64-encoded signature
+
+ Outputs:
+ - (str): the signature, or None
+ '''
+
+ return self.signature
+
+ def getSignedData(self):
+ '''
+ Returns the data that was signed
+
+ Outputs:
+ - (str): the data that was signed, or None
+ '''
+
+ return self.signedData
+
+ def isSigner(self, signer, encodedData = True):
+ '''
+ Checks if the block was signed by the signer inputted
+
+ Inputs:
+ - signer (str): the public key of the signer to check against
+ - encodedData (bool): whether or not the `signer` argument is base64 encoded
+
+ Outputs:
+ - (bool): whether or not the signer of the block is the signer inputted
+ '''
+
+ try:
+ if (not self.isSigned()) or (not self.getCore()._utils.validatePubKey(signer)):
+ return False
+
+ return bool(self.getCore()._crypto.edVerify(self.getSignedData(), signer, self.getSignature(), encodedData = encodedData))
+ except:
+ return False
+
+ # setters
+
+ def setType(self, btype):
+ '''
+ Sets the type of the block
+
+ Inputs:
+ - btype (str): the type of block to be set to
+
+ Outputs:
+ - (Block): the block instance
+ '''
+
+ self.btype = btype
+ return self
+
+ def setContent(self, bcontent):
+ '''
+ Sets the contents of the block
+
+ Inputs:
+ - bcontent (str): the contents to be set to
+
+ Outputs:
+ - (Block): the block instance
+ '''
+
+ self.bcontent = str(bcontent)
+ return self
+
+ # static
+
+ def getBlocks(type = None, signer = None, signed = None, reverse = False, core = None):
+ '''
+ Returns a list of Block objects based on supplied filters
+
+ Inputs:
+ - type (str): filters by block type
+ - signer (str/list): filters by signer (one in the list has to be a signer)
+ - signed (bool): filters out by whether or not the block is signed
+ - reverse (bool): reverses the list if True
+ - core (Core): lets you optionally supply a core instance so one doesn't need to be started
+
+ Outputs:
+ - (list): a list of Block objects that match the input
+ '''
+
+ try:
+ core = (core if not core is None else onionrcore.Core())
+
+ relevant_blocks = list()
+ blocks = (core.getBlockList() if type is None else core.getBlocksByType(type))
+
+ for block in blocks:
+ if Block.exists(block):
+ block = Block(block, core = core)
+
+ relevant = True
+
+ if (not signed is None) and (block.isSigned() != bool(signed)):
+ relevant = False
+ if not signer is None:
+ if isinstance(signer, (str,)):
+ signer = [signer]
+
+ isSigner = False
+ for key in signer:
+ if block.isSigner(key):
+ isSigner = True
+ break
+
+ if not isSigner:
+ relevant = False
+
+ if relevant:
+ relevant_blocks.append(block)
+
+ if bool(reverse):
+ relevant_blocks.reverse()
+
+ return relevant_blocks
+ except Exception as e:
+ logger.debug(('Failed to get blocks: %s' % str(e)) + logger.parse_error())
+
+ return list()
+
+ def exists(hash):
+ '''
+ Checks if a block is saved to file or not
+
+ Inputs:
+ - hash (str/Block):
+ - if (Block): check if this block is saved to file
+ - if (str): check if a block by this hash is in file
+
+ Outputs:
+ - (bool): whether or not the block file exists
+ '''
+
+ if hash is None:
+ return False
+ elif type(hash) == Block:
+ blockfile = hash.getBlockFile()
+ else:
+ blockfile = 'data/blocks/%s.dat' % hash
+
+ return os.path.exists(blockfile) and os.path.isfile(blockfile)
diff --git a/onionr/onionrblocks.py b/onionr/onionrblocks.py
deleted file mode 100644
index ffa8207c..00000000
--- a/onionr/onionrblocks.py
+++ /dev/null
@@ -1,26 +0,0 @@
-'''
-Onionr - P2P Microblogging Platform & Social network.
-
-This class contains the OnionrBlocks class which is a class for working with Onionr blocks
-'''
-'''
- This program is free software: you can redistribute it and/or modify
- it under the terms of the GNU General Public License as published by
- the Free Software Foundation, either version 3 of the License, or
- (at your option) any later version.
-
- This program is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- GNU General Public License for more details.
-
- You should have received a copy of the GNU General Public License
- along with this program. If not, see .
-'''
-import json
-class OnionrBlocks:
- def __init__(self, coreInstance):
- return
- def metadataGenerate(self):
- return
-
diff --git a/onionr/onionrcrypto.py b/onionr/onionrcrypto.py
index 3a45eeda..84e9827f 100644
--- a/onionr/onionrcrypto.py
+++ b/onionr/onionrcrypto.py
@@ -17,15 +17,19 @@
You should have received a copy of the GNU General Public License
along with this program. If not, see .
'''
-import nacl.signing, nacl.encoding, nacl.public, nacl.secret, os, binascii, base64, hashlib, logger
+import nacl.signing, nacl.encoding, nacl.public, nacl.hash, nacl.secret, os, binascii, base64, hashlib, logger, onionrproofs, time, math
class OnionrCrypto:
def __init__(self, coreInstance):
self._core = coreInstance
self._keyFile = 'data/keys.txt'
+ self.keyPowFile = 'data/keyPow.txt'
self.pubKey = None
self.privKey = None
-
+
+ self.pubKeyPowToken = None
+ #self.pubKeyPowHash = None
+
self.HASH_ID_ROUNDS = 2000
# Load our own pub/priv Ed25519 keys, gen & save them if they don't exist
@@ -34,12 +38,29 @@ class OnionrCrypto:
keys = keys.read().split(',')
self.pubKey = keys[0]
self.privKey = keys[1]
+ try:
+ with open(self.keyPowFile, 'r') as powFile:
+ data = powFile.read()
+ self.pubKeyPowToken = data
+ except (FileNotFoundError, IndexError):
+ pass
else:
keys = self.generatePubKey()
self.pubKey = keys[0]
self.privKey = keys[1]
with open(self._keyFile, 'w') as keyfile:
keyfile.write(self.pubKey + ',' + self.privKey)
+ with open(self.keyPowFile, 'w') as keyPowFile:
+ proof = onionrproofs.POW(self.pubKey)
+ logger.info('Doing necessary work to insert our public key')
+ while True:
+ time.sleep(0.2)
+ powToken = proof.getResult()
+ if powToken != False:
+ break
+ keyPowFile.write(base64.b64encode(powToken[1]).decode())
+ self.pubKeyPowToken = powToken[1]
+ self.pubKeyPowHash = powToken[0]
return
def edVerify(self, data, key, sig, encodedData=True):
@@ -60,14 +81,13 @@ class OnionrCrypto:
retData = key.verify(data, sig) # .encode() is not the same as nacl.encoding
except nacl.exceptions.BadSignatureError:
pass
-
else:
try:
retData = key.verify(data, sig)
except nacl.exceptions.BadSignatureError:
pass
return retData
-
+
def edSign(self, data, key, encodeResult=False):
'''Ed25519 sign data'''
try:
@@ -138,8 +158,6 @@ class OnionrCrypto:
decrypted = self.symmetricDecrypt(data, key, encodedKey=True)
return decrypted
- return
-
def symmetricEncrypt(self, data, key, encodedKey=False, returnEncoded=True):
'''Encrypt data to a 32-byte key (Salsa20-Poly1305 MAC)'''
if encodedKey:
@@ -177,7 +195,7 @@ class OnionrCrypto:
if returnEncoded:
decrypted = base64.b64encode(decrypted)
return decrypted
-
+
def generateSymmetricPeer(self, peer):
'''Generate symmetric key for a peer and save it to the peer database'''
key = self.generateSymmetric()
@@ -193,7 +211,7 @@ class OnionrCrypto:
private_key = nacl.signing.SigningKey.generate()
public_key = private_key.verify_key.encode(encoder=nacl.encoding.Base32Encoder())
return (public_key.decode(), private_key.encode(encoder=nacl.encoding.Base32Encoder()).decode())
-
+
def pubKeyHashID(self, pubkey=''):
'''Accept a ed25519 public key, return a truncated result of X many sha3_256 hash rounds'''
if pubkey == '':
@@ -209,4 +227,49 @@ class OnionrCrypto:
hasher.update(pubkey + prev)
prev = hasher.hexdigest()
result = prev
- return result
\ No newline at end of file
+ return result
+
+ def sha3Hash(self, data):
+ hasher = hashlib.sha3_256()
+ hasher.update(data)
+ return hasher.hexdigest()
+
+ def blake2bHash(self, data):
+ try:
+ data = data.encode()
+ except AttributeError:
+ pass
+ return nacl.hash.blake2b(data)
+
+ def verifyPow(self, blockContent, metadata):
+ '''
+ Verifies the proof of work associated with a block
+ '''
+ retData = False
+
+ if not (('powToken' in metadata) and ('powHash' in metadata)):
+ return False
+
+ dataLen = len(blockContent)
+
+ expectedHash = self.blake2bHash(base64.b64decode(metadata['powToken']) + self.blake2bHash(blockContent.encode()))
+ difficulty = 0
+ try:
+ expectedHash = expectedHash.decode()
+ except AttributeError:
+ pass
+ if metadata['powHash'] == expectedHash:
+ difficulty = math.floor(dataLen / 1000000)
+
+ mainHash = '0000000000000000000000000000000000000000000000000000000000000000'#nacl.hash.blake2b(nacl.utils.random()).decode()
+ puzzle = mainHash[:difficulty]
+
+ if metadata['powHash'][:difficulty] == puzzle:
+ # logger.debug('Validated block pow')
+ retData = True
+ else:
+ logger.debug("Invalid token (#1)")
+ else:
+ logger.debug('Invalid token (#2): Expected hash %s, got hash %s...' % (metadata['powHash'], expectedHash))
+
+ return retData
diff --git a/onionr/onionrpluginapi.py b/onionr/onionrpluginapi.py
index 7aa2b891..bfaf73e8 100644
--- a/onionr/onionrpluginapi.py
+++ b/onionr/onionrpluginapi.py
@@ -18,7 +18,7 @@
along with this program. If not, see .
'''
-import onionrplugins as plugins, logger
+import onionrplugins, core as onionrcore, logger
class DaemonAPI:
def __init__(self, pluginapi):
@@ -40,9 +40,7 @@ class DaemonAPI:
return
def local_command(self, command):
- self.pluginapi.get_utils().localCommand(self, command)
-
- return
+ return self.pluginapi.get_utils().localCommand(self, command)
def queue_pop(self):
return self.get_core().daemonQueue()
@@ -52,34 +50,34 @@ class PluginAPI:
self.pluginapi = pluginapi
def start(self, name):
- plugins.start(name)
+ onionrplugins.start(name)
def stop(self, name):
- plugins.stop(name)
+ onionrplugins.stop(name)
def reload(self, name):
- plugins.reload(name)
+ onionrplugins.reload(name)
def enable(self, name):
- plugins.enable(name)
+ onionrplugins.enable(name)
def disable(self, name):
- plugins.disable(name)
+ onionrplugins.disable(name)
def event(self, name, data = {}):
events.event(name, data = data, onionr = self.pluginapi.get_onionr())
def is_enabled(self, name):
- return plugins.is_enabled(name)
+ return onionrplugins.is_enabled(name)
def get_enabled_plugins(self):
- return plugins.get_enabled()
+ return onionrplugins.get_enabled()
def get_folder(self, name = None, absolute = True):
- return plugins.get_plugins_folder(name = name, absolute = absolute)
+ return onionrplugins.get_plugins_folder(name = name, absolute = absolute)
def get_data_folder(self, name, absolute = True):
- return plugins.get_plugin_data_folder(name, absolute = absolute)
+ return onionrplugins.get_plugin_data_folder(name, absolute = absolute)
def daemon_event(self, event, plugin = None):
return # later make local command like /client/?action=makeEvent&event=eventname&module=modulename
@@ -136,6 +134,10 @@ class pluginapi:
def __init__(self, onionr, data):
self.onionr = onionr
self.data = data
+ if self.onionr is None:
+ self.core = onionrcore.Core()
+ else:
+ self.core = self.onionr.onionrCore
self.daemon = DaemonAPI(self)
self.plugins = PluginAPI(self)
@@ -148,10 +150,13 @@ class pluginapi:
return self.data
def get_core(self):
- return self.get_onionr().onionrCore
+ return self.core
def get_utils(self):
- return self.get_onionr().onionrUtils
+ return self.get_core()._utils
+
+ def get_crypto(self):
+ return self.get_core()._crypto
def get_daemonapi(self):
return self.daemon
diff --git a/onionr/onionrplugins.py b/onionr/onionrplugins.py
index bdb7a073..175b2336 100644
--- a/onionr/onionrplugins.py
+++ b/onionr/onionrplugins.py
@@ -62,17 +62,20 @@ def enable(name, onionr = None, start_event = True):
if exists(name):
enabled_plugins = get_enabled_plugins()
- enabled_plugins.append(name)
- config_plugins = config.get('plugins')
- config_plugins['enabled'] = enabled_plugins
- config.set('plugins', config_plugins, True)
+ if not name in enabled_plugins:
+ enabled_plugins.append(name)
+ config_plugins = config.get('plugins')
+ config_plugins['enabled'] = enabled_plugins
+ config.set('plugins', config_plugins, True)
- events.call(get_plugin(name), 'enable', onionr)
+ events.call(get_plugin(name), 'enable', onionr)
- if start_event is True:
- start(name)
+ if start_event is True:
+ start(name)
- return True
+ return True
+ else:
+ return False
else:
logger.error('Failed to enable plugin \"' + name + '\", disabling plugin.')
disable(name)
diff --git a/onionr/onionrproofs.py b/onionr/onionrproofs.py
index e6e2f137..7001d1d6 100644
--- a/onionr/onionrproofs.py
+++ b/onionr/onionrproofs.py
@@ -18,8 +18,8 @@
along with this program. If not, see .
'''
-import nacl.encoding, nacl.hash, nacl.utils, time, math, threading, binascii, logger
-import btc
+import nacl.encoding, nacl.hash, nacl.utils, time, math, threading, binascii, logger, sys
+import core
class POW:
def pow(self, reporting = False):
@@ -30,20 +30,10 @@ class POW:
answer = ''
heartbeat = 200000
hbCount = 0
- blockCheck = 300000 # How often the hasher should check if the bitcoin block is updated (slows hashing but prevents less wasted work)
- blockCheckCount = 0
- block = '' #self.bitcoinNode.getBlockHash(self.bitcoinNode.getLastBlockHeight())
+ myCore = core.Core()
while self.hashing:
- '''
- if blockCheckCount == blockCheck:
- if self.reporting:
- logger.debug('Refreshing Bitcoin block')
- block = '' #self.bitcoinNode.getBlockHash(self.bitcoinNode.getLastBlockHeight())
- blockCheckCount = 0
- blockCheckCount += 1
- hbCount += 1
- '''
- token = nacl.hash.blake2b(nacl.utils.random()).decode()
+ rand = nacl.utils.random()
+ token = nacl.hash.blake2b(rand + self.data).decode()
#print(token)
if self.puzzle == token[0:self.difficulty]:
self.hashing = False
@@ -56,17 +46,28 @@ class POW:
if self.reporting:
logger.info('Found token ' + token, timestamp=True)
logger.info('took ' + str(endTime - startTime) + ' seconds', timestamp=True)
- self.result = token
+ self.result = (token, rand)
- def __init__(self, difficulty, bitcoinNode=''):
+ def __init__(self, data):
self.foundHash = False
- self.difficulty = difficulty
+ self.difficulty = 0
+ self.data = data
+
+ dataLen = sys.getsizeof(data)
+ self.difficulty = math.floor(dataLen/1000000)
+ if self.difficulty <= 2:
+ self.difficulty = 4
+
+ try:
+ self.data = self.data.encode()
+ except AttributeError:
+ pass
+ self.data = nacl.hash.blake2b(self.data)
logger.debug('Computing difficulty of ' + str(self.difficulty))
self.mainHash = '0000000000000000000000000000000000000000000000000000000000000000'#nacl.hash.blake2b(nacl.utils.random()).decode()
self.puzzle = self.mainHash[0:self.difficulty]
- self.bitcoinNode = bitcoinNode
#logger.debug('trying to find ' + str(self.mainHash))
tOne = threading.Thread(name='one', target=self.pow, args=(True,))
tTwo = threading.Thread(name='two', target=self.pow, args=(True,))
diff --git a/onionr/onionrutils.py b/onionr/onionrutils.py
index 1f87e209..457960e0 100644
--- a/onionr/onionrutils.py
+++ b/onionr/onionrutils.py
@@ -18,7 +18,7 @@
along with this program. If not, see .
'''
# Misc functions that do not fit in the main api, but are useful
-import getpass, sys, requests, os, socket, hashlib, logger, sqlite3, config, binascii, time, base64, json
+import getpass, sys, requests, os, socket, hashlib, logger, sqlite3, config, binascii, time, base64, json, glob, shutil, math
import nacl.signing, nacl.encoding
if sys.version_info < (3, 6):
@@ -71,7 +71,7 @@ class OnionrUtils:
if block == '':
logger.error('Could not send PM')
else:
- logger.info('Sent PM, hash: ' + block)
+ logger.info('Sent PM, hash: %s' % block)
except Exception as error:
logger.error('Failed to send PM.', error=error)
@@ -101,9 +101,26 @@ class OnionrUtils:
retVal = False
if newKeyList != False:
for key in newKeyList.split(','):
- if not key in self._core.listPeers(randomOrder=False) and type(key) != None and key != self._core._crypto.pubKey:
- if self._core.addPeer(key):
- retVal = True
+ key = key.split('-')
+ try:
+ if len(key[0]) > 60 or len(key[1]) > 1000:
+ logger.warn('%s or its pow value is too large.' % key[0])
+ continue
+ except IndexError:
+ logger.warn('No pow token')
+ continue
+ powHash = self._core._crypto.blake2bHash(base64.b64decode(key[1]) + self._core._crypto.blake2bHash(key[0].encode()))
+ try:
+ powHash = powHash.encode()
+ except AttributeError:
+ pass
+ if powHash.startswith(b'0000'):
+ if not key[0] in self._core.listPeers(randomOrder=False) and type(key) != None and key[0] != self._core._crypto.pubKey:
+ if self._core.addPeer(key[0], key[1]):
+ retVal = True
+ else:
+ logger.warn(powHash)
+ logger.warn('%s pow failed' % key[0])
return retVal
except Exception as error:
logger.error('Failed to merge keys.', error=error)
@@ -118,15 +135,15 @@ class OnionrUtils:
retVal = False
if newAdderList != False:
for adder in newAdderList.split(','):
- if not adder in self._core.listAdders(randomOrder=False) and adder.strip() != self.getMyAddress():
+ if not adder in self._core.listAdders(randomOrder = False) and adder.strip() != self.getMyAddress():
if self._core.addAddress(adder):
- logger.info('Added ' + adder + ' to db.', timestamp=True)
+ logger.info('Added %s to db.' % adder, timestamp = True)
retVal = True
else:
- logger.debug(adder + " is either our address or already in our DB")
+ logger.debug('%s is either our address or already in our DB' % adder)
return retVal
except Exception as error:
- logger.error('Failed to merge adders.', error=error)
+ logger.error('Failed to merge adders.', error = error)
return False
def getMyAddress(self):
@@ -134,7 +151,7 @@ class OnionrUtils:
with open('./data/hs/hostname', 'r') as hostname:
return hostname.read().strip()
except Exception as error:
- logger.error('Failed to read my address.', error=error)
+ logger.error('Failed to read my address.', error = error)
return None
def localCommand(self, command, silent = True):
@@ -149,7 +166,7 @@ class OnionrUtils:
retData = requests.get('http://' + open('data/host.txt', 'r').read() + ':' + str(config.get('client')['port']) + '/client/?action=' + command + '&token=' + str(config.get('client')['client_hmac']) + '&timingToken=' + self.timingToken).text
except Exception as error:
if not silent:
- logger.error('Failed to make local request (command: ' + str(command) + ').', error=error)
+ logger.error('Failed to make local request (command: %s).' % command, error=error)
retData = False
return retData
@@ -327,7 +344,7 @@ class OnionrUtils:
'''
Find, decrypt, and return array of PMs (array of dictionary, {from, text})
'''
- #blocks = self._core.getBlockList().split('\n')
+ #blocks = self._core.getBlockList()
blocks = self._core.getBlocksByType('pm')
message = ''
sender = ''
@@ -336,52 +353,43 @@ class OnionrUtils:
continue
try:
with open('data/blocks/' + i + '.dat', 'r') as potentialMessage:
- data = potentialMessage.read().split('}')
- message = data[1]
- sigResult = ''
- signer = ''
+ potentialMessage = potentialMessage.read()
+ blockMetadata = json.loads(potentialMessage[:potentialMessage.find('\n')])
+ blockContent = potentialMessage[potentialMessage.find('\n') + 1:]
try:
- metadata = json.loads(data[0] + '}')
- except json.decoder.JSONDecodeError:
- metadata = {}
- '''
- sigResult = self._core._crypto.edVerify(message, signer, sig, encodedData=True)
- #sigResult = False
- if sigResult != False:
- sigResult = 'Valid signature by ' + signer
- else:
- sigResult = 'Invalid signature by ' + signer
- '''
-
- try:
- message = self._core._crypto.pubKeyDecrypt(message, encodedData=True, anonymous=True)
+ message = self._core._crypto.pubKeyDecrypt(blockContent, encodedData=True, anonymous=True)
except nacl.exceptions.CryptoError as e:
- logger.error('Unable to decrypt ' + i, error=e)
+ pass
else:
try:
- message = json.loads(message.decode())
- message['msg']
- message['id']
- message['sig']
+ message = message.decode()
+ except AttributeError:
+ pass
+
+ try:
+ message = json.loads(message)
except json.decoder.JSONDecodeError:
- logger.error('Could not decode PM JSON')
- except KeyError:
- logger.error('PM is missing JSON keys')
+ pass
else:
- if self.validatePubKey(message['id']):
- sigResult = self._core._crypto.edVerify(message['msg'], message['id'], message['sig'], encodedData=True)
- logger.info('-----------------------------------')
- logger.info('Recieved message: ' + message['msg'])
- if sigResult:
- logger.info('Valid signature by ' + message['id'])
+ logger.info('Decrypted %s:' % i)
+ logger.info(message["msg"])
+
+ signer = message["id"]
+ sig = message["sig"]
+
+ if self.validatePubKey(signer):
+ if self._core._crypto.edVerify(message["msg"], signer, sig, encodedData=True):
+ logger.info("Good signature by %s" % signer)
else:
- logger.warn('Invalid signature by ' + message['id'])
+ logger.warn("Bad signature by %s" % signer)
+ else:
+ logger.warn('Bad sender id: %s' % signer)
except FileNotFoundError:
pass
except Exception as error:
- logger.error('Failed to open block ' + str(i) + '.', error=error)
+ logger.error('Failed to open block %s.' % i, error=error)
return
def getPeerByHashId(self, hash):
@@ -423,4 +431,74 @@ class OnionrUtils:
return False
def token(self, size = 32):
+ '''
+ Generates a secure random hex encoded token
+ '''
return binascii.hexlify(os.urandom(size))
+
+ def importNewBlocks(self, scanDir=''):
+ '''
+ This function is intended to scan for new blocks ON THE DISK and import them
+ '''
+ blockList = self._core.getBlockList()
+ if scanDir == '':
+ scanDir = self._core.blockDataLocation
+ if not scanDir.endswith('/'):
+ scanDir += '/'
+ for block in glob.glob(scanDir + "*.dat"):
+ if block.replace(scanDir, '').replace('.dat', '') not in blockList:
+ logger.info('Found new block on disk %s' % block)
+ with open(block, 'rb') as newBlock:
+ block = block.replace(scanDir, '').replace('.dat', '')
+ if self._core._crypto.sha3Hash(newBlock.read()) == block.replace('.dat', ''):
+ self._core.addToBlockDB(block.replace('.dat', ''), dataSaved=True)
+ logger.info('Imported block %s.' % block)
+ else:
+ logger.warn('Failed to verify hash for %s' % block)
+
+ def progressBar(self, value = 0, endvalue = 100, width = None):
+ '''
+ Outputs a progress bar with a percentage. Write \n after use.
+ '''
+
+ if width is None:
+ width, height = shutil.get_terminal_size((80, 24))
+
+ bar_length = width - 6
+
+ percent = float(value) / endvalue
+ arrow = '─' * int(round(percent * bar_length)-1) + '>'
+ spaces = ' ' * (bar_length - len(arrow))
+
+ sys.stdout.write("\r┣{0}┫ {1}%".format(arrow + spaces, int(round(percent * 100))))
+ sys.stdout.flush()
+
+ def getEpoch(self):
+ '''returns epoch'''
+ return math.floor(time.time())
+
+def size(path='.'):
+ '''
+ Returns the size of a folder's contents in bytes
+ '''
+ total = 0
+ if os.path.exists(path):
+ if os.path.isfile(path):
+ total = os.path.getsize(path)
+ else:
+ for entry in os.scandir(path):
+ if entry.is_file():
+ total += entry.stat().st_size
+ elif entry.is_dir():
+ total += size(entry.path)
+ return total
+
+def humanSize(num, suffix='B'):
+ '''
+ Converts from bytes to a human readable format.
+ '''
+ for unit in ['', 'K', 'M', 'G', 'T', 'P', 'E', 'Z']:
+ if abs(num) < 1024.0:
+ return "%.1f %s%s" % (num, unit, suffix)
+ num /= 1024.0
+ return "%.1f %s%s" % (num, 'Yi', suffix)
\ No newline at end of file
diff --git a/onionr/static-data/default-plugins/gui/info.json b/onionr/static-data/default-plugins/gui/info.json
new file mode 100644
index 00000000..83d4489a
--- /dev/null
+++ b/onionr/static-data/default-plugins/gui/info.json
@@ -0,0 +1,5 @@
+{
+ "name" : "gui",
+ "version" : "1.0",
+ "author" : "onionr"
+}
diff --git a/onionr/default-plugins/gui/main.py b/onionr/static-data/default-plugins/gui/main.py
similarity index 66%
rename from onionr/default-plugins/gui/main.py
rename to onionr/static-data/default-plugins/gui/main.py
index cc865dbe..07e5a76e 100644
--- a/onionr/default-plugins/gui/main.py
+++ b/onionr/static-data/default-plugins/gui/main.py
@@ -16,46 +16,54 @@
'''
# Imports some useful libraries
-import logger, config
-import os, sqlite3, core
+import logger, config, core
+import os, sqlite3, threading
+from onionrblockapi import Block
+
+plugin_name = 'gui'
+
+def send():
+ global message
+ block = Block()
+ block.setType('txt')
+ block.setContent(message)
+ logger.debug('Sent message in block %s.' % block.save(sign = True))
+
def sendMessage():
global sendEntry
- messageToAdd = '-txt-' + sendEntry.get()
- #addedHash = pluginapi.get_core().setData(messageToAdd)
- #pluginapi.get_core().addToBlockDB(addedHash, selfInsert=True)
- #pluginapi.get_core().setBlockType(addedHash, 'txt')
- pluginapi.get_core().insertBlock(messageToAdd, header='txt', sign=True)
- sendEntry.delete(0, END)
+ global message
+ message = sendEntry.get()
+
+ t = threading.Thread(target = send)
+ t.start()
+
+ sendEntry.delete(0, len(message))
def update():
global listedBlocks, listbox, runningCheckDelayCount, runningCheckDelay, root, daemonStatus
- # TO DO: migrate to new header format
- for i in pluginapi.get_core().getBlocksByType('txt'):
- if i.strip() == '' or i in listedBlocks:
+ for i in Block.getBlocks(type = 'txt'):
+ if i.getContent().strip() == '' or i.getHash() in listedBlocks:
continue
- blockFile = open('./data/blocks/' + i + '.dat')
- listbox.insert(END, str(blockFile.read().replace('-txt-', '')))
- blockFile.close()
- listedBlocks.append(i)
- listbox.see(END)
- blocksList = os.listdir('./data/blocks/') # dir is your directory path
- number_blocks = len(blocksList)
+ listbox.insert(99999, str(i.getContent()))
+ listedBlocks.append(i.getHash())
+ listbox.see(99999)
+
runningCheckDelayCount += 1
if runningCheckDelayCount == runningCheckDelay:
- resp = pluginapi.get_core()._utils.localCommand('ping')
+ resp = pluginapi.daemon.local_command('ping')
if resp == 'pong':
- daemonStatus.config(text="Onionr Daemon Status: Running")
+ daemonStatus.config(text = "Onionr Daemon Status: Running")
else:
- daemonStatus.config(text="Onionr Daemon Status: Not Running")
+ daemonStatus.config(text = "Onionr Daemon Status: Not Running")
runningCheckDelayCount = 0
root.after(10000, update)
-def openGUI():
+def reallyOpenGUI():
import tkinter
global root, runningCheckDelay, runningCheckDelayCount, scrollbar, listedBlocks, nodeInfo, keyInfo, idText, idEntry, pubKeyEntry, listbox, daemonStatus, sendEntry
@@ -74,11 +82,12 @@ def openGUI():
nodeInfo = tkinter.Frame(root)
keyInfo = tkinter.Frame(root)
- print(pluginapi.get_onionr().get_hostname())
- idText = pluginapi.get_onionr().get_hostname()
+ hostname = pluginapi.get_onionr().get_hostname()
+ logger.debug('Onionr Hostname: %s' % hostname)
+ idText = hostname
idEntry = tkinter.Entry(nodeInfo)
- tkinter.Label(nodeInfo, text="Node Address: ").pack(side=tkinter.LEFT)
+ tkinter.Label(nodeInfo, text = "Node Address: ").pack(side=tkinter.LEFT)
idEntry.pack()
idEntry.insert(0, idText.strip())
idEntry.configure(state="readonly")
@@ -100,17 +109,22 @@ def openGUI():
sendEntry.pack(side=tkinter.TOP, pady=5)
sendBtn.pack(side=tkinter.TOP)
- listbox = tkinter.Listbox(root, yscrollcommand=tkinter.scrollbar.set, height=15)
+ listbox = tkinter.Listbox(root, yscrollcommand=tkinter.Scrollbar.set, height=15)
listbox.pack(fill=tkinter.BOTH, pady=25)
daemonStatus = tkinter.Label(root, text="Onionr Daemon Status: unknown")
daemonStatus.pack()
- scrollbar.config(command=tkinter.listbox.yview)
+ scrollbar.config(command=tkinter.Listbox.yview)
root.after(2000, update)
root.mainloop()
+def openGUI():
+ t = threading.Thread(target = reallyOpenGUI)
+ t.daemon = False
+ t.start()
+
def on_init(api, data = None):
global pluginapi
pluginapi = api
diff --git a/onionr/static-data/default-plugins/pluginmanager/info.json b/onionr/static-data/default-plugins/pluginmanager/info.json
new file mode 100644
index 00000000..06c7f0ab
--- /dev/null
+++ b/onionr/static-data/default-plugins/pluginmanager/info.json
@@ -0,0 +1,5 @@
+{
+ "name" : "pluginmanager",
+ "version" : "1.0",
+ "author" : "onionr"
+}
diff --git a/onionr/static-data/default-plugins/pluginmanager/main.py b/onionr/static-data/default-plugins/pluginmanager/main.py
new file mode 100644
index 00000000..59c23d44
--- /dev/null
+++ b/onionr/static-data/default-plugins/pluginmanager/main.py
@@ -0,0 +1,546 @@
+'''
+ Onionr - P2P Microblogging Platform & Social network.
+
+ This plugin acts as a plugin manager, and allows the user to install other plugins distributed over Onionr.
+'''
+'''
+ This program is free software: you can redistribute it and/or modify
+ it under the terms of the GNU General Public License as published by
+ the Free Software Foundation, either version 3 of the License, or
+ (at your option) any later version.
+
+ This program is distributed in the hope that it will be useful,
+ but WITHOUT ANY WARRANTY; without even the implied warranty of
+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ GNU General Public License for more details.
+
+ You should have received a copy of the GNU General Public License
+ along with this program. If not, see .
+'''
+
+# useful libraries
+import logger, config
+import os, sys, json, time, random, shutil, base64, getpass, datetime, re
+from onionrblockapi import Block
+
+plugin_name = 'pluginmanager'
+
+keys_data = {'keys' : {}, 'plugins' : [], 'repositories' : {}}
+
+# key functions
+
+def writeKeys():
+ '''
+ Serializes and writes the keystore in memory to file
+ '''
+
+ file = open(keys_file, 'w')
+ file.write(json.dumps(keys_data, indent=4, sort_keys=True))
+ file.close()
+
+def readKeys():
+ '''
+ Loads the keystore into memory
+ '''
+
+ global keys_data
+ keys_data = json.loads(open(keys_file).read())
+ return keys_data
+
+def getKey(plugin):
+ '''
+ Returns the public key for a given plugin
+ '''
+
+ global keys_data
+ readKeys()
+ return (keys_data['keys'][plugin] if plugin in keys_data['keys'] else None)
+
+def saveKey(plugin, key):
+ '''
+ Saves the public key for a plugin to keystore
+ '''
+
+ global keys_data
+ readKeys()
+ keys_data['keys'][plugin] = key
+ writeKeys()
+
+def getPlugins():
+ '''
+ Returns a list of plugins installed by the plugin manager
+ '''
+
+ global keys_data
+ readKeys()
+ return keys_data['plugins']
+
+def addPlugin(plugin):
+ '''
+ Saves the plugin name, to remember that it was installed by the pluginmanager
+ '''
+
+ global keys_data
+ readKeys()
+ if not plugin in keys_data['plugins']:
+ keys_data['plugins'].append(plugin)
+ writeKeys()
+
+def removePlugin(plugin):
+ '''
+ Removes the plugin name from the pluginmanager's records
+ '''
+
+ global keys_data
+ readKeys()
+ if plugin in keys_data['plugins']:
+ keys_data['plugins'].remove(plugin)
+ writeKeys()
+
+def getRepositories():
+ '''
+ Returns a list of plugins installed by the plugin manager
+ '''
+
+ global keys_data
+ readKeys()
+ return keys_data['repositories']
+
+def addRepository(repositories, data):
+ '''
+ Saves the plugin name, to remember that it was installed by the pluginmanager
+ '''
+
+ global keys_data
+ readKeys()
+ keys_data['repositories'][repositories] = data
+ writeKeys()
+
+def removeRepository(repositories):
+ '''
+ Removes the plugin name from the pluginmanager's records
+ '''
+
+ global keys_data
+ readKeys()
+ if repositories in keys_data['repositories']:
+ del keys_data['repositories'][repositories]
+ writeKeys()
+
+def check():
+ '''
+ Checks to make sure the keystore file still exists
+ '''
+
+ global keys_file
+ keys_file = pluginapi.plugins.get_data_folder(plugin_name) + 'keystore.json'
+ if not os.path.isfile(keys_file):
+ writeKeys()
+
+# plugin management
+
+def sanitize(name):
+ return re.sub('[^0-9a-zA-Z]+', '', str(name).lower())[:255]
+
+def blockToPlugin(block):
+ try:
+ block = Block(block)
+ blockContent = json.loads(block.getContent())
+
+ name = sanitize(blockContent['name'])
+ author = blockContent['author']
+ date = blockContent['date']
+ version = None
+
+ if 'version' in blockContent['info']:
+ version = blockContent['info']['version']
+
+ content = base64.b64decode(blockContent['content'].encode())
+
+ source = pluginapi.plugins.get_data_folder(plugin_name) + 'plugin.zip'
+ destination = pluginapi.plugins.get_folder(name)
+
+ with open(source, 'wb') as f:
+ f.write(content)
+
+ if os.path.exists(destination) and not os.path.isfile(destination):
+ shutil.rmtree(destination)
+
+ shutil.unpack_archive(source, destination)
+ pluginapi.plugins.enable(name)
+
+ logger.info('Installation of %s complete.' % name)
+
+ return True
+ except Exception as e:
+ logger.error('Failed to install plugin.', error = e, timestamp = False)
+
+ return False
+
+def pluginToBlock(plugin, import_block = True):
+ try:
+ plugin = sanitize(plugin)
+
+ directory = pluginapi.get_pluginapi().get_folder(plugin)
+ data_directory = pluginapi.get_pluginapi().get_data_folder(plugin)
+ zipfile = pluginapi.get_pluginapi().get_data_folder(plugin_name) + 'plugin.zip'
+
+ if os.path.exists(directory) and not os.path.isfile(directory):
+ if os.path.exists(data_directory) and not os.path.isfile(data_directory):
+ shutil.rmtree(data_directory)
+ if os.path.exists(zipfile) and os.path.isfile(zipfile):
+ os.remove(zipfile)
+ if os.path.exists(directory + '__pycache__') and not os.path.isfile(directory + '__pycache__'):
+ shutil.rmtree(directory + '__pycache__')
+
+ shutil.make_archive(zipfile[:-4], 'zip', directory)
+ data = base64.b64encode(open(zipfile, 'rb').read())
+
+ author = getpass.getuser()
+ description = 'Default plugin description'
+ info = {"name" : plugin}
+ try:
+ if os.path.exists(directory + 'info.json'):
+ info = json.loads(open(directory + 'info.json').read())
+ if 'author' in info:
+ author = info['author']
+ if 'description' in info:
+ description = info['description']
+ except:
+ pass
+
+ metadata = {'author' : author, 'date' : str(datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S')), 'name' : plugin, 'info' : info, 'compiled-by' : plugin_name, 'content' : data.decode('utf-8'), 'description' : description}
+
+ hash = pluginapi.get_core().insertBlock(json.dumps(metadata), header = 'plugin', sign = True)
+
+ if import_block:
+ pluginapi.get_utils().importNewBlocks()
+
+ return hash
+ else:
+ logger.error('Plugin %s does not exist.' % plugin)
+ except Exception as e:
+ logger.error('Failed to convert plugin to block.', error = e, timestamp = False)
+
+ return False
+
+def installBlock(block):
+ try:
+ block = Block(block)
+ blockContent = json.loads(block.getContent())
+
+ name = sanitize(blockContent['name'])
+ author = blockContent['author']
+ date = blockContent['date']
+ version = None
+
+ if 'version' in blockContent['info']:
+ version = blockContent['info']['version']
+
+ install = False
+
+ logger.info(('Will install %s' + (' v' + version if not version is None else '') + ' (%s), by %s') % (name, date, author))
+
+ # TODO: Convert to single line if statement
+ if os.path.exists(pluginapi.plugins.get_folder(name)):
+ install = logger.confirm(message = 'Continue with installation (will overwrite existing plugin) %s?')
+ else:
+ install = logger.confirm(message = 'Continue with installation %s?')
+
+ if install:
+ blockToPlugin(block.getHash())
+ addPlugin(name)
+ else:
+ logger.info('Installation cancelled.')
+ return False
+
+ return True
+ except Exception as e:
+ logger.error('Failed to install plugin.', error = e, timestamp = False)
+ return False
+
+def uninstallPlugin(plugin):
+ try:
+ plugin = sanitize(plugin)
+
+ pluginFolder = pluginapi.plugins.get_folder(plugin)
+ exists = (os.path.exists(pluginFolder) and not os.path.isfile(pluginFolder))
+ installedByPluginManager = plugin in getPlugins()
+ remove = False
+
+ if not exists:
+ logger.warn('Plugin %s does not exist.' % plugin, timestamp = False)
+ return False
+
+ default = 'y'
+ if not installedByPluginManager:
+ logger.warn('The plugin %s was not installed by %s.' % (plugin, plugin_name), timestamp = False)
+ default = 'n'
+ remove = logger.confirm(message = 'All plugin data will be lost. Are you sure you want to proceed %s?', default = default)
+
+ if remove:
+ if installedByPluginManager:
+ removePlugin(plugin)
+ pluginapi.plugins.disable(plugin)
+ shutil.rmtree(pluginFolder)
+
+ logger.info('Uninstallation of %s complete.' % plugin)
+
+ return True
+ else:
+ logger.info('Uninstallation cancelled.')
+ except Exception as e:
+ logger.error('Failed to uninstall plugin.', error = e)
+ return False
+
+# command handlers
+
+def help():
+ logger.info(sys.argv[0] + ' ' + sys.argv[1] + ' [public key/block hash]')
+ logger.info(sys.argv[0] + ' ' + sys.argv[1] + ' [public key/block hash]')
+
+def commandInstallPlugin():
+ if len(sys.argv) >= 3:
+ check()
+
+ pluginname = sys.argv[2]
+ pkobh = None # public key or block hash
+
+ version = None
+ if ':' in pluginname:
+ details = pluginname.split(':')
+ pluginname = sanitize(details[0])
+ version = details[1]
+
+ sanitize(pluginname)
+
+ if len(sys.argv) >= 4:
+ # public key or block hash specified
+ pkobh = sys.argv[3]
+ else:
+ # none specified, check if in config file
+ pkobh = getKey(pluginname)
+
+ if pkobh is None:
+ # still nothing found, try searching repositories
+ logger.info('Searching for public key in repositories...')
+ try:
+ repos = getRepositories()
+ distributors = list()
+ for repo, records in repos.items():
+ if pluginname in records:
+ logger.debug('Found %s in repository %s for plugin %s.' % (records[pluginname], repo, pluginname))
+ distributors.append(records[pluginname])
+
+ if len(distributors) != 0:
+ distributor = None
+
+ if len(distributors) == 1:
+ logger.info('Found distributor: %s' % distributors[0])
+ distributor = distributors[0]
+ else:
+ distributors_message = ''
+
+ index = 1
+ for dist in distributors:
+ distributors_message += ' ' + logger.colors.bold + str(index) + ') ' + logger.colors.reset + str(dist) + '\n'
+ index += 1
+
+ logger.info((logger.colors.bold + 'Found distributors (%s):' + logger.colors.reset + '\n' + distributors_message) % len(distributors))
+
+ valid = False
+ while not valid:
+ choice = logger.readline('Select the number of the key to use, from 1 to %s, or press Ctrl+C to cancel:' % (index - 1))
+
+ try:
+ if int(choice) < index and int(choice) >= 1:
+ distributor = distributors[int(choice) - 1]
+ valid = True
+ except KeyboardInterrupt:
+ logger.info('Installation cancelled.')
+ return True
+ except:
+ pass
+
+ if not distributor is None:
+ pkobh = distributor
+ except Exception as e:
+ logger.warn('Failed to lookup plugin in repositories.', timestamp = False)
+ logger.error('asdf', error = e, timestamp = False)
+
+ if pkobh is None:
+ logger.error('No key for this plugin found in keystore or repositories, please specify.')
+ help()
+ return True
+
+ valid_hash = pluginapi.get_utils().validateHash(pkobh)
+ real_block = False
+ valid_key = pluginapi.get_utils().validatePubKey(pkobh)
+ real_key = False
+
+ if valid_hash:
+ real_block = Block.exists(pkobh)
+ elif valid_key:
+ real_key = pluginapi.get_utils().hasKey(pkobh)
+
+ blockhash = None
+
+ if valid_hash and not real_block:
+ logger.error('Block hash not found. Perhaps it has not been synced yet?')
+ logger.debug('Is valid hash, but does not belong to a known block.')
+
+ return True
+ elif valid_hash and real_block:
+ blockhash = str(pkobh)
+ logger.debug('Using block %s...' % blockhash)
+
+ installBlock(blockhash)
+ elif valid_key and not real_key:
+ logger.error('Public key not found. Try adding the node by address manually, if possible.')
+ logger.debug('Is valid key, but the key is not a known one.')
+ elif valid_key and real_key:
+ publickey = str(pkobh)
+ logger.debug('Using public key %s...' % publickey)
+
+ saveKey(pluginname, pkobh)
+
+ signedBlocks = Block.getBlocks(type = 'plugin', signed = True, signer = publickey)
+
+ mostRecentTimestamp = None
+ mostRecentVersionBlock = None
+
+ for block in signedBlocks:
+ try:
+ blockContent = json.loads(block.getContent())
+
+ if not (('author' in blockContent) and ('info' in blockContent) and ('date' in blockContent) and ('name' in blockContent)):
+ raise ValueError('Missing required parameter `date` in block %s.' % block.getHash())
+
+ blockDatetime = datetime.datetime.strptime(blockContent['date'], '%Y-%m-%d %H:%M:%S')
+
+ if blockContent['name'] == pluginname:
+ if ('version' in blockContent['info']) and (blockContent['info']['version'] == version) and (not version is None):
+ mostRecentTimestamp = blockDatetime
+ mostRecentVersionBlock = block.getHash()
+ break
+ elif mostRecentTimestamp is None:
+ mostRecentTimestamp = blockDatetime
+ mostRecentVersionBlock = block.getHash()
+ elif blockDatetime > mostRecentTimestamp:
+ mostRecentTimestamp = blockDatetime
+ mostRecentVersionBlock = block.getHash()
+ except Exception as e:
+ pass
+
+ logger.warn('Only continue the installation if you are absolutely certain that you trust the plugin distributor. Public key of plugin distributor: %s' % publickey, timestamp = False)
+ installBlock(mostRecentVersionBlock)
+ else:
+ logger.error('Unknown data "%s"; must be public key or block hash.' % str(pkobh))
+ return
+ else:
+ logger.info(sys.argv[0] + ' ' + sys.argv[1] + ' [public key/block hash]')
+
+ return True
+
+def commandUninstallPlugin():
+ if len(sys.argv) >= 3:
+ uninstallPlugin(sys.argv[2])
+ else:
+ logger.info(sys.argv[0] + ' ' + sys.argv[1] + ' ')
+
+ return True
+
+def commandSearchPlugin():
+ logger.info('This feature has not been created yet. Please check back later.')
+ return True
+
+def commandAddRepository():
+ if len(sys.argv) >= 3:
+ check()
+
+ blockhash = sys.argv[2]
+
+ if pluginapi.get_utils().validateHash(blockhash):
+ if Block.exists(blockhash):
+ try:
+ blockContent = json.loads(Block(blockhash).getContent())
+
+ pluginslist = dict()
+
+ for pluginname, distributor in blockContent['plugins'].items():
+ if pluginapi.get_utils().validatePubKey(distributor):
+ pluginslist[pluginname] = distributor
+
+ logger.debug('Found %s records in repository.' % len(pluginslist))
+
+ if len(pluginslist) != 0:
+ addRepository(blockhash, pluginslist)
+ logger.info('Successfully added repository.')
+ else:
+ logger.error('Repository contains no records, not importing.')
+ except Exception as e:
+ logger.error('Failed to parse block.', error = e)
+ else:
+ logger.error('Block hash not found. Perhaps it has not been synced yet?')
+ logger.debug('Is valid hash, but does not belong to a known block.')
+ else:
+ logger.error('Unknown data "%s"; must be block hash.' % str(pkobh))
+ else:
+ logger.info(sys.argv[0] + ' ' + sys.argv[1] + ' [block hash]')
+
+ return True
+
+def commandRemoveRepository():
+ if len(sys.argv) >= 3:
+ check()
+
+ blockhash = sys.argv[2]
+
+ if pluginapi.get_utils().validateHash(blockhash):
+ if blockhash in getRepositories():
+ try:
+ removeRepository(blockhash)
+ except Exception as e:
+ logger.error('Failed to parse block.', error = e)
+ else:
+ logger.error('Repository has not been imported, nothing to remove.')
+ else:
+ logger.error('Unknown data "%s"; must be block hash.' % str(pkobh))
+ else:
+ logger.info(sys.argv[0] + ' ' + sys.argv[1] + ' [block hash]')
+
+ return True
+
+def commandPublishPlugin():
+ if len(sys.argv) >= 3:
+ check()
+
+ pluginname = sanitize(sys.argv[2])
+ pluginfolder = pluginapi.plugins.get_folder(pluginname)
+
+ if os.path.exists(pluginfolder) and not os.path.isfile(pluginfolder):
+ block = pluginToBlock(pluginname)
+ logger.info('Plugin saved in block %s.' % block)
+ else:
+ logger.error('Plugin %s does not exist.' % pluginname, timestamp = False)
+ else:
+ logger.info(sys.argv[0] + ' ' + sys.argv[1] + ' ')
+
+# event listeners
+
+def on_init(api, data = None):
+ global pluginapi
+ pluginapi = api
+ check()
+
+ # register some commands
+ api.commands.register(['install-plugin', 'installplugin', 'plugin-install', 'install', 'plugininstall'], commandInstallPlugin)
+ api.commands.register(['remove-plugin', 'removeplugin', 'plugin-remove', 'uninstall-plugin', 'uninstallplugin', 'plugin-uninstall', 'uninstall', 'remove', 'pluginremove'], commandUninstallPlugin)
+ api.commands.register(['search', 'filter-plugins', 'search-plugins', 'searchplugins', 'search-plugin', 'searchplugin', 'findplugin', 'find-plugin', 'filterplugin', 'plugin-search', 'pluginsearch'], commandSearchPlugin)
+ api.commands.register(['add-repo', 'add-repository', 'addrepo', 'addrepository', 'repository-add', 'repo-add', 'repoadd', 'addrepository', 'add-plugin-repository', 'add-plugin-repo', 'add-pluginrepo', 'add-pluginrepository', 'addpluginrepo', 'addpluginrepository'], commandAddRepository)
+ api.commands.register(['remove-repo', 'remove-repository', 'removerepo', 'removerepository', 'repository-remove', 'repo-remove', 'reporemove', 'removerepository', 'remove-plugin-repository', 'remove-plugin-repo', 'remove-pluginrepo', 'remove-pluginrepository', 'removepluginrepo', 'removepluginrepository', 'rm-repo', 'rm-repository', 'rmrepo', 'rmrepository', 'repository-rm', 'repo-rm', 'reporm', 'rmrepository', 'rm-plugin-repository', 'rm-plugin-repo', 'rm-pluginrepo', 'rm-pluginrepository', 'rmpluginrepo', 'rmpluginrepository'], commandRemoveRepository)
+ api.commands.register(['publish-plugin', 'plugin-publish', 'publishplugin', 'pluginpublish', 'publish'], commandPublishPlugin)
+
+ # add help menus once the features are actually implemented
+
+ return
diff --git a/onionr/static-data/default_config.json b/onionr/static-data/default_config.json
index 020107ce..ccafd7a4 100644
--- a/onionr/static-data/default_config.json
+++ b/onionr/static-data/default_config.json
@@ -12,5 +12,9 @@
"output": true,
"color": true
}
+ },
+ "allocations":{
+ "disk": 1000000000,
+ "netTotal": 1000000000
}
}
diff --git a/onionr/static-data/default_plugin.py b/onionr/static-data/default_plugin.py
index edd8247f..cc6d1d20 100644
--- a/onionr/static-data/default_plugin.py
+++ b/onionr/static-data/default_plugin.py
@@ -1,11 +1,13 @@
'''
- Default plugin template file
+ $name plugin template file.
Generated on $date by $user.
'''
# Imports some useful libraries
import logger, config
+plugin_name = '$name'
+
def on_init(api, data = None):
'''
This event is called after Onionr is initialized, but before the command
diff --git a/onionr/tests.py b/onionr/tests.py
index 7eea62d3..db62dcb3 100755
--- a/onionr/tests.py
+++ b/onionr/tests.py
@@ -14,7 +14,7 @@
You should have received a copy of the GNU General Public License
along with this program. If not, see .
'''
-import unittest, sys, os, base64, tarfile, shutil, simplecrypt, logger, btc
+import unittest, sys, os, base64, tarfile, shutil, simplecrypt, logger #, btc
class OnionrTests(unittest.TestCase):
def testPython3(self):
@@ -56,7 +56,7 @@ class OnionrTests(unittest.TestCase):
myCore = core.Core()
if not os.path.exists('data/peers.db'):
myCore.createPeerDB()
- if myCore.addPeer('6M5MXL237OK57ITHVYN5WGHANPGOMKS5C3PJLHBBNKFFJQOIDOJA====') and not myCore.addPeer('NFXHMYLMNFSAU==='):
+ if myCore.addPeer('6M5MXL237OK57ITHVYN5WGHANPGOMKS5C3PJLHBBNKFFJQOIDOJA====', '1cSix9Ao/yQSdo0sNif8cm2uTcYnSphb4JdZL/3WkN4=') and not myCore.addPeer('NFXHMYLMNFSAU===', '1cSix9Ao/yQSdo0sNif8cm2uTcYnSphb4JdZL/3WkN4='):
self.assertTrue(True)
else:
self.assertTrue(False)