2018-01-02 08:43:29 +00:00
|
|
|
'''
|
2018-08-27 03:44:32 +00:00
|
|
|
Onionr - P2P Anonymous Storage Network
|
2018-01-14 08:48:23 +00:00
|
|
|
|
2018-02-01 22:45:15 +00:00
|
|
|
Core Onionr library, useful for external programs. Handles peer & data processing
|
2018-01-14 08:48:23 +00:00
|
|
|
'''
|
|
|
|
'''
|
2018-01-02 08:43:29 +00:00
|
|
|
This program is free software: you can redistribute it and/or modify
|
|
|
|
it under the terms of the GNU General Public License as published by
|
|
|
|
the Free Software Foundation, either version 3 of the License, or
|
|
|
|
(at your option) any later version.
|
|
|
|
|
|
|
|
This program is distributed in the hope that it will be useful,
|
|
|
|
but WITHOUT ANY WARRANTY; without even the implied warranty of
|
|
|
|
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
|
|
|
GNU General Public License for more details.
|
|
|
|
|
|
|
|
You should have received a copy of the GNU General Public License
|
|
|
|
along with this program. If not, see <https://www.gnu.org/licenses/>.
|
|
|
|
'''
|
2018-10-30 22:22:06 +00:00
|
|
|
import sqlite3, os, sys, time, math, base64, tarfile, nacl, logger, json, netcontroller, math, config
|
2018-05-19 22:11:51 +00:00
|
|
|
from onionrblockapi import Block
|
2018-01-07 08:55:44 +00:00
|
|
|
|
2018-06-19 07:01:59 +00:00
|
|
|
import onionrutils, onionrcrypto, onionrproofs, onionrevents as events, onionrexceptions, onionrvalues
|
2018-10-06 18:06:46 +00:00
|
|
|
import onionrblacklist, onionrchat, onionrusers
|
2018-08-16 05:01:40 +00:00
|
|
|
import dbcreator
|
2018-01-22 02:49:11 +00:00
|
|
|
# Python < 3.6 has no built-in sha3 in hashlib; require the pysha3 backport there.
if sys.version_info < (3, 6):
    try:
        import sha3
    except ModuleNotFoundError:
        # Cannot operate without sha3 (block hashing depends on it), so abort early
        logger.fatal('On Python 3 versions prior to 3.6.x, you need the sha3 module')
        sys.exit(1)
|
|
|
|
|
2018-01-02 08:43:29 +00:00
|
|
|
class Core:
    def __init__(self, torPort=0):
        '''
            Initialize Core Onionr library
        '''
        # Data directory comes from the ONIONR_HOME env var when set,
        # otherwise falls back to the relative 'data/' directory
        try:
            self.dataDir = os.environ['ONIONR_HOME']
            if not self.dataDir.endswith('/'):
                self.dataDir += '/'
        except KeyError:
            self.dataDir = 'data/'

        try:
            # Paths to the sqlite databases and block storage under dataDir
            self.queueDB = self.dataDir + 'queue.db'
            self.peerDB = self.dataDir + 'peers.db'
            self.blockDB = self.dataDir + 'blocks.db'
            self.blockDataLocation = self.dataDir + 'blocks/'
            self.addressDB = self.dataDir + 'address.db'
            # Our Tor hidden service address; filled in below if Tor generated one
            self.hsAddress = ''
            self.bootstrapFileLocation = 'static-data/bootstrap-nodes.txt'
            self.bootstrapList = []
            self.requirements = onionrvalues.OnionrValues()
            self.torPort = torPort
            # File recording nonces of blocks we have already inserted (duplicate guard)
            self.dataNonceFile = self.dataDir + 'block-nonces.dat'
            self.dbCreate = dbcreator.DBCreator(self)
            self.forwardKeysFile = self.dataDir + 'forward-keys.db'

            # Socket data, defined here because of multithreading constraints with gevent
            self.killSockets = False
            self.startSocket = {}
            self.socketServerConnData = {}
            self.socketReasons = {}
            self.socketServerResponseData = {}

            self.usageFile = self.dataDir + 'disk-usage.txt'
            self.config = config

            self.maxBlockSize = 10000000 # max block size in bytes

            # Ensure the data directory and block storage exist before any DB work
            if not os.path.exists(self.dataDir):
                os.mkdir(self.dataDir)
            if not os.path.exists(self.dataDir + 'blocks/'):
                os.mkdir(self.dataDir + 'blocks/')
            if not os.path.exists(self.blockDB):
                self.createBlockDB()
            if not os.path.exists(self.forwardKeysFile):
                self.dbCreate.createForwardKeyDB()

            # Read our hidden service hostname if Tor has already generated it
            if os.path.exists(self.dataDir + '/hs/hostname'):
                with open(self.dataDir + '/hs/hostname', 'r') as hs:
                    self.hsAddress = hs.read().strip()

            # Load bootstrap address list
            if os.path.exists(self.bootstrapFileLocation):
                with open(self.bootstrapFileLocation, 'r') as bootstrap:
                    bootstrap = bootstrap.read()
                    for i in bootstrap.split('\n'):
                        self.bootstrapList.append(i)
            else:
                logger.warn('Warning: address bootstrap file not found ' + self.bootstrapFileLocation)

            self._utils = onionrutils.OnionrUtils(self)
            # Initialize the crypto object
            self._crypto = onionrcrypto.OnionrCrypto(self)
            self._blacklist = onionrblacklist.OnionrBlackList(self)

        except Exception as error:
            # Initialization is all-or-nothing: any failure here is unrecoverable
            logger.error('Failed to initialize core Onionr library.', error=error)
            logger.fatal('Cannot recover from error.')
            sys.exit(1)
        return
|
2018-01-10 03:50:38 +00:00
|
|
|
|
2018-08-08 19:26:02 +00:00
|
|
|
def refreshFirstStartVars(self):
    '''Hack to refresh some vars which may not be set on first start'''
    # The hostname file only appears after Tor creates the hidden service
    hostnameFile = self.dataDir + '/hs/hostname'
    if os.path.exists(hostnameFile):
        with open(hostnameFile, 'r') as hs:
            self.hsAddress = hs.read().strip()
|
|
|
|
|
2018-05-07 06:55:03 +00:00
|
|
|
def addPeer(self, peerID, powID, name=''):
    '''
        Adds a public key to the key database (misleading function name)

        Returns False for invalid keys, oversized POW tokens, or keys
        already present; True when the key was inserted.
    '''
    # This function simply adds a peer to the DB
    if not self._utils.validatePubKey(peerID):
        return False
    if sys.getsizeof(powID) > 120:
        logger.warn("POW token for pubkey base64 representation exceeded 120 bytes, is " + str(sys.getsizeof(powID)))
        return False

    events.event('pubkey_add', data = {'key': peerID}, onionr = None)

    conn = sqlite3.connect(self.peerDB, timeout=10)
    hashID = self._crypto.pubKeyHashID(peerID)
    c = conn.cursor()
    # BUGFIX: value order must match the column list in the INSERT below;
    # previously powID and hashID were swapped (hashID landed in the pow
    # column and vice versa).
    t = (peerID, name, 'unknown', powID, hashID, 0)

    # Parameterized query instead of string concatenation (avoids SQL injection)
    for i in c.execute('SELECT * FROM peers WHERE id = ?;', (peerID,)):
        try:
            if i[0] == peerID:
                # Peer already known; do not insert a duplicate
                conn.close()
                return False
        except (ValueError, IndexError):
            pass
    c.execute('INSERT INTO peers (id, name, dateSeen, pow, hashID, trust) VALUES(?, ?, ?, ?, ?, ?);', t)
    conn.commit()
    conn.close()

    return True
|
2018-02-27 21:23:49 +00:00
|
|
|
|
|
|
|
def addAddress(self, address):
    '''
        Add an address to the address database (only tor currently)

        Returns True on insert, False for our own address, invalid IDs,
        or addresses already present.
    '''
    # Never add ourselves as a peer
    if address == config.get('i2p.ownAddr', None) or address == self.hsAddress:
        return False
    if self._utils.validateID(address):
        conn = sqlite3.connect(self.addressDB, timeout=10)
        c = conn.cursor()
        # Defense in depth: the address is validated above, but we strip some chars here too just in case
        address = address.replace('\'', '').replace(';', '').replace('"', '').replace('\\', '')
        # check if address is in database — parameterized query rather than
        # string concatenation, so no injection is possible even if
        # validation/stripping ever regresses
        for i in c.execute('SELECT * FROM adders WHERE address = ?;', (address,)):
            try:
                if i[0] == address:
                    conn.close()
                    return False
            except (ValueError, IndexError):
                pass

        t = (address, 1)
        c.execute('INSERT INTO adders (address, type) VALUES(?, ?);', t)
        conn.commit()
        conn.close()

        events.event('address_add', data = {'address': address}, onionr = None)

        return True
    else:
        logger.debug('Invalid ID')
        return False
|
2018-02-28 00:00:37 +00:00
|
|
|
|
2018-02-27 21:23:49 +00:00
|
|
|
def removeAddress(self, address):
    '''
        Remove an address from the address database
    '''
    # Guard clause: refuse anything that does not look like a valid address
    if not self._utils.validateID(address):
        return False
    conn = sqlite3.connect(self.addressDB, timeout=10)
    cursor = conn.cursor()
    cursor.execute('Delete from adders where address=?;', (address,))
    conn.commit()
    conn.close()

    events.event('address_remove', data = {'address': address}, onionr = None)
    return True
|
2018-05-02 06:01:20 +00:00
|
|
|
|
2018-04-23 06:03:10 +00:00
|
|
|
def removeBlock(self, block):
    '''
        remove a block from this node (does not automatically blacklist)

        **You may want blacklist.addToDB(blockHash)
    '''
    if not self._utils.validateHash(block):
        return
    # Drop the hash row first, then reclaim the on-disk data
    conn = sqlite3.connect(self.blockDB, timeout=10)
    cursor = conn.cursor()
    cursor.execute('Delete from hashes where hash=?;', (block,))
    conn.commit()
    conn.close()

    blockFile = self.dataDir + '/blocks/' + block + '.dat'
    try:
        ''' Get size of data when loaded as an object/var, rather than on disk,
            to avoid conflict with getsizeof when saving blocks
        '''
        with open(blockFile, 'r') as data:
            payloadSize = sys.getsizeof(data.read())
        self._utils.storageCounter.removeBytes(payloadSize)
        os.remove(blockFile)
    except FileNotFoundError:
        pass
|
2018-02-27 21:23:49 +00:00
|
|
|
|
2018-02-21 09:32:31 +00:00
|
|
|
def createAddressDB(self):
    '''
        Generate the address database
    '''
    # Schema creation lives in dbcreator; this is just a convenience wrapper
    creator = self.dbCreate
    creator.createAddressDB()
|
2018-01-10 03:50:38 +00:00
|
|
|
|
|
|
|
def createPeerDB(self):
    '''
        Generate the peer sqlite3 database and populate it with the peers table.
    '''
    # Delegates the actual schema work to dbcreator
    creator = self.dbCreate
    creator.createPeerDB()
|
2018-02-04 03:44:29 +00:00
|
|
|
|
2018-01-21 05:49:16 +00:00
|
|
|
def createBlockDB(self):
    '''
        Create a database for blocks
    '''
    # Thin wrapper: dbcreator owns the schema
    creator = self.dbCreate
    creator.createBlockDB()
|
2018-02-04 03:44:29 +00:00
|
|
|
|
2018-05-10 07:42:24 +00:00
|
|
|
def addToBlockDB(self, newHash, selfInsert=False, dataSaved=False):
    '''
        Add a hash value to the block db

        Should be in hex format!
    '''
    if not os.path.exists(self.blockDB):
        raise Exception('Block db does not exist')
    if self._utils.hasBlock(newHash):
        # Already recorded; nothing to do
        return
    # dataSaved flag is 1 when we inserted the block ourselves or already hold its data
    savedFlag = 1 if (selfInsert or dataSaved) else 0
    conn = sqlite3.connect(self.blockDB, timeout=10)
    cursor = conn.cursor()
    cursor.execute('INSERT INTO hashes (hash, dateReceived, dataType, dataSaved) VALUES(?, ?, ?, ?);',
                   (newHash, self._utils.getEpoch(), '', savedFlag))
    conn.commit()
    conn.close()

    return
|
|
|
|
|
2018-05-13 03:45:32 +00:00
|
|
|
def getData(self, hash):
    '''
        Simply return the data associated to a hash
    '''
    filePath = self.blockDataLocation + hash + '.dat'
    try:
        # Context manager guarantees the handle is closed even on read errors
        with open(filePath, 'rb') as dataFile:
            contents = dataFile.read()
    except FileNotFoundError:
        # Missing block data is signalled with False, not an exception
        contents = False
    return contents
|
|
|
|
|
2018-01-22 02:49:11 +00:00
|
|
|
def setData(self, data):
    '''
        Set the data assciated with a hash

        Returns the (hex) hash of the stored data. Raises
        onionrexceptions.DiskAllocationReached when the disk budget is full.
    '''
    # Size is measured on the object as passed in, before encoding, to stay
    # consistent with how getsizeof is used elsewhere when blocks are saved
    dataSize = sys.getsizeof(data)

    if not type(data) is bytes:
        data = data.encode()

    dataHash = self._crypto.sha3Hash(data)

    if type(dataHash) is bytes:
        dataHash = dataHash.decode()
    blockFileName = self.blockDataLocation + dataHash + '.dat'
    if os.path.exists(blockFileName):
        pass # TODO: properly check if block is already saved elsewhere
        #raise Exception("Data is already set for " + dataHash)
    else:
        if self._utils.storageCounter.addBytes(dataSize) != False:
            # with-statements close the handles even if a write fails
            with open(blockFileName, 'wb') as blockFile:
                blockFile.write(data)
            conn = sqlite3.connect(self.blockDB, timeout=10)
            c = conn.cursor()
            # Parameterized query (previously built by string concatenation)
            c.execute('UPDATE hashes SET dataSaved=1 WHERE hash = ?;', (dataHash,))
            conn.commit()
            conn.close()
            with open(self.dataNonceFile, 'a') as nonceFile:
                nonceFile.write(dataHash + '\n')
        else:
            raise onionrexceptions.DiskAllocationReached

    return dataHash
|
2018-01-22 02:49:11 +00:00
|
|
|
|
2018-01-02 08:43:29 +00:00
|
|
|
def daemonQueue(self):
    '''
        Gives commands to the communication proccess/daemon by reading an sqlite3 database

        This function intended to be used by the client. Queue to exchange data between "client" and server.
    '''
    retData = False
    if not os.path.exists(self.queueDB):
        # First run: no queue yet, create it and return nothing
        self.dbCreate.createDaemonDB()
    else:
        conn = sqlite3.connect(self.queueDB, timeout=10)
        cursor = conn.cursor()
        try:
            row = cursor.execute('SELECT command, data, date, min(ID) FROM commands group by id').fetchone()
        except sqlite3.OperationalError:
            # Table missing/corrupt: recreate the queue database
            self.dbCreate.createDaemonDB()
        else:
            if row is not None:
                retData = row
                # Pop the command we just read
                cursor.execute('DELETE FROM commands WHERE id=?;', (retData[3],))
        conn.commit()
        conn.close()

    events.event('queue_pop', data = {'data': retData}, onionr = None)

    return retData
|
2018-07-04 19:07:17 +00:00
|
|
|
|
2018-01-02 08:43:29 +00:00
|
|
|
def daemonQueueAdd(self, command, data=''):
    '''
        Add a command to the daemon queue, used by the communication daemon (communicator.py)
    '''
    # Intended to be used by the web server
    succeeded = True
    timestamp = self._utils.getEpoch()
    conn = sqlite3.connect(self.queueDB, timeout=10)
    cursor = conn.cursor()
    try:
        cursor.execute('INSERT INTO commands (command, data, date) VALUES(?, ?, ?)',
                       (command, data, timestamp))
        conn.commit()
        conn.close()
    except sqlite3.OperationalError:
        # Queue table may not exist yet; report failure and let daemonQueue recreate it
        succeeded = False
        self.daemonQueue()
    events.event('queue_push', data = {'command': command, 'data': data}, onionr = None)

    return succeeded
|
2018-02-04 03:44:29 +00:00
|
|
|
|
2018-01-27 01:16:15 +00:00
|
|
|
def clearDaemonQueue(self):
    '''
        Clear the daemon queue (somewhat dangerous)
    '''
    conn = sqlite3.connect(self.queueDB, timeout=10)
    c = conn.cursor()
    try:
        c.execute('DELETE FROM commands;')
        conn.commit()
    except sqlite3.Error:
        # Previously a bare `except:` that hid every failure; narrowed to
        # sqlite errors so the clear stays best-effort (e.g. missing table)
        # without swallowing unrelated bugs.
        pass
    conn.close()
    events.event('queue_clear', onionr = None)

    return
|
2018-04-19 02:16:10 +00:00
|
|
|
|
2018-02-22 09:33:30 +00:00
|
|
|
def listAdders(self, randomOrder=True, i2p=True):
    '''
        Return a list of addresses
    '''
    conn = sqlite3.connect(self.addressDB, timeout=10)
    cursor = conn.cursor()
    # Optional shuffle is pushed down into SQL
    query = 'SELECT * FROM adders ORDER BY RANDOM();' if randomOrder else 'SELECT * FROM adders;'
    addressList = [row[0] for row in cursor.execute(query)]
    conn.close()
    return addressList
|
2018-02-04 03:44:29 +00:00
|
|
|
|
2018-08-27 03:44:32 +00:00
|
|
|
def listPeers(self, randomOrder=True, getPow=False, trust=0):
    '''
        Return a list of public keys (misleading function name)

        randomOrder determines if the list should be in a random order
        trust sets the minimum trust to list
    '''
    # Validate before opening the connection; the old code opened the
    # connection first and leaked it on the invalid-trust early return
    if trust not in (0, 1, 2):
        logger.error('Tried to select invalid trust.')
        return

    conn = sqlite3.connect(self.peerDB, timeout=10)
    c = conn.cursor()
    # Parameterized instead of %-formatting the trust value into the SQL
    if randomOrder:
        payload = 'SELECT * FROM peers WHERE trust >= ? ORDER BY RANDOM();'
    else:
        payload = 'SELECT * FROM peers WHERE trust >= ?;'
    peerList = []
    for i in c.execute(payload, (trust,)):
        try:
            if len(i[0]) != 0:
                if getPow:
                    # key + '-' + pow token
                    peerList.append(i[0] + '-' + i[1])
                else:
                    peerList.append(i[0])
        except TypeError:
            pass
    # Always include our own key at the end
    if getPow:
        try:
            peerList.append(self._crypto.pubKey + '-' + self._crypto.pubKeyPowToken)
        except TypeError:
            pass
    else:
        peerList.append(self._crypto.pubKey)
    conn.close()
    return peerList
|
|
|
|
|
|
|
|
def getPeerInfo(self, peer, info):
    '''
        Get info about a peer from their database entry

        id text 0
        name text, 1
        adders text, 2
        dateSeen not null, 3
        bytesStored int, 4
        trust int 5
        pubkeyExchanged int 6
        hashID text 7
        pow text 8
    '''
    conn = sqlite3.connect(self.peerDB, timeout=10)
    cursor = conn.cursor()
    # Translate the column name into its positional index
    infoNumbers = {'id': 0, 'name': 1, 'adders': 2, 'dateSeen': 3, 'bytesStored': 4, 'trust': 5, 'pubkeyExchanged': 6, 'hashID': 7}
    wantedIndex = infoNumbers[info]
    columnIndex = 0
    result = ''
    for row in cursor.execute('SELECT * from peers where id=?;', (peer,)):
        for value in row:
            if columnIndex == wantedIndex:
                result = value
                break
            columnIndex += 1
    conn.close()

    return result
|
2018-02-04 03:44:29 +00:00
|
|
|
|
2018-01-28 01:53:24 +00:00
|
|
|
def setPeerInfo(self, peer, key, data):
    '''
        Update a peer for a key
    '''
    conn = sqlite3.connect(self.peerDB, timeout=10)
    cursor = conn.cursor()
    # TODO: validate key on whitelist
    # Column names cannot be bound as parameters, so the key is checked
    # against a fixed whitelist before being spliced into the statement
    allowedKeys = ('id', 'name', 'pubkey', 'blockDBHash', 'forwardKey', 'dateSeen', 'bytesStored', 'trust')
    if key not in allowedKeys:
        raise Exception("Got invalid database key when setting peer info")
    cursor.execute('UPDATE peers SET ' + key + ' = ? WHERE id=?', (data, peer))
    conn.commit()
    conn.close()
    return
|
2018-01-26 06:28:11 +00:00
|
|
|
|
2018-02-28 00:00:37 +00:00
|
|
|
def getAddressInfo(self, address, info):
    '''
        Get info about an address from its database entry

        address text, 0
        type int, 1
        knownPeer text, 2
        speed int, 3
        success int, 4
        DBHash text, 5
        failure int 6
        lastConnect 7
    '''
    conn = sqlite3.connect(self.addressDB, timeout=10)
    cursor = conn.cursor()
    # Map the requested column name to its positional index
    infoNumbers = {'address': 0, 'type': 1, 'knownPeer': 2, 'speed': 3, 'success': 4, 'DBHash': 5, 'failure': 6, 'lastConnect': 7}
    wantedIndex = infoNumbers[info]
    columnIndex = 0
    result = ''
    for row in cursor.execute('SELECT * from adders where address=?;', (address,)):
        for value in row:
            if columnIndex == wantedIndex:
                result = value
                break
            columnIndex += 1
    conn.close()
    return result
|
|
|
|
|
|
|
|
def setAddressInfo(self, address, key, data):
    '''
        Update an address for a key
    '''
    conn = sqlite3.connect(self.addressDB, timeout=10)
    cursor = conn.cursor()
    # TODO: validate key on whitelist
    # Column name is whitelisted since it cannot be a bound parameter
    allowedKeys = ('address', 'type', 'knownPeer', 'speed', 'success', 'DBHash', 'failure', 'lastConnect', 'lastConnectAttempt')
    if key not in allowedKeys:
        raise Exception("Got invalid database key when setting address info")
    else:
        cursor.execute('UPDATE adders SET ' + key + ' = ? WHERE address=?', (data, address))
        conn.commit()
        conn.close()
    return
|
|
|
|
|
2018-07-11 19:45:38 +00:00
|
|
|
def getBlockList(self, unsaved = False): # TODO: Use unsaved??
    '''
        Get list of our blocks

        unsaved=True restricts the list to blocks whose data we do not
        have yet (in random order); otherwise all hashes, oldest first.
    '''
    conn = sqlite3.connect(self.blockDB, timeout=10)
    c = conn.cursor()
    if unsaved:
        execute = 'SELECT hash FROM hashes WHERE dataSaved != 1 ORDER BY RANDOM();'
    else:
        execute = 'SELECT hash FROM hashes ORDER BY dateReceived ASC;'
    rows = list()
    for row in c.execute(execute):
        for i in row:
            rows.append(i)
    # BUGFIX: the connection was previously never closed (handle leak)
    conn.close()

    return rows
|
2018-02-02 05:39:55 +00:00
|
|
|
|
2018-05-16 01:47:58 +00:00
|
|
|
def getBlockDate(self, blockHash):
    '''
        Returns the date a block was received

        Returns None when the hash is unknown.
    '''
    conn = sqlite3.connect(self.blockDB, timeout=10)
    c = conn.cursor()
    execute = 'SELECT dateReceived FROM hashes WHERE hash=?;'
    args = (blockHash,)
    retVal = None
    for row in c.execute(execute, args):
        for i in row:
            retVal = int(i)
            break
        break
    # BUGFIX: previously returned from inside the loop without closing
    # the connection (handle leak)
    conn.close()

    return retVal
|
|
|
|
|
2018-07-18 07:33:23 +00:00
|
|
|
def getBlocksByType(self, blockType, orderDate=True):
    '''
        Returns a list of blocks by the type
    '''
    conn = sqlite3.connect(self.blockDB, timeout=10)
    c = conn.cursor()
    if orderDate:
        execute = 'SELECT hash FROM hashes WHERE dataType=? ORDER BY dateReceived;'
    else:
        execute = 'SELECT hash FROM hashes WHERE dataType=?;'
    args = (blockType,)
    rows = list()
    for row in c.execute(execute, args):
        for i in row:
            rows.append(i)
    # BUGFIX: the connection was previously never closed (handle leak)
    conn.close()
    return rows
|
2018-02-02 05:39:55 +00:00
|
|
|
|
2018-09-30 04:42:31 +00:00
|
|
|
def getExpiredBlocks(self):
    '''Returns a list of expired blocks (expire timestamp <= now), oldest first'''
    conn = sqlite3.connect(self.blockDB, timeout=10)
    c = conn.cursor()
    date = int(self._utils.getEpoch())

    # Parameterized instead of %-formatting the timestamp into the SQL
    execute = 'SELECT hash FROM hashes WHERE expire <= ? ORDER BY dateReceived;'

    rows = list()
    for row in c.execute(execute, (date,)):
        for i in row:
            rows.append(i)
    # BUGFIX: the connection was previously never closed (handle leak)
    conn.close()
    return rows
|
|
|
|
|
2018-02-02 05:39:55 +00:00
|
|
|
def setBlockType(self, hash, blockType):
    '''
        Sets the type of block
    '''

    conn = sqlite3.connect(self.blockDB, timeout=10)
    c = conn.cursor()
    # Parameterized query; the old string-concatenated UPDATE was an SQL
    # injection risk since blockType can come from remote block metadata
    c.execute("UPDATE hashes SET dataType=? WHERE hash = ?;", (blockType, hash))
    conn.commit()
    conn.close()

    return
|
2018-05-02 06:01:20 +00:00
|
|
|
|
2018-04-26 07:40:39 +00:00
|
|
|
def updateBlockInfo(self, hash, key, data):
    '''
        sets info associated with a block

        hash - the hash of a block
        dateReceived - the date the block was recieved, not necessarily when it was created
        decrypted - if we can successfully decrypt the block (does not describe its current state)
        dataType - data type of the block
        dataFound - if the data has been found for the block
        dataSaved - if the data has been saved for the block
        sig - optional signature by the author (not optional if author is specified)
        author - multi-round partial sha3-256 hash of authors public key
        dateClaimed - timestamp claimed inside the block, only as trustworthy as the block author is
        expire - expire date for a block
    '''
    # Column name must come from this fixed whitelist since it is spliced
    # into the statement (values themselves are bound parameters)
    validKeys = ('dateReceived', 'decrypted', 'dataType', 'dataFound', 'dataSaved', 'sig', 'author', 'dateClaimed', 'expire')
    if key not in validKeys:
        return False

    conn = sqlite3.connect(self.blockDB, timeout=10)
    cursor = conn.cursor()
    cursor.execute("UPDATE hashes SET " + key + " = ? where hash = ?;", (data, hash))
    conn.commit()
    conn.close()
    return True
|
2018-04-16 02:22:19 +00:00
|
|
|
|
2018-09-30 16:53:39 +00:00
|
|
|
def insertBlock(self, data, header='txt', sign=False, encryptType='', symKey='', asymPeer='', meta = None, expire=None):
    '''
    Inserts a block into the network

    data        - block content (str or bytes)
    header      - block type, stored as meta['type'] unless None
    sign        - if True, ed25519-sign meta+data with our private key
    encryptType - '' (plaintext), 'sym' (requires symKey), or 'asym' (requires asymPeer)
    symKey      - symmetric encryption key for encryptType == 'sym'
    asymPeer    - recipient public key for encryptType == 'asym'
    meta        - dict of user-specified block metadata (internal "meta" field)
    expire      - optional expiry epoch claimed for the block

    Returns the new block hash on success, False if the same data was
    already inserted (nonce match) or the proof-of-work failed.

    Raises onionrexceptions.InvalidMetadata for a bad encryptType or
    oversized meta, InvalidPubkey for a bad asymPeer, and SecurityError
    for a weak symmetric key.
    '''
    retData = False

    # Refuse to re-insert data we have already inserted: the sha3 of the
    # content acts as a nonce recorded in dataNonceFile, one per line.
    dataNonce = self._utils.bytesToStr(self._crypto.sha3Hash(data))
    try:
        with open(self.dataNonceFile, 'r') as nonces:
            for nonce in nonces:
                # lines carry a trailing newline, so compare stripped;
                # a plain `dataNonce in nonces` would never match.
                if nonce.rstrip('\n') == dataNonce:
                    return retData
    except FileNotFoundError:
        pass
    # record nonce
    with open(self.dataNonceFile, 'a') as nonceFile:
        nonceFile.write(dataNonce + '\n')

    if meta is None:
        meta = dict()

    if type(data) is bytes:
        data = data.decode()
    data = str(data)

    retData = ''
    signature = ''
    signer = ''
    metadata = {}
    # metadata is full block metadata, meta is internal, user specified metadata

    # only use header if not set in provided meta
    if header is not None:
        meta['type'] = header
    meta['type'] = str(meta['type'])

    if encryptType in ('asym', 'sym', ''):
        metadata['encryptType'] = encryptType
    else:
        raise onionrexceptions.InvalidMetadata('encryptType must be asym or sym, or blank')

    try:
        data = data.encode()
    except AttributeError:
        # already bytes
        pass

    # Forward-secrecy wrapping applies to *asymmetric* blocks: it encrypts
    # the content to the peer's current forward key and advertises a fresh
    # one in meta. (Was wrongly guarded by encryptType == 'sym', which would
    # run with an empty asymPeer and never for actual asym blocks.)
    if encryptType == 'asym':
        try:
            forwardEncrypted = onionrusers.OnionrUser(self, asymPeer).forwardEncrypt(data)
            data = forwardEncrypted[0]
            meta['forwardEnc'] = True
        except onionrexceptions.InvalidPubkey:
            # peer has no valid forward key yet; generate one for next time
            onionrusers.OnionrUser(self, asymPeer).generateForwardKey()
        else:
            logger.info(forwardEncrypted)
        onionrusers.OnionrUser(self, asymPeer).generateForwardKey()
        fsKey = onionrusers.OnionrUser(self, asymPeer).getGeneratedForwardKeys()[0]
        meta['newFSKey'] = fsKey[0]
    jsonMeta = json.dumps(meta)

    if sign:
        signature = self._crypto.edSign(jsonMeta.encode() + data, key=self._crypto.privKey, encodeResult=True)
        signer = self._crypto.pubKey

    if len(jsonMeta) > 1000:
        raise onionrexceptions.InvalidMetadata('meta in json encoded form must not exceed 1000 bytes')

    # encrypt block metadata/sig/content
    if encryptType == 'sym':
        if len(symKey) < self.requirements.passwordLength:
            raise onionrexceptions.SecurityError('Weak encryption key')
        jsonMeta = self._crypto.symmetricEncrypt(jsonMeta, key=symKey, returnEncoded=True).decode()
        data = self._crypto.symmetricEncrypt(data, key=symKey, returnEncoded=True).decode()
        signature = self._crypto.symmetricEncrypt(signature, key=symKey, returnEncoded=True).decode()
        signer = self._crypto.symmetricEncrypt(signer, key=symKey, returnEncoded=True).decode()
    elif encryptType == 'asym':
        if self._utils.validatePubKey(asymPeer):
            # Encrypt block data with forward secrecy key first, but not meta
            jsonMeta = json.dumps(meta)
            jsonMeta = self._crypto.pubKeyEncrypt(jsonMeta, asymPeer, encodedData=True, anonymous=True).decode()
            data = self._crypto.pubKeyEncrypt(data, asymPeer, encodedData=True, anonymous=True).decode()
            signature = self._crypto.pubKeyEncrypt(signature, asymPeer, encodedData=True, anonymous=True).decode()
            signer = self._crypto.pubKeyEncrypt(signer, asymPeer, encodedData=True, anonymous=True).decode()
        else:
            raise onionrexceptions.InvalidPubkey(asymPeer + ' is not a valid base32 encoded ed25519 key')

    # compile metadata
    metadata['meta'] = jsonMeta
    metadata['sig'] = signature
    metadata['signer'] = signer
    metadata['time'] = str(self._utils.getEpoch())

    # ensure expire is integer and of sane length
    # NOTE(review): assert is stripped under `python -O`; kept as-is to
    # preserve the AssertionError contract for existing callers.
    if expire is not None:
        assert len(str(int(expire))) < 14
        metadata['expire'] = expire

    # send block data (and metadata) to POW module to get tokenized block data
    proof = onionrproofs.POW(metadata, data)
    payload = proof.waitForResult()
    if payload is not False:
        retData = self.setData(payload)
        self.addToBlockDB(retData, selfInsert=True, dataSaved=True)
        self._utils.processBlockMetadata(retData)
        self.daemonQueueAdd('uploadBlock', retData)

    if retData is not False:
        events.event('insertBlock', onionr = None, threaded = False)

    return retData
|
2018-04-19 02:16:10 +00:00
|
|
|
|
2018-04-19 01:17:47 +00:00
|
|
|
def introduceNode(self):
    '''
    Introduces our node into the network by telling X many nodes our HS address

    Queues an 'announceNode' daemon command for up to two known peer
    addresses, seeding the address list from bootstrapList when empty.

    Returns True when the introduction was queued, False when the
    Onionr daemon is not running.
    '''
    # Guard clause: announcements are processed by the communicator daemon,
    # so there is nothing useful to do without it.
    if not self._utils.isCommunicatorRunning():
        logger.error('Onionr daemon is not running.')
        return False

    announceAmount = 2
    nodeList = self.listAdders()

    # No known peers yet: fall back to the (validated) bootstrap list.
    if len(nodeList) == 0:
        for address in self.bootstrapList:
            if self._utils.validateID(address):
                self.addAddress(address)
                nodeList.append(address)

    # Never announce to more peers than we actually know.
    announceAmount = min(announceAmount, len(nodeList))

    for i in range(announceAmount):
        self.daemonQueueAdd('announceNode', nodeList[i])

    events.event('introduction', onionr = None)

    return True
|