'''
    Onionr - P2P Microblogging Platform & Social network

    Core Onionr library, useful for external programs. Handles peer & data processing
'''
'''
    This program is free software: you can redistribute it and/or modify
    it under the terms of the GNU General Public License as published by
    the Free Software Foundation, either version 3 of the License, or
    (at your option) any later version.

    This program is distributed in the hope that it will be useful,
    but WITHOUT ANY WARRANTY; without even the implied warranty of
    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
    GNU General Public License for more details.

    You should have received a copy of the GNU General Public License
    along with this program. If not, see <https://www.gnu.org/licenses/>.
'''
import sqlite3, os, sys, time, math, base64, tarfile, getpass, simplecrypt, hashlib, nacl, logger, json, netcontroller, config
#from Crypto.Cipher import AES
#from Crypto import Random

import onionrutils, onionrcrypto, onionrproofs, onionrevents as events

if sys.version_info < (3, 6):
    try:
        import sha3
    except ModuleNotFoundError:
        logger.fatal('On Python 3 versions prior to 3.6.x, you need the sha3 module')
        sys.exit(1)

class Core:
    def __init__(self):
        '''
            Initialize Core Onionr library
        '''
        try:
            self.queueDB = 'data/queue.db'
            self.peerDB = 'data/peers.db'
            self.blockDB = 'data/blocks.db'
            self.blockDataLocation = 'data/blocks/'
            self.addressDB = 'data/address.db'
            self.hsAdder = ''

            self.bootstrapFileLocation = 'static-data/bootstrap-nodes.txt'
            self.bootstrapList = []

            if not os.path.exists('data/'):
                os.mkdir('data/')
            if not os.path.exists('data/blocks/'):
                os.mkdir('data/blocks/')
            if not os.path.exists(self.blockDB):
                self.createBlockDB()

            if os.path.exists('data/hs/hostname'):
                with open('data/hs/hostname', 'r') as hs:
                    self.hsAdder = hs.read()

            # Load bootstrap address list
            if os.path.exists(self.bootstrapFileLocation):
                with open(self.bootstrapFileLocation, 'r') as bootstrap:
                    bootstrap = bootstrap.read()
                for i in bootstrap.split('\n'):
                    self.bootstrapList.append(i)
            else:
                logger.warn('Warning: address bootstrap file not found ' + self.bootstrapFileLocation)

            self._utils = onionrutils.OnionrUtils(self)
            # Initialize the crypto object
            self._crypto = onionrcrypto.OnionrCrypto(self)
        except Exception as error:
            logger.error('Failed to initialize core Onionr library.', error=error)
            logger.fatal('Cannot recover from error.')
            sys.exit(1)
        return

    def addPeer(self, peerID, powID, name=''):
        '''
            Adds a public key to the key database (misleading function name)
        '''
        # This function simply adds a peer to the DB
        if not self._utils.validatePubKey(peerID):
            return False
        if sys.getsizeof(powID) > 120:
            logger.warn("POW token for pubkey base64 representation exceeded 120 bytes, is " + str(sys.getsizeof(powID)))
            return False

        conn = sqlite3.connect(self.peerDB)
        hashID = self._crypto.pubKeyHashID(peerID)
        c = conn.cursor()
        t = (peerID, name, 'unknown', hashID, powID)

        for i in c.execute('SELECT * FROM peers WHERE id=?;', (peerID,)):
            try:
                if i[0] == peerID:
                    conn.close()
                    return False
            except ValueError:
                pass
            except IndexError:
                pass
        c.execute('INSERT INTO peers (id, name, dateSeen, hashID, pow) VALUES(?, ?, ?, ?, ?);', t)
        conn.commit()
        conn.close()

        return True

    def addAddress(self, address):
        '''
            Add an address to the address database (only tor currently)
        '''
        if address == config.get('i2p')['ownAddr']:
            return False
        if self._utils.validateID(address):
            conn = sqlite3.connect(self.addressDB)
            c = conn.cursor()
            # check if address is in database
            # this is safe to do because the address is validated above, but we strip some chars here too just in case
            address = address.replace('\'', '').replace(';', '').replace('"', '').replace('\\', '')
            for i in c.execute('SELECT * FROM adders WHERE address=?;', (address,)):
                try:
                    if i[0] == address:
                        logger.warn('Not adding existing address')
                        conn.close()
                        return False
                except ValueError:
                    pass
                except IndexError:
                    pass

            t = (address, 1)
            c.execute('INSERT INTO adders (address, type) VALUES(?, ?);', t)
            conn.commit()
            conn.close()

            events.event('address_add', data = {'address': address}, onionr = None)

            return True
        else:
            return False

    def removeAddress(self, address):
        '''
            Remove an address from the address database
        '''
        if self._utils.validateID(address):
            conn = sqlite3.connect(self.addressDB)
            c = conn.cursor()
            t = (address,)
            c.execute('DELETE FROM adders WHERE address=?;', t)
            conn.commit()
            conn.close()

            events.event('address_remove', data = {'address': address}, onionr = None)

            return True
        else:
            return False

    def removeBlock(self, block):
        '''
            Remove a block from this node
        '''
        if self._utils.validateHash(block):
            conn = sqlite3.connect(self.blockDB)
            c = conn.cursor()
            t = (block,)
            c.execute('DELETE FROM hashes WHERE hash=?;', t)
            conn.commit()
            conn.close()
            try:
                os.remove('data/blocks/' + block + '.dat')
            except FileNotFoundError:
                pass

    def createAddressDB(self):
        '''
            Generate the address database

            types:
                1: I2P b32 address
                2: Tor v2 (like facebookcorewwwi.onion)
                3: Tor v3
        '''
        conn = sqlite3.connect(self.addressDB)
        c = conn.cursor()
        c.execute('''CREATE TABLE adders(
            address text,
            type int,
            knownPeer text,
            speed int,
            success int,
            DBHash text,
            failure int,
            lastConnect int
            );
        ''')
        conn.commit()
        conn.close()

    def createPeerDB(self):
        '''
            Generate the peer sqlite3 database and populate it with the peers table.
        '''
        # generate the peer database
        conn = sqlite3.connect(self.peerDB)
        c = conn.cursor()
        c.execute('''CREATE TABLE peers(
            ID text not null,
            name text,
            adders text,
            blockDBHash text,
            forwardKey text,
            dateSeen not null,
            bytesStored int,
            trust int,
            pubkeyExchanged int,
            hashID text,
            pow text not null);
        ''')
        conn.commit()
        conn.close()

        return

    def createBlockDB(self):
        '''
            Create a database for blocks

            hash - the hash of a block
            dateReceived - the date the block was received, not necessarily when it was created
            decrypted - if we can successfully decrypt the block (does not describe its current state)
            dataType - data type of the block
            dataFound - if the data has been found for the block
            dataSaved - if the data has been saved for the block
            sig - optional signature by the author (not optional if author is specified)
            author - multi-round partial sha3-256 hash of the author's public key
        '''
        if os.path.exists(self.blockDB):
            raise Exception("Block database already exists")
        conn = sqlite3.connect(self.blockDB)
        c = conn.cursor()
        c.execute('''CREATE TABLE hashes(
            hash text not null,
            dateReceived int,
            decrypted int,
            dataType text,
            dataFound int,
            dataSaved int,
            sig text,
            author text
            );
        ''')
        conn.commit()
        conn.close()

        return

    def addToBlockDB(self, newHash, selfInsert=False, dataSaved=False):
        '''
            Add a hash value to the block db

            Should be in hex format!
        '''
        if not os.path.exists(self.blockDB):
            raise Exception('Block db does not exist')
        if self._utils.hasBlock(newHash):
            return
        conn = sqlite3.connect(self.blockDB)
        c = conn.cursor()
        currentTime = self._utils.getEpoch()
        if selfInsert or dataSaved:
            selfInsert = 1
        else:
            selfInsert = 0
        data = (newHash, currentTime, '', selfInsert)
        c.execute('INSERT INTO hashes (hash, dateReceived, dataType, dataSaved) VALUES(?, ?, ?, ?);', data)
        conn.commit()
        conn.close()

        return

    def getData(self, hash):
        '''
            Simply return the data associated with a hash
        '''
        try:
            # logger.debug('Opening %s' % (str(self.blockDataLocation) + str(hash) + '.dat'))
            dataFile = open(self.blockDataLocation + hash + '.dat', 'rb')
            data = dataFile.read()
            dataFile.close()
        except FileNotFoundError:
            data = False

        return data

    def setData(self, data):
        '''
            Set the data associated with a hash
        '''
        hasher = hashlib.sha3_256()
        if not type(data) is bytes:
            data = data.encode()
        hasher.update(data)
        dataHash = hasher.hexdigest()
        if type(dataHash) is bytes:
            dataHash = dataHash.decode()
        blockFileName = self.blockDataLocation + dataHash + '.dat'
        if os.path.exists(blockFileName):
            pass # TODO: properly check if block is already saved elsewhere
            #raise Exception("Data is already set for " + dataHash)
        else:
            blockFile = open(blockFileName, 'wb')
            blockFile.write(data)
            blockFile.close()

            conn = sqlite3.connect(self.blockDB)
            c = conn.cursor()
            c.execute('UPDATE hashes SET dataSaved=1 WHERE hash=?;', (dataHash,))
            conn.commit()
            conn.close()

        return dataHash

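    # Illustration (not executed): blocks are content-addressed by sha3-256, so a
    # hypothetical call like setData(b'hello world') writes the payload to
    # data/blocks/<sha3-256 hex digest>.dat and returns that digest, which getData()
    # can later use to read the same bytes back.
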
    def dataDirEncrypt(self, password):
        '''
            Encrypt the data directory on Onionr shutdown
        '''
        if os.path.exists('data.tar'):
            os.remove('data.tar')
        tar = tarfile.open("data.tar", "w")
        for name in ['data']:
            tar.add(name)
        tar.close()
        tarData = open('data.tar', 'r', encoding = "ISO-8859-1").read()
        encrypted = simplecrypt.encrypt(password, tarData)
        open('data-encrypted.dat', 'wb').write(encrypted)
        os.remove('data.tar')

        return

    def dataDirDecrypt(self, password):
        '''
            Decrypt the data directory on startup
        '''
        if not os.path.exists('data-encrypted.dat'):
            return (False, 'encrypted archive does not exist')
        data = open('data-encrypted.dat', 'rb').read()
        try:
            decrypted = simplecrypt.decrypt(password, data)
        except simplecrypt.DecryptionException:
            return (False, 'wrong password (or corrupted archive)')
        else:
            open('data.tar', 'wb').write(decrypted)
            tar = tarfile.open('data.tar')
            tar.extractall()
            tar.close()

        return (True, '')

    def daemonQueue(self):
        '''
            Give commands to the communication process/daemon by reading an sqlite3 database

            This function is intended to be used by the client. Queue to exchange data between "client" and server.
        '''
        retData = False
        if not os.path.exists(self.queueDB):
            conn = sqlite3.connect(self.queueDB)
            c = conn.cursor()
            # Create table
            c.execute('''CREATE TABLE commands
                        (id integer primary key autoincrement, command text, data text, date text)''')
            conn.commit()
        else:
            conn = sqlite3.connect(self.queueDB)
            c = conn.cursor()
            for row in c.execute('SELECT command, data, date, min(ID) FROM commands group by id'):
                retData = row
                break
            if retData != False:
                c.execute('DELETE FROM commands WHERE id=?;', (retData[3],))
        conn.commit()
        conn.close()

        events.event('queue_pop', data = {'data': retData}, onionr = None)

        return retData

    def daemonQueueAdd(self, command, data=''):
        '''
            Add a command to the daemon queue, used by the communication daemon (communicator.py)
        '''
        # Intended to be used by the web server
        date = self._utils.getEpoch()
        conn = sqlite3.connect(self.queueDB)
        c = conn.cursor()
        t = (command, data, date)
        c.execute('INSERT INTO commands (command, data, date) VALUES(?, ?, ?)', t)
        conn.commit()
        conn.close()

        events.event('queue_push', data = {'command': command, 'data': data}, onionr = None)

        return

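    # Sketch of the intended round trip (illustrative, using only the methods above):
    # a client calls daemonQueueAdd('announceNode', someAddress) to enqueue a command
    # row, and the communicator daemon later calls daemonQueue(), which returns the
    # oldest row as (command, data, date, id) and deletes it, or False when empty.
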
    def clearDaemonQueue(self):
        '''
            Clear the daemon queue (somewhat dangerous)
        '''
        conn = sqlite3.connect(self.queueDB)
        c = conn.cursor()
        try:
            c.execute('DELETE FROM commands;')
            conn.commit()
        except:
            pass
        conn.close()
        events.event('queue_clear', onionr = None)

        return

    def listAdders(self, randomOrder=True, i2p=True):
        '''
            Return a list of addresses
        '''
        conn = sqlite3.connect(self.addressDB)
        c = conn.cursor()
        if randomOrder:
            addresses = c.execute('SELECT * FROM adders ORDER BY RANDOM();')
        else:
            addresses = c.execute('SELECT * FROM adders;')
        addressList = []
        for i in addresses:
            addressList.append(i[0])
        conn.close()
        return addressList

    def listPeers(self, randomOrder=True, getPow=False):
        '''
            Return a list of public keys (misleading function name)

            randomOrder determines if the list should be in a random order
        '''
        conn = sqlite3.connect(self.peerDB)
        c = conn.cursor()
        payload = ""
        if randomOrder:
            payload = 'SELECT * FROM peers ORDER BY RANDOM();'
        else:
            payload = 'SELECT * FROM peers;'
        peerList = []
        for i in c.execute(payload):
            try:
                if len(i[0]) != 0:
                    if getPow:
                        peerList.append(i[0] + '-' + i[1])
                    else:
                        peerList.append(i[0])
            except TypeError:
                pass
        if getPow:
            try:
                peerList.append(self._crypto.pubKey + '-' + self._crypto.pubKeyPowToken)
            except TypeError:
                pass
        else:
            peerList.append(self._crypto.pubKey)
        conn.close()
        return peerList

    def getPeerInfo(self, peer, info):
        '''
            Get info about a peer from their database entry

            id text              0
            name text,           1
            adders text,         2
            forwardKey text,     3
            dateSeen not null,   4
            bytesStored int,     5
            trust int            6
            pubkeyExchanged int  7
            hashID text          8
        '''
        conn = sqlite3.connect(self.peerDB)
        c = conn.cursor()
        command = (peer,)
        infoNumbers = {'id': 0, 'name': 1, 'adders': 2, 'forwardKey': 3, 'dateSeen': 4, 'bytesStored': 5, 'trust': 6, 'pubkeyExchanged': 7, 'hashID': 8}
        info = infoNumbers[info]
        iterCount = 0
        retVal = ''
        for row in c.execute('SELECT * from peers where id=?;', command):
            for i in row:
                if iterCount == info:
                    retVal = i
                    break
                else:
                    iterCount += 1
        conn.close()

        return retVal

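    # Example (hypothetical values): getPeerInfo(somePubkey, 'dateSeen') returns that
    # peer's dateSeen column, using the positions listed in the docstring above;
    # setPeerInfo() below is the corresponding setter.
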
    def setPeerInfo(self, peer, key, data):
        '''
            Update a peer for a key
        '''
        conn = sqlite3.connect(self.peerDB)
        c = conn.cursor()
        command = (data, peer)
        # TODO: validate key on whitelist
        if key not in ('id', 'name', 'pubkey', 'blockDBHash', 'forwardKey', 'dateSeen', 'bytesStored', 'trust'):
            raise Exception("Got invalid database key when setting peer info")
        c.execute('UPDATE peers SET ' + key + ' = ? WHERE id=?', command)
        conn.commit()
        conn.close()
        return

    def getAddressInfo(self, address, info):
        '''
            Get info about an address from its database entry

            address text,    0
            type int,        1
            knownPeer text,  2
            speed int,       3
            success int,     4
            DBHash text,     5
            failure int      6
            lastConnect      7
        '''
        conn = sqlite3.connect(self.addressDB)
        c = conn.cursor()
        command = (address,)
        infoNumbers = {'address': 0, 'type': 1, 'knownPeer': 2, 'speed': 3, 'success': 4, 'DBHash': 5, 'failure': 6, 'lastConnect': 7}
        info = infoNumbers[info]
        iterCount = 0
        retVal = ''
        for row in c.execute('SELECT * from adders where address=?;', command):
            for i in row:
                if iterCount == info:
                    retVal = i
                    break
                else:
                    iterCount += 1
        conn.close()
        return retVal

    def setAddressInfo(self, address, key, data):
        '''
            Update an address for a key
        '''
        conn = sqlite3.connect(self.addressDB)
        c = conn.cursor()
        command = (data, address)
        # TODO: validate key on whitelist
        if key not in ('address', 'type', 'knownPeer', 'speed', 'success', 'DBHash', 'failure', 'lastConnect'):
            raise Exception("Got invalid database key when setting address info")
        c.execute('UPDATE adders SET ' + key + ' = ? WHERE address=?', command)
        conn.commit()
        conn.close()
        return

    def handle_direct_connection(self, data):
        '''
            Handles direct messages
        '''
        try:
            data = json.loads(data)

            # TODO: Determine the sender, verify, etc
            if ('callback' in data) and (data['callback'] is True):
                # then this is a response to the message we sent earlier
                self.daemonQueueAdd('checkCallbacks', json.dumps(data))
            else:
                # then we should handle it and respond accordingly
                self.daemonQueueAdd('incomingDirectConnection', json.dumps(data))
        except Exception as e:
            logger.warn('Failed to handle incoming direct message: %s' % str(e))

        return

    def getBlockList(self, unsaved = False): # TODO: Use unsaved
        '''
            Get a list of our blocks
        '''
        conn = sqlite3.connect(self.blockDB)
        c = conn.cursor()
        if unsaved:
            execute = 'SELECT hash FROM hashes WHERE dataSaved != 1 ORDER BY RANDOM();'
        else:
            execute = 'SELECT hash FROM hashes ORDER BY RANDOM();'
        rows = list()
        for row in c.execute(execute):
            for i in row:
                rows.append(i)
        conn.close()

        return rows

    def getBlockDate(self, blockHash):
        '''
            Returns the date a block was received
        '''
        conn = sqlite3.connect(self.blockDB)
        c = conn.cursor()
        execute = 'SELECT dateReceived FROM hashes WHERE hash=?;'
        args = (blockHash,)
        for row in c.execute(execute, args):
            for i in row:
                return int(i)

        return None

    def getBlocksByType(self, blockType):
        '''
            Returns a list of blocks by type
        '''
        conn = sqlite3.connect(self.blockDB)
        c = conn.cursor()
        execute = 'SELECT hash FROM hashes WHERE dataType=?;'
        args = (blockType,)
        rows = list()
        for row in c.execute(execute, args):
            for i in row:
                rows.append(i)
        conn.close()

        return rows

    def setBlockType(self, hash, blockType):
        '''
            Sets the type of a block
        '''
        conn = sqlite3.connect(self.blockDB)
        c = conn.cursor()
        c.execute('UPDATE hashes SET dataType=? WHERE hash=?;', (blockType, hash))
        conn.commit()
        conn.close()
        return

    def updateBlockInfo(self, hash, key, data):
        '''
            Sets info associated with a block
        '''
        if key not in ('dateReceived', 'decrypted', 'dataType', 'dataFound', 'dataSaved', 'sig', 'author'):
            return False

        conn = sqlite3.connect(self.blockDB)
        c = conn.cursor()
        args = (data, hash)
        c.execute("UPDATE hashes SET " + key + " = ? where hash = ?;", args)
        conn.commit()
        conn.close()
        return True

    def insertBlock(self, data, header='txt', sign=False):
        '''
            Inserts a block into the network
        '''
        powProof = onionrproofs.POW(data)
        powToken = ''
        # wait for proof to complete
        try:
            while True:
                powToken = powProof.getResult()
                if powToken == False:
                    time.sleep(0.3)
                    continue
                powHash = powToken[0]
                powToken = base64.b64encode(powToken[1])
                try:
                    powToken = powToken.decode()
                except AttributeError:
                    pass
                finally:
                    break
        except KeyboardInterrupt:
            logger.warn("Got keyboard interrupt while working on inserting block, stopping.")
            powProof.shutdown()
            return ''

        try:
            data.decode()
        except AttributeError:
            data = data.encode()

        retData = ''
        metadata = {'type': header, 'powHash': powHash, 'powToken': powToken}
        sig = {}

        metadata = json.dumps(metadata)
        metadata = metadata.encode()
        signature = ''

        if sign:
            signature = self._crypto.edSign(metadata + b'\n' + data, self._crypto.privKey, encodeResult=True)
            ourID = self._crypto.pubKeyHashID()
            # Convert from bytes on some py versions?
            try:
                ourID = ourID.decode()
            except AttributeError:
                pass
        metadata = {'sig': signature, 'meta': metadata.decode()}
        metadata = json.dumps(metadata)
        metadata = metadata.encode()

        if len(data) == 0:
            logger.error('Will not insert empty block')
        else:
            addedHash = self.setData(metadata + b'\n' + data)
            self.addToBlockDB(addedHash, selfInsert=True)
            self.setBlockType(addedHash, header)
            retData = addedHash
        return retData

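    # Resulting block layout (illustrative): each file written by insertBlock() is a
    # JSON line {'sig': ..., 'meta': '<type/powHash/powToken JSON>'}, then a newline,
    # then the raw payload bytes, stored as data/blocks/<sha3-256 of the whole blob>.dat.
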
    def introduceNode(self):
        '''
            Introduces our node into the network by telling X many nodes our HS address
        '''
        if(self._utils.isCommunicatorRunning()):
            announceAmount = 2
            nodeList = self.listAdders()

            if len(nodeList) == 0:
                for i in self.bootstrapList:
                    if self._utils.validateID(i):
                        self.addAddress(i)
                        nodeList.append(i)

            if announceAmount > len(nodeList):
                announceAmount = len(nodeList)

            for i in range(announceAmount):
                self.daemonQueueAdd('announceNode', nodeList[i])

            events.event('introduction', onionr = None)

            return True
        else:
            logger.error('Onionr daemon is not running.')
            return False

        return
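
# A minimal manual-test sketch (an assumption, not part of the library API): it presumes
# the sibling Onionr modules imported above are importable and that proof-of-work for a
# small block finishes in reasonable time. Run this file directly to try it.
if __name__ == '__main__':
    core = Core()
    if not os.path.exists(core.peerDB):
        core.createPeerDB()
    if not os.path.exists(core.addressDB):
        core.createAddressDB()
    exampleHash = core.insertBlock(b'hello world', header='txt')
    print('Inserted block:', exampleHash)
    print('Known blocks:', core.getBlockList())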