progress in removing onionr core

This commit is contained in:
Kevin Froman 2019-07-17 17:41:33 -05:00
parent bf8a9c4f27
commit 8163292ed9
16 changed files with 175 additions and 203 deletions

View File

@@ -30,6 +30,7 @@ from communicatorutils import cooldownpeer, housekeeping, netcheck
 from onionrutils import localcommand, epoch
 from etc import humanreadabletime
 import onionrservices, onionr, onionrproofs
+from coredb import daemonqueue
 OnionrCommunicatorTimers = onionrcommunicatortimers.OnionrCommunicatorTimers
 config.reload()
@@ -84,7 +85,7 @@ class OnionrCommunicatorDaemon:
         # Clear the daemon queue for any dead messages
         if os.path.exists(self._core.queueDB):
-            self._core.clearDaemonQueue()
+            daemonqueue.clear_daemon_queue()

         # Loads in and starts the enabled plugins
         plugins.reload()

View File

@@ -20,8 +20,9 @@
 import logger
 import onionrevents as events
 from onionrutils import localcommand
+from coredb import daemonqueue

 def handle_daemon_commands(comm_inst):
-    cmd = comm_inst._core.daemonQueue()
+    cmd = daemonqueue.daemon_queue()
     response = ''
     if cmd is not False:
         events.event('daemon_command', onionr = comm_inst._core.onionrInst, data = {'cmd' : cmd})

View File

@@ -62,7 +62,6 @@ class Core:
         self.requirements = onionrvalues.OnionrValues()
         self.torPort = torPort
         self.dataNonceFile = self.dataDir + 'block-nonces.dat'
-        self.dbCreate = dbcreator.DBCreator(self)
         self.forwardKeysFile = self.dataDir + 'forward-keys.db'
         self.keyStore = simplekv.DeadSimpleKV(self.dataDir + 'cachedstorage.dat', refresh_seconds=5)
         self.storage_counter = storagecounter.StorageCounter(self)
@@ -81,7 +80,7 @@ class Core:
         if not os.path.exists(self.blockDB):
             self.createBlockDB()
         if not os.path.exists(self.forwardKeysFile):
-            self.dbCreate.createForwardKeyDB()
+            dbcreator.createForwardKeyDB()
         if not os.path.exists(self.peerDB):
             self.createPeerDB()
         if not os.path.exists(self.addressDB):
@@ -176,32 +175,6 @@ class Core:
         '''
         return onionrstorage.getData(self, hash)

-    def daemonQueue(self):
-        '''
-            Gives commands to the communication proccess/daemon by reading an sqlite3 database
-
-            This function intended to be used by the client. Queue to exchange data between "client" and server.
-        '''
-        return coredb.daemonqueue.daemon_queue(self)
-
-    def daemonQueueAdd(self, command, data='', responseID=''):
-        '''
-            Add a command to the daemon queue, used by the communication daemon (communicator.py)
-        '''
-        return coredb.daemonqueue.daemon_queue_add(self, command, data, responseID)
-
-    def daemonQueueGetResponse(self, responseID=''):
-        '''
-            Get a response sent by communicator to the API, by requesting to the API
-        '''
-        return coredb.daemonqueue.daemon_queue_get_response(self, responseID)
-
-    def clearDaemonQueue(self):
-        '''
-            Clear the daemon queue (somewhat dangerous)
-        '''
-        return coredb.daemonqueue.clear_daemon_queue(self)
-
     def listAdders(self, randomOrder=True, i2p=True, recent=0):
         '''
             Return a list of addresses
@@ -390,7 +363,7 @@ class Core:
             if localcommand.local_command(self, '/ping', maxWait=10) == 'pong!':
                 if self.config.get('general.security_level', 1) == 0:
                     localcommand.local_command(self, '/waitforshare/' + retData, post=True, maxWait=5)
-                self.daemonQueueAdd('uploadBlock', retData)
+                coredb.daemonqueue.daemon_queue_add('uploadBlock', retData)
             else:
                 pass
             coredb.blockmetadb.add_to_block_DB(retData, selfInsert=True, dataSaved=True)
@@ -408,7 +381,7 @@ class Core:
             Introduces our node into the network by telling X many nodes our HS address
         '''
         if localcommand.local_command(self, '/ping', maxWait=10) == 'pong!':
-            self.daemonQueueAdd('announceNode')
+            coredb.daemonqueue.daemon_queue_add('announceNode')
             logger.info('Introduction command will be processed.', terminal=True)
         else:
             logger.warn('No running node detected. Cannot introduce.', terminal=True)

View File

@@ -21,8 +21,10 @@
 import sqlite3, os
 import onionrevents as events
 from onionrutils import localcommand, epoch
+from .. import dbfiles
+import dbcreator

-def daemon_queue(core_inst):
+def daemon_queue():
     '''
         Gives commands to the communication proccess/daemon by reading an sqlite3 database
@@ -30,28 +32,26 @@ def daemon_queue(core_inst):
     '''
     retData = False
-    if not os.path.exists(core_inst.queueDB):
-        core_inst.dbCreate.createDaemonDB()
+    if not os.path.exists(dbfiles.daemon_queue_db):
+        dbcreator.createDaemonDB()
     else:
-        conn = sqlite3.connect(core_inst.queueDB, timeout=30)
+        conn = sqlite3.connect(dbfiles.daemon_queue_db, timeout=30)
         c = conn.cursor()
         try:
             for row in c.execute('SELECT command, data, date, min(ID), responseID FROM commands group by id'):
                 retData = row
                 break
         except sqlite3.OperationalError:
-            core_inst.dbCreate.createDaemonDB()
+            dbcreator.createDaemonDB()
         else:
             if retData != False:
                 c.execute('DELETE FROM commands WHERE id=?;', (retData[3],))
                 conn.commit()
         conn.close()

-        events.event('queue_pop', data = {'data': retData}, onionr = core_inst.onionrInst)

     return retData

-def daemon_queue_add(core_inst, command, data='', responseID=''):
+def daemon_queue_add(command, data='', responseID=''):
     '''
         Add a command to the daemon queue, used by the communication daemon (communicator.py)
     '''
@@ -59,7 +59,7 @@ def daemon_queue_add(core_inst, command, data='', responseID=''):
     retData = True
     date = epoch.get_epoch()
-    conn = sqlite3.connect(core_inst.queueDB, timeout=30)
+    conn = sqlite3.connect(dbfiles.daemon_queue_db, timeout=30)
     c = conn.cursor()
     t = (command, data, date, responseID)
     try:
@@ -67,24 +67,23 @@ def daemon_queue_add(core_inst, command, data='', responseID=''):
         conn.commit()
     except sqlite3.OperationalError:
         retData = False
-        core_inst.daemonQueue()
+        daemon_queue()
-    events.event('queue_push', data = {'command': command, 'data': data}, onionr = core_inst.onionrInst)
     conn.close()
     return retData
-def daemon_queue_get_response(core_inst, responseID=''):
+def daemon_queue_get_response(responseID=''):
     '''
         Get a response sent by communicator to the API, by requesting to the API
     '''
-    assert len(responseID) > 0
+    if len(responseID) == 0: raise ValueError('ResponseID should not be empty')
-    resp = localcommand.local_command(core_inst, 'queueResponse/' + responseID)
+    resp = localcommand.local_command(dbfiles.daemon_queue_db, 'queueResponse/' + responseID)
     return resp

-def clear_daemon_queue(core_inst):
+def clear_daemon_queue():
     '''
         Clear the daemon queue (somewhat dangerous)
     '''
-    conn = sqlite3.connect(core_inst.queueDB, timeout=30)
+    conn = sqlite3.connect(dbfiles.daemon_queue_db, timeout=30)
     c = conn.cursor()
     try:
@@ -94,4 +93,3 @@ def clear_daemon_queue(core_inst):
         pass
     conn.close()
-    events.event('queue_clear', onionr = core_inst.onionrInst)
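Taken together, this file now exposes the daemon queue as plain module-level functions rather than Core methods. A minimal usage sketch of the new API, with signatures as they appear in this diff (the queued command name is just illustrative):

```python
from coredb import daemonqueue

# Queue a command for the communicator daemon; responseID is optional.
daemonqueue.daemon_queue_add('announceNode')

# Pop the oldest queued command, or get False when the queue is empty.
cmd = daemonqueue.daemon_queue()
if cmd is not False:
    command_name, command_data = cmd[0], cmd[1]  # remaining fields: date, row id, responseID

# Drop everything still pending (somewhat dangerous, per the docstring above).
daemonqueue.clear_daemon_queue()
```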

View File

@@ -2,4 +2,9 @@ from utils import identifyhome
 home = identifyhome.identify_home()
 if not home.endswith('/'): home += '/'
-block_meta_db = '%sblock-metadata.db'
+block_meta_db = '%sblock-metadata.db' % (home,)
+block_data_db = '%sblocks/block-data.db' % (home,)
+daemon_queue_db = '%sdaemon-queue.db' % (home,)
+address_info_db = '%saddress.db' % (home,)
+user_id_info_db = '%susers.db' % (home,)
+forward_keys_db = '%sforward-keys.db' % (home,)
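The new dbfiles module centralizes every database path under the user's data directory, so callers no longer need to read path attributes off a Core instance. A sketch of how another module might use it, assuming the package layout shown in this commit:

```python
import sqlite3
from coredb import dbfiles

# Each constant is a full path rooted at identifyhome.identify_home().
print(dbfiles.daemon_queue_db)

# Any consumer can open a database directly from the shared path constants.
conn = sqlite3.connect(dbfiles.user_id_info_db, timeout=30)
conn.close()
```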

View File

@@ -20,7 +20,7 @@
 import sqlite3
 import onionrevents as events
 from onionrutils import stringvalidators
+from . import listkeys

 def add_peer(core_inst, peerID, name=''):
     '''
         Adds a public key to the key database (misleading function name)

View File

@@ -20,14 +20,15 @@
 import sqlite3
 import logger
 from onionrutils import epoch
+from .. import dbfiles

-def list_peers(core_inst, randomOrder=True, getPow=False, trust=0):
+def list_peers(randomOrder=True, getPow=False, trust=0):
     '''
         Return a list of public keys (misleading function name)

         randomOrder determines if the list should be in a random order
         trust sets the minimum trust to list
     '''
-    conn = sqlite3.connect(core_inst.peerDB, timeout=30)
+    conn = sqlite3.connect(dbfiles.user_id_info_db, timeout=30)
     c = conn.cursor()

     payload = ''

View File

@@ -17,12 +17,10 @@
     You should have received a copy of the GNU General Public License
     along with this program.  If not, see <https://www.gnu.org/licenses/>.
 '''
+from coredb import dbfiles
 import sqlite3, os

-class DBCreator:
-    def __init__(self, coreInst):
-        self.core = coreInst
-
-    def createAddressDB(self):
+def createAddressDB():
     '''
         Generate the address database
@@ -31,7 +29,7 @@ class DBCreator:
         2: Tor v2 (like facebookcorewwwi.onion)
         3: Tor v3
     '''
-    conn = sqlite3.connect(self.core.addressDB)
+    conn = sqlite3.connect(dbfiles.address_info_db)
     c = conn.cursor()
     c.execute('''CREATE TABLE adders(
         address text,
@@ -50,12 +48,12 @@ class DBCreator:
     conn.commit()
     conn.close()

-    def createPeerDB(self):
+def createPeerDB():
     '''
         Generate the peer sqlite3 database and populate it with the peers table.
     '''
     # generate the peer database
-    conn = sqlite3.connect(self.core.peerDB)
+    conn = sqlite3.connect(dbfiles.user_id_info_db)
     c = conn.cursor()
     c.execute('''CREATE TABLE peers(
         ID text not null,
@@ -75,7 +73,7 @@ class DBCreator:
     conn.close()
     return

-    def createBlockDB(self):
+def createBlockDB():
     '''
         Create a database for blocks
@@ -90,9 +88,9 @@ class DBCreator:
         dateClaimed - timestamp claimed inside the block, only as trustworthy as the block author is
         expire int - block expire date in epoch
     '''
-    if os.path.exists(self.core.blockDB):
+    if os.path.exists(dbfiles.block_meta_db):
         raise FileExistsError("Block database already exists")
-    conn = sqlite3.connect(self.core.blockDB)
+    conn = sqlite3.connect(dbfiles.block_meta_db)
     c = conn.cursor()
     c.execute('''CREATE TABLE hashes(
         hash text not null,
@@ -111,10 +109,10 @@ class DBCreator:
     conn.close()
     return

-    def createBlockDataDB(self):
+def createBlockDataDB():
-    if os.path.exists(self.core.blockDataDB):
+    if os.path.exists(dbfiles.block_data_db):
         raise FileExistsError("Block data database already exists")
-    conn = sqlite3.connect(self.core.blockDataDB)
+    conn = sqlite3.connect(dbfiles.block_data_db)
     c = conn.cursor()
     c.execute('''CREATE TABLE blockData(
         hash text not null,
@@ -124,13 +122,13 @@ class DBCreator:
     conn.commit()
     conn.close()

-    def createForwardKeyDB(self):
+def createForwardKeyDB():
     '''
         Create the forward secrecy key db (*for *OUR* keys*)
     '''
-    if os.path.exists(self.core.forwardKeysFile):
+    if os.path.exists(dbfiles.forward_keys_db):
         raise FileExistsError("Block database already exists")
-    conn = sqlite3.connect(self.core.forwardKeysFile)
+    conn = sqlite3.connect(dbfiles.forward_keys_db)
     c = conn.cursor()
     c.execute('''CREATE TABLE myForwardKeys(
         peer text not null,
@@ -144,11 +142,11 @@ class DBCreator:
     conn.close()
     return

-    def createDaemonDB(self):
+def createDaemonDB():
     '''
         Create the daemon queue database
     '''
-    conn = sqlite3.connect(self.core.queueDB, timeout=10)
+    conn = sqlite3.connect(dbfiles.daemon_queue_db, timeout=10)
     c = conn.cursor()
     # Create table
     c.execute('''CREATE TABLE commands (id integer primary key autoincrement, command text, data text, date text, responseID text)''')
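With DBCreator reduced to plain functions that read their paths from coredb.dbfiles, bootstrapping a missing database no longer needs a Core instance. A minimal sketch mirroring the pattern daemon_queue() uses earlier in this diff:

```python
import os
import dbcreator
from coredb import dbfiles

# Create the daemon queue database on first use, exactly as daemon_queue() does.
if not os.path.exists(dbfiles.daemon_queue_db):
    dbcreator.createDaemonDB()
```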

View File

@@ -20,7 +20,7 @@
 from flask import Blueprint, Response
 import core, onionrblockapi, onionrexceptions
 from onionrutils import stringvalidators
+from coredb import daemonqueue

 shutdown_bp = Blueprint('shutdown', __name__)

 def shutdown(client_api_inst):
@@ -34,5 +34,5 @@ def shutdown(client_api_inst):
 @shutdown_bp.route('/shutdownclean')
 def shutdown_clean():
     # good for calling from other clients
-    core.Core().daemonQueueAdd('shutdown')
+    daemonqueue.daemon_queue_add('shutdown')
     return Response("bye")

View File

@@ -300,11 +300,6 @@ class Onionr:
         if verbosity >= 2:
             function('Running on %s %s' % (platform.platform(), platform.release()), terminal=True)

-    def doPEX(self):
-        '''make communicator do pex'''
-        logger.info('Sending pex to command queue...')
-        self.onionrCore.daemonQueueAdd('pex')
-
     def listKeys(self):
         '''
             Displays a list of keys (used to be called peers) (?)

View File

@@ -21,7 +21,7 @@
 import webbrowser, sys
 import logger
 from . import pubkeymanager, onionrstatistics, daemonlaunch, filecommands, plugincommands, keyadders
-from . import banblocks, exportblocks, openwebinterface, resettor
+from . import banblocks, exportblocks, openwebinterface, resettor, dopex
 from onionrutils import importnewblocks

 def show_help(o_inst, command):
@@ -115,7 +115,7 @@ def get_commands(onionr_inst):
         'importblocks': importnewblocks.import_new_blocks,
         'introduce': onionr_inst.onionrCore.introduceNode,
-        'pex': onionr_inst.doPEX,
+        'pex': dopex.do_PEX,
         'getpassword': onionr_inst.printWebPassword,
         'get-password': onionr_inst.printWebPassword,

View File

@@ -24,6 +24,7 @@ import onionr, apiservers, logger, communicator
 import onionrevents as events
 from netcontroller import NetController
 from onionrutils import localcommand
+from coredb import daemonqueue

 def _proper_shutdown(o_inst):
     localcommand.local_command(o_inst.onionrCore, 'shutdown')
@@ -103,7 +104,7 @@ def daemon(o_inst):
             break # Break out if sigterm for clean exit

     signal.signal(signal.SIGINT, _ignore_sigint)
-    o_inst.onionrCore.daemonQueueAdd('shutdown')
+    daemonqueue.daemon_queue_add('shutdown')
     localcommand.local_command(o_inst.onionrCore, 'shutdown')

     net.killTor()
@@ -124,7 +125,7 @@ def kill_daemon(o_inst):
         events.event('daemon_stop', onionr = o_inst)
         net = NetController(o_inst.onionrCore.config.get('client.port', 59496))
         try:
-            o_inst.onionrCore.daemonQueueAdd('shutdown')
+            daemonqueue.daemon_queue_add('shutdown')
         except sqlite3.OperationalError:
             pass

View File

@@ -23,7 +23,7 @@ from onionrblockapi import Block
 import onionr
 from onionrutils import checkcommunicator, mnemonickeys
 from utils import sizeutils
-from coredb import blockmetadb
+from coredb import blockmetadb, daemonqueue

 def show_stats(o_inst):
     try:
         # define stats messages here
@@ -96,11 +96,11 @@ def show_details(o_inst):
 def show_peers(o_inst):
     randID = str(uuid.uuid4())
-    o_inst.onionrCore.daemonQueueAdd('connectedPeers', responseID=randID)
+    daemonqueue.daemon_queue_add('connectedPeers', responseID=randID)
     while True:
         try:
             time.sleep(3)
-            peers = o_inst.onionrCore.daemonQueueGetResponse(randID)
+            peers = daemonqueue.daemon_queue_get_response(randID)
         except KeyboardInterrupt:
             break
         if not type(peers) is None:
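show_peers illustrates the request/response side of the queue: a command is enqueued with a responseID, and the answer is later fetched with the same ID. A stripped-down sketch of that round trip, with the uuid usage and polling delay taken from this hunk:

```python
import time
import uuid
from coredb import daemonqueue

# Ask the running communicator which peers it is connected to.
rand_id = str(uuid.uuid4())
daemonqueue.daemon_queue_add('connectedPeers', responseID=rand_id)

# Give the communicator time to answer, then fetch the response for our ID.
time.sleep(3)
peers = daemonqueue.daemon_queue_get_response(rand_id)
```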

View File

@@ -20,7 +20,7 @@
 import onionrplugins, core as onionrcore, logger
 from onionrutils import localcommand
+from coredb import daemonqueue

 class DaemonAPI:
     def __init__(self, pluginapi):
         self.pluginapi = pluginapi
@@ -36,15 +36,13 @@ class DaemonAPI:
         return

     def queue(self, command, data = ''):
-        self.pluginapi.get_core().daemonQueueAdd(command, data)
-        return
+        return daemonqueue.daemon_queue_add(command, data)

     def local_command(self, command):
         return localcommand.local_command(self.pluginapi.get_core(), command)

     def queue_pop(self):
-        return self.get_core().daemonQueue()
+        return daemonqueue.daemon_queue()

 class PluginAPI:
     def __init__(self, pluginapi):

View File

@@ -21,6 +21,7 @@ import logger, onionrexceptions, json, sqlite3, time
 from onionrutils import stringvalidators, bytesconverter, epoch
 import unpaddedbase32
 import nacl.exceptions
+from coredb import keydb

 def deleteExpiredKeys(coreInst):
     # Fetch the keys we generated for the peer, that are still around