Merge branch 'refactoring2' of gitlab.com:beardog/Onionr into refactoring2
commit 8727dc2241
@@ -19,7 +19,7 @@
     You should have received a copy of the GNU General Public License
     along with this program. If not, see <https://www.gnu.org/licenses/>.
 '''
-import sys, os, core, config, json, onionrblockapi as block, requests, time, logger, threading, onionrplugins as plugins, base64
+import sys, os, core, config, json, onionrblockapi as block, requests, time, logger, threading, onionrplugins as plugins, base64, onionr
 import onionrexceptions
 from defusedxml import minidom
 
@@ -50,13 +50,13 @@ class OnionrCommunicatorDaemon:
 
         # amount of threads running by name, used to prevent too many
         self.threadCounts = {}
 
         # set true when shutdown command recieved
         self.shutdown = False
 
         # list of new blocks to download, added to when new block lists are fetched from peers
         self.blockQueue = []
 
         # Clear the daemon queue for any dead messages
         if os.path.exists(self._core.queueDB):
             self._core.clearDaemonQueue()
@@ -64,18 +64,17 @@ class OnionrCommunicatorDaemon:
         # Loads in and starts the enabled plugins
         plugins.reload()
 
+        if debug or developmentMode:
+            OnionrCommunicatorTimers(self, self.heartbeat, 10)
 
+        # Initalize peer online list
+        logger.debug('Onionr is not yet ready to recieve commands.')
+        self.getOnlinePeers()
 
         # Print nice header thing :)
         if config.get('general.display_header', True):
             self.header()
 
-        if debug or developmentMode:
-            OnionrCommunicatorTimers(self, self.heartbeat, 10)
 
-        # Initalize peer online list
-        logger.warn('Onionr is not yet ready to recieve commands.')
-        self.getOnlinePeers()
-        logger.info('\033[4mOnionr is ready\033[0m.')
 
         # Set timers, function reference, seconds
         OnionrCommunicatorTimers(self, self.daemonCommands, 5)
         OnionrCommunicatorTimers(self, self.detectAPICrash, 5)
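Note on the hunk above: OnionrCommunicatorTimers(self, <function>, <seconds>) registers a function reference plus an interval in seconds, and the daemon's main loop then fires each registered timer via processTimer(). A rough, hypothetical sketch of that polling-timer pattern (the names here are illustrative, not the project's actual class):

import time

class PollingTimer:
    '''Hypothetical sketch: fire a callback roughly every `frequency` loop ticks.'''
    def __init__(self, timer_function, frequency):
        self.timer_function = timer_function
        self.frequency = frequency
        self.count = 0

    def process_timer(self):
        # called once per main-loop iteration; fire when enough ticks have passed
        self.count += 1
        if self.count >= self.frequency:
            self.count = 0
            self.timer_function()

# usage: the main loop polls every registered timer once per second
timers = [PollingTimer(lambda: print('heartbeat'), 10)]
for _ in range(30):
    for t in timers:
        t.process_timer()
    time.sleep(1)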
@@ -93,22 +92,23 @@ class OnionrCommunicatorDaemon:
                     break
                 i.processTimer()
             time.sleep(self.delay)
 
         logger.info('Goodbye.')
         self._core._utils.localCommand('shutdown')
 
     def lookupKeys(self):
         '''Lookup new keys'''
-        logger.info('LOOKING UP NEW KEYS')
+        logger.debug('Looking up new keys...')
         tryAmount = 1
         for i in range(tryAmount):
             # Download new key list from random online peers
             peer = self.pickOnlinePeer()
             newKeys = self.peerAction(peer, action='kex')
             self._core._utils.mergeKeys(newKeys)
 
         self.decrementThreadCount('lookupKeys')
         return
 
     def lookupAdders(self):
         '''Lookup new peer addresses'''
         logger.info('LOOKING UP NEW ADDRESSES')
@@ -118,7 +118,7 @@ class OnionrCommunicatorDaemon:
             peer = self.pickOnlinePeer()
             newAdders = self.peerAction(peer, action='pex')
             self._core._utils.mergeAdders(newAdders)
 
         self.decrementThreadCount('lookupKeys')
 
     def lookupBlocks(self):
@@ -149,7 +149,7 @@ class OnionrCommunicatorDaemon:
     def getBlocks(self):
         '''download new blocks in queue'''
         for blockHash in self.blockQueue:
-            logger.info("ATTEMPTING TO DOWNLOAD " + blockHash)
+            logger.info("Attempting to download %s..." % blockHash)
             content = self.peerAction(self.pickOnlinePeer(), 'getData', data=blockHash) # block content from random peer (includes metadata)
             if content != False:
                 try:
@@ -201,7 +201,7 @@ class OnionrCommunicatorDaemon:
             self.threadCounts[threadName] -= 1
         except KeyError:
             pass
 
     def clearOfflinePeer(self):
         '''Removes the longest offline peer to retry later'''
         try:
@@ -209,7 +209,7 @@ class OnionrCommunicatorDaemon:
         except IndexError:
             pass
         else:
-            logger.debug('removed ' + removed + ' from offline list to try them again.')
+            logger.debug('Removed ' + removed + ' from offline list, will try them again.')
         self.decrementThreadCount('clearOfflinePeer')
 
     def getOnlinePeers(self):
@@ -262,7 +262,7 @@ class OnionrCommunicatorDaemon:
                 tried.append(address)
                 logger.debug('Failed to connect to ' + address)
         return retData
 
     def printOnlinePeers(self):
         '''logs online peer list'''
         if len(self.onlinePeers) == 0:
@@ -349,7 +349,7 @@ class OnionrCommunicatorDaemon:
         if os.path.exists('static-data/header.txt'):
             with open('static-data/header.txt', 'rb') as file:
                 # only to stdout, not file or log or anything
-                print(file.read().decode().replace('P', logger.colors.fg.pink).replace('W', logger.colors.reset + logger.colors.bold).replace('G', logger.colors.fg.green).replace('\n', logger.colors.reset + '\n'))
+                sys.stderr.write(file.read().decode().replace('P', logger.colors.fg.pink).replace('W', logger.colors.reset + logger.colors.bold).replace('G', logger.colors.fg.green).replace('\n', logger.colors.reset + '\n').replace('B', logger.colors.bold).replace('V', onionr.ONIONR_VERSION))
         logger.info(logger.colors.fg.lightgreen + '-> ' + str(message) + logger.colors.reset + logger.colors.fg.lightgreen + ' <-\n')
 
 class OnionrCommunicatorTimers:
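Note on the hunk above: the ASCII-art header is now written to stderr instead of stdout, and the template's single-letter markers (P, W, G, B for colors, V for the version) are expanded with chained str.replace() calls; the 'GvPBV' marker appended in header.txt (last hunk below) relies on these substitutions to render the version string. A minimal, hypothetical sketch of that expansion, using made-up ANSI codes in place of logger.colors and a placeholder version string:

import sys

PINK, GREEN, BOLD, RESET = '\033[95m', '\033[92m', '\033[1m', '\033[0m'  # stand-ins for logger.colors.*
VERSION = '0.0.0'  # stand-in for onionr.ONIONR_VERSION

def render_header(template):
    # expand single-letter markers into ANSI sequences and the version string
    return (template.replace('P', PINK)
                    .replace('W', RESET + BOLD)
                    .replace('G', GREEN)
                    .replace('B', BOLD)
                    .replace('V', VERSION)
                    .replace('\n', RESET + '\n'))

sys.stderr.write(render_header('G onionr W vV\n'))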
@@ -390,7 +390,7 @@ class Core:
             events.event('queue_pop', data = {'data': retData}, onionr = None)
 
         return retData
 
     def makeDaemonDB(self):
         '''generate the daemon queue db'''
         conn = sqlite3.connect(self.queueDB)
@@ -669,16 +669,18 @@ class Core:
         conn.close()
         return True
 
-    def insertBlock(self, data, header='txt', sign=False, encryptType='', symKey='', asymPeer='', meta = {}):
+    def insertBlock(self, data, header='txt', sign=False, encryptType='', symKey='', asymPeer='', meta = None):
         '''
             Inserts a block into the network
             encryptType must be specified to encrypt a block
         '''
 
-        try:
-            data.decode()
-        except AttributeError:
-            data = data.encode()
+        if meta is None:
+            meta = dict()
+
+        if type(data) is bytes:
+            data = data.decode()
+        data = str(data)
 
         retData = ''
         signature = ''
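Note on the signature change above (meta = {} → meta = None): Python evaluates default argument values once, at function definition time, so a mutable default like {} is shared across every call that omits the argument, and writes such as meta['type'] = header would leak into later calls. Creating the dict inside the body avoids that. A small, self-contained demonstration of the pitfall (generic names, unrelated to the Onionr API):

def insert_bad(value, meta={}):
    # BUG: the same dict object is reused by every call that omits `meta`
    meta['count'] = meta.get('count', 0) + 1
    return meta

def insert_good(value, meta=None):
    # fresh dict per call when the caller does not pass one
    if meta is None:
        meta = {}
    meta['count'] = meta.get('count', 0) + 1
    return meta

print(insert_bad('a'), insert_bad('b'))    # both show {'count': 2}: state leaked between calls
print(insert_good('a'), insert_good('b'))  # {'count': 1} {'count': 1}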
@@ -686,10 +688,9 @@ class Core:
         metadata = {}
 
         # only use header if not set in provided meta
-        try:
-            meta['type']
-        except KeyError:
-            meta['type'] = header # block type
+        if not header is None:
+            meta['type'] = header
+        meta['type'] = str(meta['type'])
 
         jsonMeta = json.dumps(meta)
 
@@ -709,7 +710,7 @@ class Core:
 
         if len(jsonMeta) > 1000:
             raise onionrexceptions.InvalidMetadata('meta in json encoded form must not exceed 1000 bytes')
 
         # encrypt block metadata/sig/content
         if encryptType == 'sym':
             if len(symKey) < self.requirements.passwordLength:
@@ -123,18 +123,18 @@ def get_file():
 
     return _outputfile
 
-def raw(data):
+def raw(data, fd = sys.stdout):
     '''
         Outputs raw data to console without formatting
     '''
 
     if get_settings() & OUTPUT_TO_CONSOLE:
-        print(data)
+        ts = fd.write('%s\n' % data)
     if get_settings() & OUTPUT_TO_FILE:
         with open(_outputfile, "a+") as f:
             f.write(colors.filter(data) + '\n')
 
-def log(prefix, data, color = '', timestamp=True):
+def log(prefix, data, color = '', timestamp=True, fd = sys.stdout):
     '''
         Logs the data
         prefix : The prefix to the output
@ -149,7 +149,7 @@ def log(prefix, data, color = '', timestamp=True):
|
|||||||
if not get_settings() & USE_ANSI:
|
if not get_settings() & USE_ANSI:
|
||||||
output = colors.filter(output)
|
output = colors.filter(output)
|
||||||
|
|
||||||
raw(output)
|
raw(output, fd = fd)
|
||||||
|
|
||||||
def readline(message = ''):
|
def readline(message = ''):
|
||||||
'''
|
'''
|
||||||
@@ -218,14 +218,14 @@ def warn(data, timestamp=True):
 # error: when only one function, module, or process of the program encountered a problem and must stop
 def error(data, error=None, timestamp=True):
     if get_level() <= LEVEL_ERROR:
-        log('-', data, colors.fg.red, timestamp=timestamp)
+        log('-', data, colors.fg.red, timestamp=timestamp, fd = sys.stderr)
     if not error is None:
         debug('Error: ' + str(error) + parse_error())
 
 # fatal: when the something so bad has happened that the program must stop
 def fatal(data, timestamp=True):
     if get_level() <= LEVEL_FATAL:
-        log('#', data, colors.bg.red + colors.fg.green + colors.bold, timestamp=timestamp)
+        log('#', data, colors.bg.red + colors.fg.green + colors.bold, timestamp=timestamp, fd = sys.stderr)
 
 # returns a formatted error message
 def parse_error():
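Note on the logger hunks above: an fd file-object parameter (defaulting to sys.stdout) is threaded through raw() and log() so that error() and fatal() can route their output to sys.stderr while ordinary messages keep going to stdout. A stripped-down, hypothetical sketch of that routing (simplified signatures, not the module's full API):

import sys

def raw(data, fd=sys.stdout):
    # write one line to whichever stream the caller selected
    fd.write('%s\n' % data)

def log(prefix, data, fd=sys.stdout):
    raw('[%s] %s' % (prefix, data), fd=fd)

def info(data):
    log('+', data)                 # stdout by default

def error(data):
    log('-', data, fd=sys.stderr)  # errors go to stderr

info('Onionr is ready')
error('Failed to connect to peer')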
@@ -263,7 +263,7 @@ class Onionr:
 
     def listConn(self):
         self.onionrCore.daemonQueueAdd('connectedPeers')
 
     def listPeers(self):
         logger.info('Peer transport address list:')
         for i in self.onionrCore.listAdders():
@@ -335,7 +335,7 @@ class Onionr:
         logger.info('Running on %s %s' % (platform.platform(), platform.release()))
 
         return
 
     def doKEX(self):
         '''make communicator do kex'''
         logger.info('Sending kex to command queue...')
@@ -745,5 +745,5 @@ class Onionr:
         else:
             logger.error('%s add-file <filename>' % sys.argv[0], timestamp = False)
 
 if __name__ == "__main__":
     Onionr()
@@ -496,7 +496,6 @@ class Block:
             - child (str/Block): the child Block to be followed
             - file (str/file): the file to write the content to, instead of returning it
             - maximumFollows (int): the maximum number of Blocks to follow
 
         '''
-
         # validate data and instantiate Core
@@ -18,7 +18,7 @@ P ::: :::: ::::::: :::: :::: W:: :: :: ::: :: :: :: :: :::: :::::
 P ::: ::::: :::::: :::: :::: W:: :: :: ::: :: :: :: :: ::: :: :::
 P :::: ::::: ::::: ::: W :::: :: :: :: ::::: :: :: :: ::
 P :::: :::::: :::::: ::::
-P :::: :::::::::::: ::::
+P :::: :::::::::::: :::: GvPBV
 P ::::: :::::::: ::::
 P ::::: ::::::
 P ::::::::::::::::