improved logging messages to be less spammy

Kevin Froman 2019-06-19 15:29:27 -05:00
parent 8082570b7f
commit 065e97ab11
14 changed files with 129 additions and 129 deletions
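In short: the logging functions in logger.py (raw, log, debug, info, warn, error, fatal) now take a `terminal` keyword in place of the old `sensitive` flag, and a message is written to the console only when the caller passes `terminal=True`; writing to the log file is no longer suppressed by that flag. Call sites that should stay user-visible now pass `terminal=True`, a few log levels were adjusted (for example the Tor startup and API-crash messages), and the block lookup loop reports a single "Discovered N new blocks" summary line.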

View File

@@ -85,7 +85,7 @@ The following applies to Ubuntu Bionic. Other distros may have different package
 `$ sudo apt install python3-pip python3-dev tor`
-* Have python3.6+, python3-pip, Tor (daemon, not browser) installed (python3-dev recommended)
+* Have python3.6+, python3-pip, Tor (daemon, not browser) installed. python3-dev is recommended.
 * Clone the git repo: `$ git clone https://gitlab.com/beardog/onionr`
 * cd into install direction: `$ cd onionr/`
 * Install the Python dependencies ([virtualenv strongly recommended](https://virtualenv.pypa.io/en/stable/userguide/)): `$ pip3 install --require-hashes -r requirements.txt`

View File

@@ -176,7 +176,7 @@ class OnionrCommunicatorDaemon:
 self.shutdown = True
 pass
-logger.info('Goodbye. (Onionr is cleaning up, and will exit)')
+logger.info('Goodbye. (Onionr is cleaning up, and will exit)', terminal=True)
 try:
 self.service_greenlets
 except AttributeError:
@@ -252,7 +252,7 @@ class OnionrCommunicatorDaemon:
 break
 else:
 if len(self.onlinePeers) == 0:
-logger.debug('Couldn\'t connect to any peers.' + (' Last node seen %s ago.' % humanreadabletime.human_readable_time(time.time() - self.lastNodeSeen) if not self.lastNodeSeen is None else ''))
+logger.debug('Couldn\'t connect to any peers.' + (' Last node seen %s ago.' % humanreadabletime.human_readable_time(time.time() - self.lastNodeSeen) if not self.lastNodeSeen is None else ''), terminal=True)
 else:
 self.lastNodeSeen = time.time()
 self.decrementThreadCount('getOnlinePeers')
@@ -293,12 +293,12 @@ class OnionrCommunicatorDaemon:
 def printOnlinePeers(self):
 '''logs online peer list'''
 if len(self.onlinePeers) == 0:
-logger.warn('No online peers')
+logger.warn('No online peers', terminal=True)
 else:
-logger.info('Online peers:')
+logger.info('Online peers:', terminal=True)
 for i in self.onlinePeers:
 score = str(self.getPeerProfileInstance(i).score)
-logger.info(i + ', score: ' + score)
+logger.info(i + ', score: ' + score, terminal=True)
 def peerAction(self, peer, action, data='', returnHeaders=False):
 '''Perform a get request to a peer'''
@@ -318,6 +318,7 @@ class OnionrCommunicatorDaemon:
 self.getPeerProfileInstance(peer).addScore(-10)
 self.removeOnlinePeer(peer)
 if action != 'ping':
+logger.warn('Lost connection to ' + peer, terminal=True)
 self.getOnlinePeers() # Will only add a new peer to pool if needed
 except ValueError:
 pass
@@ -359,7 +360,7 @@ class OnionrCommunicatorDaemon:
 def announce(self, peer):
 '''Announce to peers our address'''
 if announcenode.announce_node(self) == False:
-logger.warn('Could not introduce node.')
+logger.warn('Could not introduce node.', terminal=True)
 def detectAPICrash(self):
 '''exit if the api server crashes/stops'''
@@ -371,7 +372,7 @@ class OnionrCommunicatorDaemon:
 else:
 # This executes if the api is NOT detected to be running
 events.event('daemon_crash', onionr = self._core.onionrInst, data = {})
-logger.error('Daemon detected API crash (or otherwise unable to reach API after long time), stopping...')
+logger.fatal('Daemon detected API crash (or otherwise unable to reach API after long time), stopping...', terminal=True)
 self.shutdown = True
 self.decrementThreadCount('detectAPICrash')
@@ -388,5 +389,4 @@ def run_file_exists(daemon):
 if os.path.isfile(daemon._core.dataDir + '.runcheck'):
 os.remove(daemon._core.dataDir + '.runcheck')
 return True
 return False

View File

@@ -72,7 +72,7 @@ def connect_new_peer_to_communicator(comm_inst, peer='', useBootstrap=False):
 # Add a peer to our list if it isn't already since it successfully connected
 networkmerger.mergeAdders(address, comm_inst._core)
 if address not in comm_inst.onlinePeers:
-logger.info('Connected to ' + address)
+logger.info('Connected to ' + address, terminal=True)
 comm_inst.onlinePeers.append(address)
 comm_inst.connectTimes[address] = comm_inst._core._utils.getEpoch()
 retData = address

View File

@@ -110,6 +110,7 @@ def download_blocks_from_communicator(comm_inst):
 if removeFromQueue:
 try:
 del comm_inst.blockQueue[blockHash] # remove from block queue both if success or false
+logger.info('%s blocks remaining in queue' % [len(comm_inst.blockQueue)])
 except KeyError:
 pass
 comm_inst.currentDownloading.remove(blockHash)

View File

@@ -19,61 +19,65 @@
 '''
 import logger, onionrproofs
 def lookup_blocks_from_communicator(comm_inst):
 logger.info('Looking up new blocks...')
 tryAmount = 2
 newBlocks = ''
 existingBlocks = comm_inst._core.getBlockList()
 triedPeers = [] # list of peers we've tried this time around
 maxBacklog = 1560 # Max amount of *new* block hashes to have already in queue, to avoid memory exhaustion
 lastLookupTime = 0 # Last time we looked up a particular peer's list
+new_block_count = 0
 for i in range(tryAmount):
 listLookupCommand = 'getblocklist' # This is defined here to reset it each time
 if len(comm_inst.blockQueue) >= maxBacklog:
 break
 if not comm_inst.isOnline:
 break
 # check if disk allocation is used
 if comm_inst._core._utils.storageCounter.isFull():
 logger.debug('Not looking up new blocks due to maximum amount of allowed disk space used')
 break
 peer = comm_inst.pickOnlinePeer() # select random online peer
 # if we've already tried all the online peers this time around, stop
 if peer in triedPeers:
 if len(comm_inst.onlinePeers) == len(triedPeers):
 break
 else:
 continue
 triedPeers.append(peer)
 # Get the last time we looked up a peer's stamp to only fetch blocks since then.
 # Saved in memory only for privacy reasons
 try:
 lastLookupTime = comm_inst.dbTimestamps[peer]
 except KeyError:
 lastLookupTime = 0
 else:
 listLookupCommand += '?date=%s' % (lastLookupTime,)
 try:
 newBlocks = comm_inst.peerAction(peer, listLookupCommand) # get list of new block hashes
 except Exception as error:
 logger.warn('Could not get new blocks from %s.' % peer, error = error)
 newBlocks = False
 else:
 comm_inst.dbTimestamps[peer] = comm_inst._core._utils.getRoundedEpoch(roundS=60)
 if newBlocks != False:
 # if request was a success
 for i in newBlocks.split('\n'):
 if comm_inst._core._utils.validateHash(i):
 # if newline seperated string is valid hash
 if not i in existingBlocks:
 # if block does not exist on disk and is not already in block queue
 if i not in comm_inst.blockQueue:
 if onionrproofs.hashMeetsDifficulty(i) and not comm_inst._core._blacklist.inBlacklist(i):
 if len(comm_inst.blockQueue) <= 1000000:
 comm_inst.blockQueue[i] = [peer] # add blocks to download queue
+new_block_count += 1
 else:
 if peer not in comm_inst.blockQueue[i]:
 if len(comm_inst.blockQueue[i]) < 10:
 comm_inst.blockQueue[i].append(peer)
+if new_block_count > 0:
+logger.info('Discovered %s new blocks' % (new_block_count,), terminal=True)
 comm_inst.decrementThreadCount('lookupBlocks')
 return
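The added lines above are the summary-style reporting: the loop tallies newly queued hashes in `new_block_count` and emits at most one "Discovered N new blocks" line per lookup pass. A minimal, self-contained sketch of the same pattern (generic names, not Onionr's actual code):

```python
# Tally newly queued items and log one summary line per pass.
def queue_new_items(candidates, queue):
    new_count = 0
    for item in candidates:
        if item not in queue:
            queue[item] = []      # add to the download queue
            new_count += 1
    if new_count > 0:
        print('Discovered %s new blocks' % new_count)  # single summary message

queue_new_items(['hash-a', 'hash-b', 'hash-a'], {})    # prints: Discovered 2 new blocks
```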

View File

@@ -126,24 +126,24 @@ def get_file():
 return _outputfile
-def raw(data, fd = sys.stdout, sensitive = False):
+def raw(data, fd = sys.stdout, terminal = False):
 '''
 Outputs raw data to console without formatting
 '''
-if get_settings() & OUTPUT_TO_CONSOLE:
+if terminal and (get_settings() & OUTPUT_TO_CONSOLE):
 try:
 ts = fd.write('%s\n' % data)
 except OSError:
 pass
-if get_settings() & OUTPUT_TO_FILE and not sensitive:
+if get_settings() & OUTPUT_TO_FILE:
 try:
 with open(_outputfile, "a+") as f:
 f.write(colors.filter(data) + '\n')
 except OSError:
 pass
-def log(prefix, data, color = '', timestamp=True, fd = sys.stdout, prompt = True, sensitive = False):
+def log(prefix, data, color = '', timestamp=True, fd = sys.stdout, prompt = True, terminal = False):
 '''
 Logs the data
 prefix : The prefix to the output
@@ -158,7 +158,7 @@ def log(prefix, data, color = '', timestamp=True, fd = sys.stdout, prompt = True
 if not get_settings() & USE_ANSI:
 output = colors.filter(output)
-raw(output, fd = fd, sensitive = sensitive)
+raw(output, fd = fd, terminal = terminal)
 def readline(message = ''):
 '''
@@ -210,37 +210,37 @@ def confirm(default = 'y', message = 'Are you sure %s? '):
 return default == 'y'
 # debug: when there is info that could be useful for debugging purposes only
-def debug(data, error = None, timestamp = True, prompt = True, sensitive = False, level = LEVEL_DEBUG):
+def debug(data, error = None, timestamp = True, prompt = True, terminal = False, level = LEVEL_DEBUG):
 if get_level() <= level:
-log('/', data, timestamp = timestamp, prompt = prompt, sensitive = sensitive)
+log('/', data, timestamp = timestamp, prompt = prompt, terminal = terminal)
 if not error is None:
 debug('Error: ' + str(error) + parse_error())
 # info: when there is something to notify the user of, such as the success of a process
-def info(data, timestamp = False, prompt = True, sensitive = False, level = LEVEL_INFO):
+def info(data, timestamp = False, prompt = True, terminal = False, level = LEVEL_INFO):
 if get_level() <= level:
-log('+', data, colors.fg.green, timestamp = timestamp, prompt = prompt, sensitive = sensitive)
+log('+', data, colors.fg.green, timestamp = timestamp, prompt = prompt, terminal = terminal)
 # warn: when there is a potential for something bad to happen
-def warn(data, error = None, timestamp = True, prompt = True, sensitive = False, level = LEVEL_WARN):
+def warn(data, error = None, timestamp = True, prompt = True, terminal = False, level = LEVEL_WARN):
 if not error is None:
 debug('Error: ' + str(error) + parse_error())
 if get_level() <= level:
-log('!', data, colors.fg.orange, timestamp = timestamp, prompt = prompt, sensitive = sensitive)
+log('!', data, colors.fg.orange, timestamp = timestamp, prompt = prompt, terminal = terminal)
 # error: when only one function, module, or process of the program encountered a problem and must stop
-def error(data, error = None, timestamp = True, prompt = True, sensitive = False, level = LEVEL_ERROR):
+def error(data, error = None, timestamp = True, prompt = True, terminal = False, level = LEVEL_ERROR):
 if get_level() <= level:
-log('-', data, colors.fg.red, timestamp = timestamp, fd = sys.stderr, prompt = prompt, sensitive = sensitive)
+log('-', data, colors.fg.red, timestamp = timestamp, fd = sys.stderr, prompt = prompt, terminal = terminal)
 if not error is None:
 debug('Error: ' + str(error) + parse_error())
 # fatal: when the something so bad has happened that the program must stop
-def fatal(data, error = None, timestamp=True, prompt = True, sensitive = False, level = LEVEL_FATAL):
+def fatal(data, error = None, timestamp=True, prompt = True, terminal = False, level = LEVEL_FATAL):
 if not error is None:
-debug('Error: ' + str(error) + parse_error(), sensitive = sensitive)
+debug('Error: ' + str(error) + parse_error(), terminal = terminal)
 if get_level() <= level:
-log('#', data, colors.bg.red + colors.fg.green + colors.bold, timestamp = timestamp, fd = sys.stderr, prompt = prompt, sensitive = sensitive)
+log('#', data, colors.bg.red + colors.fg.green + colors.bold, timestamp = timestamp, fd = sys.stderr, prompt = prompt, terminal = terminal)
 # returns a formatted error message
 def parse_error():
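The heart of this commit is the signature change above: `sensitive` is replaced by `terminal`, every logging helper forwards it down to `raw()`, and `raw()` now writes to the console only when it is set, while file output no longer depends on the removed flag. A simplified, self-contained sketch of that gating (`_settings` and the log path here are placeholders; the real module also applies colors.filter and honors more settings):

```python
import sys

OUTPUT_TO_CONSOLE = 0b01
OUTPUT_TO_FILE = 0b10
_settings = OUTPUT_TO_CONSOLE | OUTPUT_TO_FILE
_outputfile = 'onionr-output.log'  # placeholder path, not the real log location

def raw(data, fd=sys.stdout, terminal=False):
    # Console output now requires the caller to opt in with terminal=True.
    if terminal and (_settings & OUTPUT_TO_CONSOLE):
        fd.write('%s\n' % data)
    # File output no longer depends on the removed 'sensitive' flag.
    if _settings & OUTPUT_TO_FILE:
        with open(_outputfile, 'a+') as f:
            f.write(data + '\n')

raw('shown in the terminal and appended to the log file', terminal=True)
raw('appended to the log file only')
```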

View File

@@ -124,14 +124,14 @@ HiddenServicePort 80 ''' + self.apiServerIP + ''':''' + str(self.hsPort)
 try:
 tor = subprocess.Popen([self.torBinary, '-f', self.torConfigLocation], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
 except FileNotFoundError:
-logger.fatal("Tor was not found in your path or the Onionr directory. Please install Tor and try again.")
+logger.fatal("Tor was not found in your path or the Onionr directory. Please install Tor and try again.", terminal=True)
 sys.exit(1)
 else:
 # Test Tor Version
 torVersion = subprocess.Popen([self.torBinary, '--version'], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
 for line in iter(torVersion.stdout.readline, b''):
 if 'Tor 0.2.' in line.decode():
-logger.error('Tor 0.3+ required')
+logger.fatal('Tor 0.3+ required', terminal=True)
 sys.exit(1)
 break
 torVersion.kill()
@@ -140,17 +140,18 @@ HiddenServicePort 80 ''' + self.apiServerIP + ''':''' + str(self.hsPort)
 try:
 for line in iter(tor.stdout.readline, b''):
 if 'bootstrapped 100' in line.decode().lower():
+logger.info(line.decode())
 break
 elif 'opening socks listener' in line.decode().lower():
 logger.debug(line.decode().replace('\n', ''))
 else:
-logger.fatal('Failed to start Tor. Maybe a stray instance of Tor used by Onionr is still running? This can also be a result of file permissions being too open')
+logger.fatal('Failed to start Tor. Maybe a stray instance of Tor used by Onionr is still running? This can also be a result of file permissions being too open', terminal=True)
 return False
 except KeyboardInterrupt:
-logger.fatal('Got keyboard interrupt. Onionr will exit soon.', timestamp = False, level = logger.LEVEL_IMPORTANT)
+logger.fatal('Got keyboard interrupt. Onionr will exit soon.', timestamp = False, level = logger.LEVEL_IMPORTANT, terminal=True)
 return False
-logger.debug('Finished starting Tor.', timestamp=True)
+logger.info('Finished starting Tor.', terminal=True)
 self.readyState = True
 try:

View File

@@ -159,7 +159,7 @@ class Onionr:
 sys.stderr.write(file.read().decode().replace('P', logger.colors.fg.pink).replace('W', logger.colors.reset + logger.colors.bold).replace('G', logger.colors.fg.green).replace('\n', logger.colors.reset + '\n').replace('B', logger.colors.bold).replace('A', '%s' % API_VERSION).replace('V', ONIONR_VERSION))
 if not message is None:
-logger.info(logger.colors.fg.lightgreen + '-> ' + str(message) + logger.colors.reset + logger.colors.fg.lightgreen + ' <-\n', sensitive=True)
+logger.info(logger.colors.fg.lightgreen + '-> ' + str(message) + logger.colors.reset + logger.colors.fg.lightgreen + ' <-\n', terminal=True)
 def deleteRunFiles(self):
 try:
@@ -238,7 +238,7 @@ class Onionr:
 return config.get('client.webpassword')
 def printWebPassword(self):
-logger.info(self.getWebPassword(), sensitive = True)
+logger.info(self.getWebPassword(), term_only = True)
 def getHelp(self):
 return self.cmdhelp
@@ -289,11 +289,11 @@ class Onionr:
 Displays the Onionr version
 '''
-function('Onionr v%s (%s) (API v%s)' % (ONIONR_VERSION, platform.machine(), API_VERSION))
+function('Onionr v%s (%s) (API v%s)' % (ONIONR_VERSION, platform.machine(), API_VERSION), terminal=True)
 if verbosity >= 1:
-function(ONIONR_TAGLINE)
+function(ONIONR_TAGLINE, terminal=True)
 if verbosity >= 2:
-function('Running on %s %s' % (platform.platform(), platform.release()))
+function('Running on %s %s' % (platform.platform(), platform.release()), terminal=True)
 def doPEX(self):
 '''make communicator do pex'''

View File

@@ -30,10 +30,10 @@ def ban_block(o_inst):
 o_inst.onionrCore._blacklist.addToDB(ban)
 o_inst.onionrCore.removeBlock(ban)
 except Exception as error:
-logger.error('Could not blacklist block', error=error)
+logger.error('Could not blacklist block', error=error, terminal=True)
 else:
-logger.info('Block blacklisted')
+logger.info('Block blacklisted', terminal=True)
 else:
-logger.warn('That block is already blacklisted')
+logger.warn('That block is already blacklisted', terminal=True)
 else:
-logger.error('Invalid block hash')
+logger.error('Invalid block hash', terminal=True)

View File

@@ -40,11 +40,6 @@ def daemon(o_inst):
 Thread(target=api.API, args=(o_inst, o_inst.debug, onionr.API_VERSION)).start()
 Thread(target=api.PublicAPI, args=[o_inst.getClientApi()]).start()
-try:
-time.sleep(0)
-except KeyboardInterrupt:
-logger.debug('Got keyboard interrupt, shutting down...')
-_proper_shutdown(o_inst)
 apiHost = ''
 while apiHost == '':
@@ -56,10 +51,17 @@ def daemon(o_inst):
 time.sleep(0.5)
 #onionr.Onionr.setupConfig('data/', self = o_inst)
+logger.raw('', terminal=True)
+# print nice header thing :)
+if o_inst.onionrCore.config.get('general.display_header', True):
+o_inst.header()
+o_inst.version(verbosity = 5, function = logger.info)
+logger.debug('Python version %s' % platform.python_version())
 if o_inst._developmentMode:
-logger.warn('DEVELOPMENT MODE ENABLED (NOT RECOMMENDED)', timestamp = False)
+logger.warn('DEVELOPMENT MODE ENABLED', timestamp = False, terminal=True)
 net = NetController(o_inst.onionrCore.config.get('client.public.port', 59497), apiServerIP=apiHost)
-logger.debug('Tor is starting...')
+logger.info('Tor is starting...', terminal=True)
 if not net.startTor():
 o_inst.onionrUtils.localCommand('shutdown')
 sys.exit(1)
@@ -67,7 +69,7 @@ def daemon(o_inst):
 logger.debug('Started .onion service: %s' % (logger.colors.underline + net.myID))
 else:
 logger.debug('.onion service disabled')
-logger.debug('Using public key: %s' % (logger.colors.underline + o_inst.onionrCore._crypto.pubKey))
+logger.info('Using public key: %s' % (logger.colors.underline + o_inst.onionrCore._crypto.pubKey[:52]), terminal=True)
 try:
 time.sleep(1)
@@ -81,14 +83,6 @@ def daemon(o_inst):
 while o_inst.communicatorInst is None:
 time.sleep(0.1)
-# print nice header thing :)
-if o_inst.onionrCore.config.get('general.display_header', True):
-o_inst.header()
-# print out debug info
-o_inst.version(verbosity = 5, function = logger.debug)
-logger.debug('Python version %s' % platform.python_version())
 logger.debug('Started communicator.')
 events.event('daemon_start', onionr = o_inst)
@@ -124,7 +118,7 @@ def kill_daemon(o_inst):
 Shutdown the Onionr daemon
 '''
-logger.warn('Stopping the running daemon...', timestamp = False)
+logger.warn('Stopping the running daemon...', timestamp = False, terminal=True)
 try:
 events.event('daemon_stop', onionr = o_inst)
 net = NetController(o_inst.onionrCore.config.get('client.port', 59496))
@@ -135,12 +129,12 @@ def kill_daemon(o_inst):
 net.killTor()
 except Exception as e:
-logger.error('Failed to shutdown daemon.', error = e, timestamp = False)
+logger.error('Failed to shutdown daemon.', error = e, timestamp = False, terminal=True)
 return
 def start(o_inst, input = False, override = False):
 if os.path.exists('.onionr-lock') and not override:
-logger.fatal('Cannot start. Daemon is already running, or it did not exit cleanly.\n(if you are sure that there is not a daemon running, delete .onionr-lock & try again).')
+logger.fatal('Cannot start. Daemon is already running, or it did not exit cleanly.\n(if you are sure that there is not a daemon running, delete .onionr-lock & try again).', terminal=True)
 else:
 if not o_inst.debug and not o_inst._developmentMode:
 lockFile = open('.onionr-lock', 'w')

View File

@@ -65,21 +65,21 @@ def show_stats(o_inst):
 groupsize = width - prewidth - len('[+] ')
 # generate stats table
-logger.info(colors['title'] + 'Onionr v%s Statistics' % onionr.ONIONR_VERSION + colors['reset'])
+logger.info(colors['title'] + 'Onionr v%s Statistics' % onionr.ONIONR_VERSION + colors['reset'], terminal=True)
-logger.info(colors['border'] + '-' * (maxlength + 1) + '+' + colors['reset'])
+logger.info(colors['border'] + '-' * (maxlength + 1) + '+' + colors['reset'], terminal=True)
 for key, val in messages.items():
 if not (type(val) is bool and val is True):
 val = [str(val)[i:i + groupsize] for i in range(0, len(str(val)), groupsize)]
-logger.info(colors['key'] + str(key).rjust(maxlength) + colors['reset'] + colors['border'] + ' | ' + colors['reset'] + colors['val'] + str(val.pop(0)) + colors['reset'])
+logger.info(colors['key'] + str(key).rjust(maxlength) + colors['reset'] + colors['border'] + ' | ' + colors['reset'] + colors['val'] + str(val.pop(0)) + colors['reset'], terminal=True)
 for value in val:
-logger.info(' ' * maxlength + colors['border'] + ' | ' + colors['reset'] + colors['val'] + str(value) + colors['reset'])
+logger.info(' ' * maxlength + colors['border'] + ' | ' + colors['reset'] + colors['val'] + str(value) + colors['reset'], terminal=True)
 else:
-logger.info(colors['border'] + '-' * (maxlength + 1) + '+' + colors['reset'])
+logger.info(colors['border'] + '-' * (maxlength + 1) + '+' + colors['reset'], terminal=True)
-logger.info(colors['border'] + '-' * (maxlength + 1) + '+' + colors['reset'])
+logger.info(colors['border'] + '-' * (maxlength + 1) + '+' + colors['reset'], terminal=True)
 except Exception as e:
-logger.error('Failed to generate statistics table.', error = e, timestamp = False)
+logger.error('Failed to generate statistics table.', error = e, timestamp = False, terminal=True)
 def show_details(o_inst):
 details = {
@@ -90,7 +90,7 @@ def show_details(o_inst):
 }
 for detail in details:
-logger.info('%s%s: \n%s%s\n' % (logger.colors.fg.lightgreen, detail, logger.colors.fg.green, details[detail]), sensitive = True)
+logger.info('%s%s: \n%s%s\n' % (logger.colors.fg.lightgreen, detail, logger.colors.fg.green, details[detail]), terminal = True)
 def show_peers(o_inst):
 randID = str(uuid.uuid4())

View File

@@ -26,5 +26,5 @@ def open_home(o_inst):
 logger.error('Onionr seems to not be running (could not get api host)')
 else:
 url = 'http://%s/#%s' % (url, o_inst.onionrCore.config.get('client.webpassword'))
-logger.info('If Onionr does not open automatically, use this URL: ' + url)
+logger.info('If Onionr does not open automatically, use this URL: ' + url, terminal=True)
 webbrowser.open_new_tab(url)

View File

@@ -519,7 +519,7 @@ class OnionrUtils:
 except KeyboardInterrupt:
 raise KeyboardInterrupt
 except ValueError as e:
-logger.debug('Failed to make GET request to %s' % url, error = e, sensitive = True)
+pass
 except onionrexceptions.InvalidAPIVersion:
 if 'X-API' in response_headers:
 logger.debug('Using API version %s. Cannot communicate with node\'s API version of %s.' % (API_VERSION, response_headers['X-API']))

View File

@@ -40,7 +40,7 @@ class OnionrFlow:
 return
 def start(self):
-logger.warn("Please note: everything said here is public, even if a random channel name is used.")
+logger.warn("Please note: everything said here is public, even if a random channel name is used.", terminal=True)
 message = ""
 self.flowRunning = True
 newThread = threading.Thread(target=self.showOutput)
@@ -63,7 +63,7 @@ class OnionrFlow:
 if len(message) > 0:
 self.myCore.insertBlock(message, header='txt', expire=expireTime, meta={'ch': self.channel})
-logger.info("Flow is exiting, goodbye")
+logger.info("Flow is exiting, goodbye", terminal=True)
 return
 def showOutput(self):
@@ -81,11 +81,11 @@ class OnionrFlow:
 continue
 if not self.flowRunning:
 break
-logger.info('\n------------------------', prompt = False)
+logger.info('\n------------------------', prompt = False, terminal=True)
 content = block.getContent()
 # Escape new lines, remove trailing whitespace, and escape ansi sequences
 content = self.myCore._utils.escapeAnsi(content.replace('\n', '\\n').replace('\r', '\\r').strip())
-logger.info(block.getDate().strftime("%m/%d %H:%M") + ' - ' + logger.colors.reset + content, prompt = False)
+logger.info(block.getDate().strftime("%m/%d %H:%M") + ' - ' + logger.colors.reset + content, prompt = False, terminal=True)
 self.alreadyOutputed.append(block.getHash())
 time.sleep(5)
 except KeyboardInterrupt: