From 065e97ab118718515715ee22aa3038fb86d778b3 Mon Sep 17 00:00:00 2001
From: Kevin Froman
Date: Wed, 19 Jun 2019 15:29:27 -0500
Subject: [PATCH] improved logging messages to be less spammy

---
 README.md                                    |   2 +-
 onionr/communicator.py                       |  16 +--
 onionr/communicatorutils/connectnewpeers.py  |   2 +-
 onionr/communicatorutils/downloadblocks.py   |   1 +
 onionr/communicatorutils/lookupblocks.py     | 116 +++++++++---------
 onionr/logger.py                             |  32 ++---
 onionr/netcontroller.py                      |  11 +-
 onionr/onionr.py                             |  10 +-
 onionr/onionrcommands/banblocks.py           |   8 +-
 onionr/onionrcommands/daemonlaunch.py        |  32 ++---
 onionr/onionrcommands/onionrstatistics.py    |  16 +--
 onionr/onionrcommands/openwebinterface.py    |   2 +-
 onionr/onionrutils.py                        |   2 +-
 .../static-data/default-plugins/flow/main.py |   8 +-
 14 files changed, 129 insertions(+), 129 deletions(-)

diff --git a/README.md b/README.md
index 536e323b..4417e31f 100644
--- a/README.md
+++ b/README.md
@@ -85,7 +85,7 @@ The following applies to Ubuntu Bionic. Other distros may have different package
 
 `$ sudo apt install python3-pip python3-dev tor`
 
-* Have python3.6+, python3-pip, Tor (daemon, not browser) installed (python3-dev recommended)
+* Have python3.6+, python3-pip, Tor (daemon, not browser) installed. python3-dev is recommended.
 * Clone the git repo: `$ git clone https://gitlab.com/beardog/onionr`
 * cd into install direction: `$ cd onionr/`
 * Install the Python dependencies ([virtualenv strongly recommended](https://virtualenv.pypa.io/en/stable/userguide/)): `$ pip3 install --require-hashes -r requirements.txt`
diff --git a/onionr/communicator.py b/onionr/communicator.py
index de49e601..370a4981 100755
--- a/onionr/communicator.py
+++ b/onionr/communicator.py
@@ -176,7 +176,7 @@ class OnionrCommunicatorDaemon:
                 self.shutdown = True
                 pass
 
-        logger.info('Goodbye. (Onionr is cleaning up, and will exit)')
+        logger.info('Goodbye. (Onionr is cleaning up, and will exit)', terminal=True)
         try:
             self.service_greenlets
         except AttributeError:
@@ -252,7 +252,7 @@ class OnionrCommunicatorDaemon:
                 break
         else:
             if len(self.onlinePeers) == 0:
-                logger.debug('Couldn\'t connect to any peers.' + (' Last node seen %s ago.' % humanreadabletime.human_readable_time(time.time() - self.lastNodeSeen) if not self.lastNodeSeen is None else ''))
+                logger.debug('Couldn\'t connect to any peers.' + (' Last node seen %s ago.' % humanreadabletime.human_readable_time(time.time() - self.lastNodeSeen) if not self.lastNodeSeen is None else ''), terminal=True)
             else:
                 self.lastNodeSeen = time.time()
         self.decrementThreadCount('getOnlinePeers')
@@ -293,12 +293,12 @@ class OnionrCommunicatorDaemon:
     def printOnlinePeers(self):
         '''logs online peer list'''
         if len(self.onlinePeers) == 0:
-            logger.warn('No online peers')
+            logger.warn('No online peers', terminal=True)
         else:
-            logger.info('Online peers:')
+            logger.info('Online peers:', terminal=True)
             for i in self.onlinePeers:
                 score = str(self.getPeerProfileInstance(i).score)
-                logger.info(i + ', score: ' + score)
+                logger.info(i + ', score: ' + score, terminal=True)
 
     def peerAction(self, peer, action, data='', returnHeaders=False):
         '''Perform a get request to a peer'''
@@ -318,6 +318,7 @@ class OnionrCommunicatorDaemon:
                 self.getPeerProfileInstance(peer).addScore(-10)
                 self.removeOnlinePeer(peer)
                 if action != 'ping':
+                    logger.warn('Lost connection to ' + peer, terminal=True)
                     self.getOnlinePeers() # Will only add a new peer to pool if needed
         except ValueError:
             pass
@@ -359,7 +360,7 @@ class OnionrCommunicatorDaemon:
     def announce(self, peer):
         '''Announce to peers our address'''
         if announcenode.announce_node(self) == False:
-            logger.warn('Could not introduce node.')
+            logger.warn('Could not introduce node.', terminal=True)
 
     def detectAPICrash(self):
         '''exit if the api server crashes/stops'''
@@ -371,7 +372,7 @@ class OnionrCommunicatorDaemon:
             else:
                 # This executes if the api is NOT detected to be running
                 events.event('daemon_crash', onionr = self._core.onionrInst, data = {})
-                logger.error('Daemon detected API crash (or otherwise unable to reach API after long time), stopping...')
+                logger.fatal('Daemon detected API crash (or otherwise unable to reach API after long time), stopping...', terminal=True)
                 self.shutdown = True
 
         self.decrementThreadCount('detectAPICrash')
@@ -388,5 +389,4 @@ def run_file_exists(daemon):
     if os.path.isfile(daemon._core.dataDir + '.runcheck'):
         os.remove(daemon._core.dataDir + '.runcheck')
         return True
-    return False
\ No newline at end of file
diff --git a/onionr/communicatorutils/connectnewpeers.py b/onionr/communicatorutils/connectnewpeers.py
index ee1eb468..96e4ecac 100755
--- a/onionr/communicatorutils/connectnewpeers.py
+++ b/onionr/communicatorutils/connectnewpeers.py
@@ -72,7 +72,7 @@ def connect_new_peer_to_communicator(comm_inst, peer='', useBootstrap=False):
             # Add a peer to our list if it isn't already since it successfully connected
             networkmerger.mergeAdders(address, comm_inst._core)
             if address not in comm_inst.onlinePeers:
-                logger.info('Connected to ' + address)
+                logger.info('Connected to ' + address, terminal=True)
                 comm_inst.onlinePeers.append(address)
                 comm_inst.connectTimes[address] = comm_inst._core._utils.getEpoch()
                 retData = address
diff --git a/onionr/communicatorutils/downloadblocks.py b/onionr/communicatorutils/downloadblocks.py
index 720e0eae..aad5f884 100755
--- a/onionr/communicatorutils/downloadblocks.py
+++ b/onionr/communicatorutils/downloadblocks.py
@@ -110,6 +110,7 @@ def download_blocks_from_communicator(comm_inst):
         if removeFromQueue:
             try:
                 del comm_inst.blockQueue[blockHash] # remove from block queue both if success or false
+                logger.info('%s blocks remaining in queue' % [len(comm_inst.blockQueue)])
             except KeyError:
                 pass
             comm_inst.currentDownloading.remove(blockHash)
diff --git a/onionr/communicatorutils/lookupblocks.py b/onionr/communicatorutils/lookupblocks.py
index 0994d15c..e39e3c68 100755
--- a/onionr/communicatorutils/lookupblocks.py
+++ b/onionr/communicatorutils/lookupblocks.py
@@ -19,61 +19,65 @@
 '''
 import logger, onionrproofs
 def lookup_blocks_from_communicator(comm_inst):
-        logger.info('Looking up new blocks...')
-        tryAmount = 2
-        newBlocks = ''
-        existingBlocks = comm_inst._core.getBlockList()
-        triedPeers = [] # list of peers we've tried this time around
-        maxBacklog = 1560 # Max amount of *new* block hashes to have already in queue, to avoid memory exhaustion
-        lastLookupTime = 0 # Last time we looked up a particular peer's list
-        for i in range(tryAmount):
-            listLookupCommand = 'getblocklist' # This is defined here to reset it each time
-            if len(comm_inst.blockQueue) >= maxBacklog:
+    logger.info('Looking up new blocks...')
+    tryAmount = 2
+    newBlocks = ''
+    existingBlocks = comm_inst._core.getBlockList()
+    triedPeers = [] # list of peers we've tried this time around
+    maxBacklog = 1560 # Max amount of *new* block hashes to have already in queue, to avoid memory exhaustion
+    lastLookupTime = 0 # Last time we looked up a particular peer's list
+    new_block_count = 0
+    for i in range(tryAmount):
+        listLookupCommand = 'getblocklist' # This is defined here to reset it each time
+        if len(comm_inst.blockQueue) >= maxBacklog:
+            break
+        if not comm_inst.isOnline:
+            break
+        # check if disk allocation is used
+        if comm_inst._core._utils.storageCounter.isFull():
+            logger.debug('Not looking up new blocks due to maximum amount of allowed disk space used')
+            break
+        peer = comm_inst.pickOnlinePeer() # select random online peer
+        # if we've already tried all the online peers this time around, stop
+        if peer in triedPeers:
+            if len(comm_inst.onlinePeers) == len(triedPeers):
                 break
-            if not comm_inst.isOnline:
-                break
-            # check if disk allocation is used
-            if comm_inst._core._utils.storageCounter.isFull():
-                logger.debug('Not looking up new blocks due to maximum amount of allowed disk space used')
-                break
-            peer = comm_inst.pickOnlinePeer() # select random online peer
-            # if we've already tried all the online peers this time around, stop
-            if peer in triedPeers:
-                if len(comm_inst.onlinePeers) == len(triedPeers):
-                    break
-                else:
-                    continue
-            triedPeers.append(peer)
+            else:
+                continue
+        triedPeers.append(peer)
 
-            # Get the last time we looked up a peer's stamp to only fetch blocks since then.
-            # Saved in memory only for privacy reasons
-            try:
-                lastLookupTime = comm_inst.dbTimestamps[peer]
-            except KeyError:
-                lastLookupTime = 0
-            else:
-                listLookupCommand += '?date=%s' % (lastLookupTime,)
-            try:
-                newBlocks = comm_inst.peerAction(peer, listLookupCommand) # get list of new block hashes
-            except Exception as error:
-                logger.warn('Could not get new blocks from %s.' % peer, error = error)
-                newBlocks = False
-            else:
-                comm_inst.dbTimestamps[peer] = comm_inst._core._utils.getRoundedEpoch(roundS=60)
-            if newBlocks != False:
-                # if request was a success
-                for i in newBlocks.split('\n'):
-                    if comm_inst._core._utils.validateHash(i):
-                        # if newline seperated string is valid hash
-                        if not i in existingBlocks:
-                            # if block does not exist on disk and is not already in block queue
-                            if i not in comm_inst.blockQueue:
-                                if onionrproofs.hashMeetsDifficulty(i) and not comm_inst._core._blacklist.inBlacklist(i):
-                                    if len(comm_inst.blockQueue) <= 1000000:
-                                        comm_inst.blockQueue[i] = [peer] # add blocks to download queue
-                            else:
-                                if peer not in comm_inst.blockQueue[i]:
-                                    if len(comm_inst.blockQueue[i]) < 10:
-                                        comm_inst.blockQueue[i].append(peer)
-        comm_inst.decrementThreadCount('lookupBlocks')
-        return
\ No newline at end of file
+        # Get the last time we looked up a peer's stamp to only fetch blocks since then.
+        # Saved in memory only for privacy reasons
+        try:
+            lastLookupTime = comm_inst.dbTimestamps[peer]
+        except KeyError:
+            lastLookupTime = 0
+        else:
+            listLookupCommand += '?date=%s' % (lastLookupTime,)
+        try:
+            newBlocks = comm_inst.peerAction(peer, listLookupCommand) # get list of new block hashes
+        except Exception as error:
+            logger.warn('Could not get new blocks from %s.' % peer, error = error)
+            newBlocks = False
+        else:
+            comm_inst.dbTimestamps[peer] = comm_inst._core._utils.getRoundedEpoch(roundS=60)
+        if newBlocks != False:
+            # if request was a success
+            for i in newBlocks.split('\n'):
+                if comm_inst._core._utils.validateHash(i):
+                    # if newline seperated string is valid hash
+                    if not i in existingBlocks:
+                        # if block does not exist on disk and is not already in block queue
+                        if i not in comm_inst.blockQueue:
+                            if onionrproofs.hashMeetsDifficulty(i) and not comm_inst._core._blacklist.inBlacklist(i):
+                                if len(comm_inst.blockQueue) <= 1000000:
+                                    comm_inst.blockQueue[i] = [peer] # add blocks to download queue
+                                    new_block_count += 1
+                        else:
+                            if peer not in comm_inst.blockQueue[i]:
+                                if len(comm_inst.blockQueue[i]) < 10:
+                                    comm_inst.blockQueue[i].append(peer)
+    if new_block_count > 0:
+        logger.info('Discovered %s new blocks' % (new_block_count,), terminal=True)
+    comm_inst.decrementThreadCount('lookupBlocks')
+    return
\ No newline at end of file
diff --git a/onionr/logger.py b/onionr/logger.py
index a7abf715..01ada8ce 100755
--- a/onionr/logger.py
+++ b/onionr/logger.py
@@ -126,24 +126,24 @@ def get_file():
     return _outputfile
 
-def raw(data, fd = sys.stdout, sensitive = False):
+def raw(data, fd = sys.stdout, terminal = False):
     '''
         Outputs raw data to console without formatting
     '''
 
-    if get_settings() & OUTPUT_TO_CONSOLE:
+    if terminal and (get_settings() & OUTPUT_TO_CONSOLE):
         try:
             ts = fd.write('%s\n' % data)
         except OSError:
             pass
 
-    if get_settings() & OUTPUT_TO_FILE and not sensitive:
+    if get_settings() & OUTPUT_TO_FILE:
         try:
             with open(_outputfile, "a+") as f:
                 f.write(colors.filter(data) + '\n')
         except OSError:
             pass
 
-def log(prefix, data, color = '', timestamp=True, fd = sys.stdout, prompt = True, sensitive = False):
+def log(prefix, data, color = '', timestamp=True, fd = sys.stdout, prompt = True, terminal = False):
     '''
         Logs the data
         prefix : The prefix to the output
@@ -158,7 +158,7 @@ def log(prefix, data, color = '', timestamp=True, fd = sys.stdout, prompt = True
     if not get_settings() & USE_ANSI:
         output = colors.filter(output)
 
-    raw(output, fd = fd, sensitive = sensitive)
+    raw(output, fd = fd, terminal = terminal)
 
 def readline(message = ''):
     '''
@@ -210,37 +210,37 @@ def confirm(default = 'y', message = 'Are you sure %s? '):
     return default == 'y'
 
 # debug: when there is info that could be useful for debugging purposes only
-def debug(data, error = None, timestamp = True, prompt = True, sensitive = False, level = LEVEL_DEBUG):
+def debug(data, error = None, timestamp = True, prompt = True, terminal = False, level = LEVEL_DEBUG):
     if get_level() <= level:
-        log('/', data, timestamp = timestamp, prompt = prompt, sensitive = sensitive)
+        log('/', data, timestamp = timestamp, prompt = prompt, terminal = terminal)
     if not error is None:
         debug('Error: ' + str(error) + parse_error())
 
 # info: when there is something to notify the user of, such as the success of a process
-def info(data, timestamp = False, prompt = True, sensitive = False, level = LEVEL_INFO):
+def info(data, timestamp = False, prompt = True, terminal = False, level = LEVEL_INFO):
     if get_level() <= level:
-        log('+', data, colors.fg.green, timestamp = timestamp, prompt = prompt, sensitive = sensitive)
+        log('+', data, colors.fg.green, timestamp = timestamp, prompt = prompt, terminal = terminal)
 
 # warn: when there is a potential for something bad to happen
-def warn(data, error = None, timestamp = True, prompt = True, sensitive = False, level = LEVEL_WARN):
+def warn(data, error = None, timestamp = True, prompt = True, terminal = False, level = LEVEL_WARN):
     if not error is None:
         debug('Error: ' + str(error) + parse_error())
     if get_level() <= level:
-        log('!', data, colors.fg.orange, timestamp = timestamp, prompt = prompt, sensitive = sensitive)
+        log('!', data, colors.fg.orange, timestamp = timestamp, prompt = prompt, terminal = terminal)
 
 # error: when only one function, module, or process of the program encountered a problem and must stop
-def error(data, error = None, timestamp = True, prompt = True, sensitive = False, level = LEVEL_ERROR):
+def error(data, error = None, timestamp = True, prompt = True, terminal = False, level = LEVEL_ERROR):
     if get_level() <= level:
-        log('-', data, colors.fg.red, timestamp = timestamp, fd = sys.stderr, prompt = prompt, sensitive = sensitive)
+        log('-', data, colors.fg.red, timestamp = timestamp, fd = sys.stderr, prompt = prompt, terminal = terminal)
     if not error is None:
         debug('Error: ' + str(error) + parse_error())
 
 # fatal: when the something so bad has happened that the program must stop
-def fatal(data, error = None, timestamp=True, prompt = True, sensitive = False, level = LEVEL_FATAL):
+def fatal(data, error = None, timestamp=True, prompt = True, terminal = False, level = LEVEL_FATAL):
     if not error is None:
-        debug('Error: ' + str(error) + parse_error(), sensitive = sensitive)
+        debug('Error: ' + str(error) + parse_error(), terminal = terminal)
     if get_level() <= level:
-        log('#', data, colors.bg.red + colors.fg.green + colors.bold, timestamp = timestamp, fd = sys.stderr, prompt = prompt, sensitive = sensitive)
+        log('#', data, colors.bg.red + colors.fg.green + colors.bold, timestamp = timestamp, fd = sys.stderr, prompt = prompt, terminal = terminal)
 
 # returns a formatted error message
 def parse_error():
diff --git a/onionr/netcontroller.py b/onionr/netcontroller.py
index 74087500..b2d8237f 100755
--- a/onionr/netcontroller.py
+++ b/onionr/netcontroller.py
@@ -124,14 +124,14 @@ HiddenServicePort 80 ''' + self.apiServerIP + ''':''' + str(self.hsPort)
         try:
             tor = subprocess.Popen([self.torBinary, '-f', self.torConfigLocation], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
         except FileNotFoundError:
-            logger.fatal("Tor was not found in your path or the Onionr directory. Please install Tor and try again.")
+            logger.fatal("Tor was not found in your path or the Onionr directory. Please install Tor and try again.", terminal=True)
             sys.exit(1)
         else:
             # Test Tor Version
             torVersion = subprocess.Popen([self.torBinary, '--version'], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
             for line in iter(torVersion.stdout.readline, b''):
                 if 'Tor 0.2.' in line.decode():
-                    logger.error('Tor 0.3+ required')
+                    logger.fatal('Tor 0.3+ required', terminal=True)
                     sys.exit(1)
                     break
             torVersion.kill()
@@ -140,17 +140,18 @@ HiddenServicePort 80 ''' + self.apiServerIP + ''':''' + str(self.hsPort)
         try:
             for line in iter(tor.stdout.readline, b''):
                 if 'bootstrapped 100' in line.decode().lower():
+                    logger.info(line.decode())
                     break
                 elif 'opening socks listener' in line.decode().lower():
                     logger.debug(line.decode().replace('\n', ''))
             else:
-                logger.fatal('Failed to start Tor. Maybe a stray instance of Tor used by Onionr is still running? This can also be a result of file permissions being too open')
+                logger.fatal('Failed to start Tor. Maybe a stray instance of Tor used by Onionr is still running? This can also be a result of file permissions being too open', terminal=True)
                 return False
         except KeyboardInterrupt:
-            logger.fatal('Got keyboard interrupt. Onionr will exit soon.', timestamp = False, level = logger.LEVEL_IMPORTANT)
+            logger.fatal('Got keyboard interrupt. Onionr will exit soon.', timestamp = False, level = logger.LEVEL_IMPORTANT, terminal=True)
             return False
 
-        logger.debug('Finished starting Tor.', timestamp=True)
+        logger.info('Finished starting Tor.', terminal=True)
         self.readyState = True
 
         try:
diff --git a/onionr/onionr.py b/onionr/onionr.py
index 16ec1839..6306acca 100755
--- a/onionr/onionr.py
+++ b/onionr/onionr.py
@@ -159,7 +159,7 @@ class Onionr:
             sys.stderr.write(file.read().decode().replace('P', logger.colors.fg.pink).replace('W', logger.colors.reset + logger.colors.bold).replace('G', logger.colors.fg.green).replace('\n', logger.colors.reset + '\n').replace('B', logger.colors.bold).replace('A', '%s' % API_VERSION).replace('V', ONIONR_VERSION))
 
         if not message is None:
-            logger.info(logger.colors.fg.lightgreen + '-> ' + str(message) + logger.colors.reset + logger.colors.fg.lightgreen + ' <-\n', sensitive=True)
+            logger.info(logger.colors.fg.lightgreen + '-> ' + str(message) + logger.colors.reset + logger.colors.fg.lightgreen + ' <-\n', terminal=True)
 
     def deleteRunFiles(self):
         try:
@@ -238,7 +238,7 @@ class Onionr:
         return config.get('client.webpassword')
 
     def printWebPassword(self):
-        logger.info(self.getWebPassword(), sensitive = True)
+        logger.info(self.getWebPassword(), term_only = True)
 
     def getHelp(self):
         return self.cmdhelp
@@ -289,11 +289,11 @@ class Onionr:
             Displays the Onionr version
         '''
 
-        function('Onionr v%s (%s) (API v%s)' % (ONIONR_VERSION, platform.machine(), API_VERSION))
+        function('Onionr v%s (%s) (API v%s)' % (ONIONR_VERSION, platform.machine(), API_VERSION), terminal=True)
         if verbosity >= 1:
-            function(ONIONR_TAGLINE)
+            function(ONIONR_TAGLINE, terminal=True)
         if verbosity >= 2:
-            function('Running on %s %s' % (platform.platform(), platform.release()))
+            function('Running on %s %s' % (platform.platform(), platform.release()), terminal=True)
 
     def doPEX(self):
         '''make communicator do pex'''
diff --git a/onionr/onionrcommands/banblocks.py b/onionr/onionrcommands/banblocks.py
index a9caa867..fe50d16a 100755
--- a/onionr/onionrcommands/banblocks.py
+++ b/onionr/onionrcommands/banblocks.py
@@ -30,10 +30,10 @@ def ban_block(o_inst):
                 o_inst.onionrCore._blacklist.addToDB(ban)
                 o_inst.onionrCore.removeBlock(ban)
             except Exception as error:
-                logger.error('Could not blacklist block', error=error)
+                logger.error('Could not blacklist block', error=error, terminal=True)
             else:
-                logger.info('Block blacklisted')
+                logger.info('Block blacklisted', terminal=True)
         else:
-            logger.warn('That block is already blacklisted')
+            logger.warn('That block is already blacklisted', terminal=True)
     else:
-        logger.error('Invalid block hash')
\ No newline at end of file
+        logger.error('Invalid block hash', terminal=True)
\ No newline at end of file
diff --git a/onionr/onionrcommands/daemonlaunch.py b/onionr/onionrcommands/daemonlaunch.py
index 88e43fae..d65a33d6 100755
--- a/onionr/onionrcommands/daemonlaunch.py
+++ b/onionr/onionrcommands/daemonlaunch.py
@@ -40,11 +40,6 @@ def daemon(o_inst):
     Thread(target=api.API, args=(o_inst, o_inst.debug, onionr.API_VERSION)).start()
     Thread(target=api.PublicAPI, args=[o_inst.getClientApi()]).start()
 
-    try:
-        time.sleep(0)
-    except KeyboardInterrupt:
-        logger.debug('Got keyboard interrupt, shutting down...')
-        _proper_shutdown(o_inst)
 
     apiHost = ''
     while apiHost == '':
@@ -56,10 +51,17 @@ def daemon(o_inst):
         time.sleep(0.5)
     #onionr.Onionr.setupConfig('data/', self = o_inst)
 
+    logger.raw('', terminal=True)
+    # print nice header thing :)
+    if o_inst.onionrCore.config.get('general.display_header', True):
+        o_inst.header()
+    o_inst.version(verbosity = 5, function = logger.info)
+    logger.debug('Python version %s' % platform.python_version())
+
     if o_inst._developmentMode:
-        logger.warn('DEVELOPMENT MODE ENABLED (NOT RECOMMENDED)', timestamp = False)
+        logger.warn('DEVELOPMENT MODE ENABLED', timestamp = False, terminal=True)
     net = NetController(o_inst.onionrCore.config.get('client.public.port', 59497), apiServerIP=apiHost)
-    logger.debug('Tor is starting...')
+    logger.info('Tor is starting...', terminal=True)
     if not net.startTor():
         o_inst.onionrUtils.localCommand('shutdown')
         sys.exit(1)
@@ -67,7 +69,7 @@ def daemon(o_inst):
         logger.debug('Started .onion service: %s' % (logger.colors.underline + net.myID))
     else:
         logger.debug('.onion service disabled')
-    logger.debug('Using public key: %s' % (logger.colors.underline + o_inst.onionrCore._crypto.pubKey))
+    logger.info('Using public key: %s' % (logger.colors.underline + o_inst.onionrCore._crypto.pubKey[:52]), terminal=True)
 
     try:
         time.sleep(1)
@@ -81,14 +83,6 @@ def daemon(o_inst):
     while o_inst.communicatorInst is None:
         time.sleep(0.1)
 
-    # print nice header thing :)
-    if o_inst.onionrCore.config.get('general.display_header', True):
-        o_inst.header()
-
-    # print out debug info
-    o_inst.version(verbosity = 5, function = logger.debug)
-    logger.debug('Python version %s' % platform.python_version())
-
     logger.debug('Started communicator.')
 
     events.event('daemon_start', onionr = o_inst)
@@ -124,7 +118,7 @@ def kill_daemon(o_inst):
         Shutdown the Onionr daemon
     '''
 
-    logger.warn('Stopping the running daemon...', timestamp = False)
+    logger.warn('Stopping the running daemon...', timestamp = False, terminal=True)
     try:
         events.event('daemon_stop', onionr = o_inst)
         net = NetController(o_inst.onionrCore.config.get('client.port', 59496))
@@ -135,12 +129,12 @@ def kill_daemon(o_inst):
         net.killTor()
     except Exception as e:
-        logger.error('Failed to shutdown daemon.', error = e, timestamp = False)
+        logger.error('Failed to shutdown daemon.', error = e, timestamp = False, terminal=True)
 
     return
 
 def start(o_inst, input = False, override = False):
     if os.path.exists('.onionr-lock') and not override:
-        logger.fatal('Cannot start. Daemon is already running, or it did not exit cleanly.\n(if you are sure that there is not a daemon running, delete .onionr-lock & try again).')
+        logger.fatal('Cannot start. Daemon is already running, or it did not exit cleanly.\n(if you are sure that there is not a daemon running, delete .onionr-lock & try again).', terminal=True)
     else:
         if not o_inst.debug and not o_inst._developmentMode:
             lockFile = open('.onionr-lock', 'w')
diff --git a/onionr/onionrcommands/onionrstatistics.py b/onionr/onionrcommands/onionrstatistics.py
index 918d8445..a28fb7db 100755
--- a/onionr/onionrcommands/onionrstatistics.py
+++ b/onionr/onionrcommands/onionrstatistics.py
@@ -65,21 +65,21 @@ def show_stats(o_inst):
         groupsize = width - prewidth - len('[+] ')
 
         # generate stats table
-        logger.info(colors['title'] + 'Onionr v%s Statistics' % onionr.ONIONR_VERSION + colors['reset'])
-        logger.info(colors['border'] + '-' * (maxlength + 1) + '+' + colors['reset'])
+        logger.info(colors['title'] + 'Onionr v%s Statistics' % onionr.ONIONR_VERSION + colors['reset'], terminal=True)
+        logger.info(colors['border'] + '-' * (maxlength + 1) + '+' + colors['reset'], terminal=True)
         for key, val in messages.items():
             if not (type(val) is bool and val is True):
                 val = [str(val)[i:i + groupsize] for i in range(0, len(str(val)), groupsize)]
-                logger.info(colors['key'] + str(key).rjust(maxlength) + colors['reset'] + colors['border'] + ' | ' + colors['reset'] + colors['val'] + str(val.pop(0)) + colors['reset'])
+                logger.info(colors['key'] + str(key).rjust(maxlength) + colors['reset'] + colors['border'] + ' | ' + colors['reset'] + colors['val'] + str(val.pop(0)) + colors['reset'], terminal=True)
 
                 for value in val:
-                    logger.info(' ' * maxlength + colors['border'] + ' | ' + colors['reset'] + colors['val'] + str(value) + colors['reset'])
+                    logger.info(' ' * maxlength + colors['border'] + ' | ' + colors['reset'] + colors['val'] + str(value) + colors['reset'], terminal=True)
             else:
-                logger.info(colors['border'] + '-' * (maxlength + 1) + '+' + colors['reset'])
-        logger.info(colors['border'] + '-' * (maxlength + 1) + '+' + colors['reset'])
+                logger.info(colors['border'] + '-' * (maxlength + 1) + '+' + colors['reset'], terminal=True)
+        logger.info(colors['border'] + '-' * (maxlength + 1) + '+' + colors['reset'], terminal=True)
     except Exception as e:
-        logger.error('Failed to generate statistics table.', error = e, timestamp = False)
+        logger.error('Failed to generate statistics table.', error = e, timestamp = False, terminal=True)
 
 def show_details(o_inst):
     details = {
@@ -90,7 +90,7 @@ def show_details(o_inst):
     }
 
     for detail in details:
-        logger.info('%s%s: \n%s%s\n' % (logger.colors.fg.lightgreen, detail, logger.colors.fg.green, details[detail]), sensitive = True)
+        logger.info('%s%s: \n%s%s\n' % (logger.colors.fg.lightgreen, detail, logger.colors.fg.green, details[detail]), terminal = True)
 
 def show_peers(o_inst):
     randID = str(uuid.uuid4())
diff --git a/onionr/onionrcommands/openwebinterface.py b/onionr/onionrcommands/openwebinterface.py
index 2bd0cdd3..3e68e3ea 100755
--- a/onionr/onionrcommands/openwebinterface.py
+++ b/onionr/onionrcommands/openwebinterface.py
@@ -26,5 +26,5 @@ def open_home(o_inst):
         logger.error('Onionr seems to not be running (could not get api host)')
     else:
         url = 'http://%s/#%s' % (url, o_inst.onionrCore.config.get('client.webpassword'))
-        logger.info('If Onionr does not open automatically, use this URL: ' + url)
+        logger.info('If Onionr does not open automatically, use this URL: ' + url, terminal=True)
         webbrowser.open_new_tab(url)
\ No newline at end of file
diff --git a/onionr/onionrutils.py b/onionr/onionrutils.py
index 887c421a..4aa02d76 100755
--- a/onionr/onionrutils.py
+++ b/onionr/onionrutils.py
@@ -519,7 +519,7 @@ class OnionrUtils:
             except KeyboardInterrupt:
                 raise KeyboardInterrupt
             except ValueError as e:
-                logger.debug('Failed to make GET request to %s' % url, error = e, sensitive = True)
+                pass
             except onionrexceptions.InvalidAPIVersion:
                 if 'X-API' in response_headers:
                     logger.debug('Using API version %s. Cannot communicate with node\'s API version of %s.' % (API_VERSION, response_headers['X-API']))
diff --git a/onionr/static-data/default-plugins/flow/main.py b/onionr/static-data/default-plugins/flow/main.py
index de406207..0162019f 100755
--- a/onionr/static-data/default-plugins/flow/main.py
+++ b/onionr/static-data/default-plugins/flow/main.py
@@ -40,7 +40,7 @@ class OnionrFlow:
         return
 
     def start(self):
-        logger.warn("Please note: everything said here is public, even if a random channel name is used.")
+        logger.warn("Please note: everything said here is public, even if a random channel name is used.", terminal=True)
         message = ""
         self.flowRunning = True
         newThread = threading.Thread(target=self.showOutput)
@@ -63,7 +63,7 @@ class OnionrFlow:
 
         if len(message) > 0:
             self.myCore.insertBlock(message, header='txt', expire=expireTime, meta={'ch': self.channel})
-        logger.info("Flow is exiting, goodbye")
+        logger.info("Flow is exiting, goodbye", terminal=True)
         return
 
     def showOutput(self):
@@ -81,11 +81,11 @@ class OnionrFlow:
                         continue
                     if not self.flowRunning:
                         break
-                    logger.info('\n------------------------', prompt = False)
+                    logger.info('\n------------------------', prompt = False, terminal=True)
                     content = block.getContent()
                     # Escape new lines, remove trailing whitespace, and escape ansi sequences
                    content = self.myCore._utils.escapeAnsi(content.replace('\n', '\\n').replace('\r', '\\r').strip())
-                    logger.info(block.getDate().strftime("%m/%d %H:%M") + ' - ' + logger.colors.reset + content, prompt = False)
+                    logger.info(block.getDate().strftime("%m/%d %H:%M") + ' - ' + logger.colors.reset + content, prompt = False, terminal=True)
                     self.alreadyOutputed.append(block.getHash())
             time.sleep(5)
         except KeyboardInterrupt:
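
The core behavioral change in this patch is the onionr/logger.py hunk above: the old sensitive flag is dropped and console output becomes opt-in through a new terminal keyword argument, while writing to the log file happens unconditionally. Call sites that should stay visible to the user (peer connections, Tor startup, statistics, the flow plugin) now pass terminal=True; everything else reaches only the log file. A minimal, self-contained sketch of that gating behavior follows; the bitmask values, message prefix, and log file name here are illustrative stand-ins, not the module's actual ones.

import sys

OUTPUT_TO_CONSOLE = 0b001  # illustrative setting flags, not the real module's values
OUTPUT_TO_FILE = 0b010
_settings = OUTPUT_TO_CONSOLE | OUTPUT_TO_FILE
_outputfile = 'onionr-example.log'  # stand-in log path

def raw(data, fd=sys.stdout, terminal=False):
    # Console output now requires the caller to pass terminal=True explicitly
    if terminal and (_settings & OUTPUT_TO_CONSOLE):
        fd.write('%s\n' % data)
    # File logging is unconditional; the old "sensitive" skip is gone
    if _settings & OUTPUT_TO_FILE:
        with open(_outputfile, 'a+') as f:
            f.write(data + '\n')

def info(data, terminal=False):
    raw('[+] ' + data, terminal=terminal)

info('Connected to example.onion', terminal=True)  # printed to the terminal and logged
info('Looking up new blocks...')                   # written to the log file only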