fixed importer
commit faf85c10d2
parent 4bbbff7651
@@ -11,6 +11,7 @@ bootstrap_file_location = 'static-data/bootstrap-nodes.txt'
 data_nonce_file = home + 'block-nonces.dat'
 forward_keys_file = home + 'forward-keys.db'
 cached_storage = home + 'cachedstorage.dat'
+export_location = home + 'block-export/'
 
 tor_hs_address_file = home + 'hs/hostname'
 
@@ -43,7 +43,7 @@ import netcontroller
 from onionrblockapi import Block
 import onionrexceptions, communicator, setupconfig
 import onionrcommands as commands # Many command definitions are here
-from utils import identifyhome
+from utils import identifyhome, hastor
 from coredb import keydb
 import filepaths
 
@@ -77,10 +77,6 @@ class Onionr:
         # Load global configuration data
         data_exists = Onionr.setupConfig(self)
 
-        if netcontroller.tor_binary() is None:
-            logger.error('Tor is not installed', terminal=True)
-            sys.exit(1)
-
         # If block data folder does not exist
         if not os.path.exists(self.dataDir + 'blocks/'):
            os.mkdir(self.dataDir + 'blocks/')
@@ -27,6 +27,8 @@ from onionrutils import localcommand
 import filepaths
 from coredb import daemonqueue
 from onionrcrypto import getourkeypair
+from utils import hastor
+
 def _proper_shutdown(o_inst):
     localcommand.local_command('shutdown')
     sys.exit(1)
@@ -35,6 +37,9 @@ def daemon(o_inst):
     '''
         Starts the Onionr communication daemon
     '''
+    if not hastor.has_tor():
+        logger.error("Tor is not present in system path or Onionr directory", terminal=True)
+        sys.exit(1)
 
     # remove runcheck if it exists
     if os.path.isfile(filepaths.run_check_file):
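Note on the new guard above: utils.hastor.has_tor() is not defined anywhere in this diff, so the following is only a minimal standalone sketch of the kind of check the log message implies (a lookup on the system PATH plus one extra directory). The function name, the extra_dir parameter, and the search locations are assumptions; Onionr's real helper may behave differently.

import os
import shutil

def has_tor(extra_dir: str = '.') -> bool:
    """Return True if a 'tor' executable is findable on PATH or inside extra_dir.

    Hypothetical stand-in for utils.hastor.has_tor(); the real helper may also
    look for a Tor binary bundled under the Onionr home directory.
    """
    if shutil.which('tor') is not None:
        return True
    candidate = os.path.join(extra_dir, 'tor')
    return os.path.isfile(candidate) and os.access(candidate, os.X_OK)

if __name__ == '__main__':
    if not has_tor():
        print("Tor is not present in system path or Onionr directory")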
@@ -19,21 +19,18 @@
 '''
 import sys, os
 import logger, onionrstorage
+from utils import createdirs
 from onionrutils import stringvalidators
+import filepaths
 def doExport(o_inst, bHash):
-    exportDir = o_inst.dataDir + 'block-export/'
-    if not os.path.exists(exportDir):
-        if os.path.exists(o_inst.dataDir):
-            os.mkdir(exportDir)
-        else:
-            logger.error('Onionr Not initialized', terminal=True)
-    data = onionrstorage.getData(o_inst.onionrCore, bHash)
-    with open('%s/%s.dat' % (exportDir, bHash), 'wb') as exportFile:
+    createdirs.create_dirs()
+    data = onionrstorage.getData(bHash)
+    with open('%s/%s.dat' % (filepaths.export_location, bHash), 'wb') as exportFile:
         exportFile.write(data)
     logger.info('Block exported as file', terminal=True)
 
 def export_block(o_inst):
-    exportDir = o_inst.dataDir + 'block-export/'
+    exportDir = filepaths.export_location
     try:
         assert stringvalidators.validate_hash(sys.argv[2])
     except (IndexError, AssertionError):
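For context, the rewritten doExport boils down to "ensure the export directory exists, then write the raw block bytes to <export_location>/<hash>.dat". A self-contained sketch of that pattern, using placeholder names and values instead of the project's filepaths/createdirs/onionrstorage helpers:

import os

def export_block_data(export_dir: str, block_hash: str, data: bytes) -> str:
    """Write raw block bytes to <export_dir>/<block_hash>.dat and return the path."""
    os.makedirs(export_dir, exist_ok=True)  # stands in for createdirs.create_dirs()
    out_path = os.path.join(export_dir, '%s.dat' % block_hash)
    with open(out_path, 'wb') as export_file:
        export_file.write(data)
    return out_path

# Example usage with made-up values
print(export_block_data('block-export-demo', 'deadbeef' * 8, b'example block payload'))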
@@ -85,7 +85,7 @@ def process_block_metadata(blockHash):
             blockmetadb.update_block_info(blockHash, 'expire', expireTime)
         if not blockType is None:
             blockmetadb.update_block_info(blockHash, 'dataType', blockType)
-        #onionrevents.event('processblocks', data = {'block': myBlock, 'type': blockType, 'signer': signer, 'validSig': valid}, onionr = core_inst.onionrInst)
+        onionrevents.event('processblocks', data = {'block': myBlock, 'type': blockType, 'signer': signer, 'validSig': valid})
     else:
         pass
 
@@ -21,12 +21,12 @@ import glob
 import logger
 from onionrutils import blockmetadata
 from coredb import blockmetadb
-import filepaths, onionrcrypto
+import filepaths
+import onionrcrypto as crypto
 def import_new_blocks(scanDir=''):
     '''
         This function is intended to scan for new blocks ON THE DISK and import them
     '''
-    crypto = onionrcrypto.OnionrCrypto()
     blockList = blockmetadb.get_block_list()
     exist = False
     if scanDir == '':
@@ -39,7 +39,7 @@ def import_new_blocks(scanDir=''):
             logger.info('Found new block on dist %s' % block, terminal=True)
             with open(block, 'rb') as newBlock:
                 block = block.replace(scanDir, '').replace('.dat', '')
-                if crypto.sha3Hash(newBlock.read()) == block.replace('.dat', ''):
+                if crypto.hashers.sha3_hash(newBlock.read()) == block.replace('.dat', ''):
                     blockmetadb.add_to_block_DB(block.replace('.dat', ''), dataSaved=True)
                     logger.info('Imported block %s.' % block, terminal=True)
                     blockmetadata.process_block_metadata(block)
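The import guard now compares crypto.hashers.sha3_hash(newBlock.read()) against the block's file name. Assuming that helper returns a hex SHA3-256 digest (an assumption; its definition is not part of this diff), the check is equivalent to this standalone snippet:

import hashlib
import os

def block_name_matches_contents(path: str) -> bool:
    """True if a <hash>.dat file is named after the SHA3-256 hex digest of its contents.

    Mirrors the importer's guard, assuming sha3_hash yields a hex digest.
    """
    expected = os.path.basename(path).replace('.dat', '')
    with open(path, 'rb') as block_file:
        digest = hashlib.sha3_256(block_file.read()).hexdigest()
    return digest == expected

# Example: write a block under its own hash, then verify it
payload = b'example block payload'
name = hashlib.sha3_256(payload).hexdigest() + '.dat'
with open(name, 'wb') as f:
    f.write(payload)
print(block_name_matches_contents(name))  # True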
@@ -4,12 +4,10 @@ import dbcreator, filepaths
 home = identifyhome.identify_home()
 
 def create_dirs():
-    if not os.path.exists(home):
-        os.mkdir(home)
-    if not os.path.exists(filepaths.block_data_location):
-        os.mkdir(filepaths.block_data_location)
-    if not os.path.exists(filepaths.contacts_location):
-        os.mkdir(filepaths.contacts_location)
+    gen_dirs = [home, filepaths.block_data_location, filepaths.contacts_location, filepaths.export_location]
+    for path in gen_dirs:
+        if not os.path.exists(path):
+            os.mkdir(path)
 
     for db in dbcreator.create_funcs:
         try:
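The create_dirs rewrite folds the repeated exists/mkdir pairs into one loop over gen_dirs, which now also covers filepaths.export_location. The same pattern in isolation, using illustrative placeholder paths rather than the project's filepaths values:

import os

home = os.path.expanduser('~/.test-onionr/')  # placeholder for identifyhome.identify_home()
gen_dirs = [home,
            home + 'blocks/',        # stands in for filepaths.block_data_location
            home + 'contacts/',      # stands in for filepaths.contacts_location
            home + 'block-export/']  # stands in for filepaths.export_location

# Parent directories are created first because os.mkdir does not create parents.
for path in gen_dirs:
    if not os.path.exists(path):
        os.mkdir(path)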