Onionr/onionr/onionrblockapi.py

807 lines
26 KiB
Python
Raw Normal View History

2018-05-15 06:43:29 +00:00
'''
Onionr - P2P Anonymous Storage Network
2018-05-15 06:43:29 +00:00
This class contains the OnionrBlocks class which is a class for working with Onionr blocks
'''
'''
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <https://www.gnu.org/licenses/>.
'''
2018-10-08 02:25:59 +00:00
import core as onionrcore, logger, config, onionrexceptions, nacl.exceptions, onionrusers
2019-01-05 22:16:36 +00:00
import json, os, sys, datetime, base64, onionrstorage
2018-05-15 06:43:29 +00:00
class Block:
    '''
    Represents a single Onionr block and provides parsing, decryption,
    signature verification, and save/load helpers for it.
    '''

    # Ordered list of cached block hashes (oldest first), used for eviction.
    blockCacheOrder = list() # NEVER write your own code that writes to this!
    # Maps block hash -> raw block data; read via Block.getCache().
    blockCache = dict() # should never be accessed directly, look at Block.getCache()
def __init__(self, hash = None, core = None, type = None, content = None, expire=None):
    '''
    Initialize a Block, optionally loading its contents from storage by hash.

    Inputs:
    - hash (str/bytes): hash of an existing block to load, or None
    - core (Core): an existing Core instance, or None to create a new one
    - type (str): the block type header
    - content (str): the block content
    - expire (int): optional expiration time for the block

    Raises:
    - onionrexceptions.InvalidHexHash: if `hash` is given but not a valid hash
    '''
    # take from arguments
    # sometimes people input a bytes object instead of str in `hash`
    if (not hash is None) and isinstance(hash, bytes):
        hash = hash.decode()

    self.hash = hash
    self.core = core
    self.btype = type
    self.bcontent = content
    self.expire = expire

    # initialize variables
    self.valid = True
    self.raw = None
    self.signed = False
    self.signature = None
    self.signedData = None
    self.blockFile = None
    self.parent = None
    self.bheader = {}
    self.bmetadata = {}
    self.isEncrypted = False
    self.decrypted = False
    self.signer = None
    self.validSig = False

    # handle arguments
    if self.getCore() is None:
        self.core = onionrcore.Core()

    # update the blocks' contents if it exists
    if not self.getHash() is None:
        if not self.core._utils.validateHash(self.hash):
            logger.debug('Block hash %s is invalid.' % self.getHash())
            raise onionrexceptions.InvalidHexHash('Block hash is invalid.')
        elif not self.update():
            logger.debug('Failed to open block %s.' % self.getHash())
        else:
            pass
            #logger.debug('Did not update block.')
# logic

def decrypt(self, anonymous = True, encodedData = True):
    '''
    Decrypt an asymmetrically encrypted block, loading the decrypted
    content, metadata, signature and signer into this object's fields.

    Inputs:
    - anonymous (bool): passed through to pubKeyDecrypt
    - encodedData (bool): whether the encrypted fields are base64-encoded

    Outputs:
    - (bool): True if decryption succeeded (or was already done), else False
    '''
    if self.decrypted:
        return True
    retData = False
    core = self.getCore()

    # decrypt data
    if self.getHeader('encryptType') == 'asym':
        try:
            self.bcontent = core._crypto.pubKeyDecrypt(self.bcontent, anonymous=anonymous, encodedData=encodedData)
            bmeta = core._crypto.pubKeyDecrypt(self.bmetadata, anonymous=anonymous, encodedData=encodedData)
            try:
                bmeta = bmeta.decode()
            except AttributeError:
                # yet another bytes fix
                pass
            self.bmetadata = json.loads(bmeta)
            self.signature = core._crypto.pubKeyDecrypt(self.signature, anonymous=anonymous, encodedData=encodedData)
            self.signer = core._crypto.pubKeyDecrypt(self.signer, anonymous=anonymous, encodedData=encodedData)
            self.signedData = json.dumps(self.bmetadata) + self.bcontent.decode()

            # apply a second (forward-secret) decryption layer when flagged in metadata
            try:
                assert self.bmetadata['forwardEnc'] is True
            except (AssertionError, KeyError) as e:
                pass
            else:
                try:
                    self.bcontent = onionrusers.OnionrUser(self.getCore(), self.signer).forwardDecrypt(self.bcontent)
                except (onionrexceptions.DecryptionError, nacl.exceptions.CryptoError) as e:
                    # forward decryption is best-effort; log and keep the outer-layer content
                    logger.error(str(e))
                    pass
        except nacl.exceptions.CryptoError:
            logger.debug('Could not decrypt block. Either invalid key or corrupted data')
        else:
            retData = True
            self.decrypted = True
    else:
        # symmetric blocks are not handled by this method
        logger.warn('symmetric decryption is not yet supported by this API')

    return retData
def verifySig(self):
    '''
    Check whether this block's signature was produced by its claimed signer.

    Outputs:
    - (bool): True if the signature verifies, False otherwise
    '''
    crypto = self.getCore()._crypto
    self.validSig = bool(crypto.edVerify(data=self.signedData, key=self.signer, sig=self.signature, encodedData=True))
    return self.validSig
2018-07-17 07:18:17 +00:00
2018-05-15 06:43:29 +00:00
def update(self, data = None, file = None):
    '''
    Loads data from a block in to the current object.

    Inputs:
    - data (str):
      - if None: will load from file/storage by hash
      - else: will load from the `data` string
    - file (str):
      - if None: will load from wherever the block is stored by hash
      - else: will load from the file specified in this parameter

    Outputs:
    - (bool): indicates whether or not the operation was successful
    '''
    try:
        # import from string
        blockdata = data

        # import from file
        if blockdata is None:
            filelocation = file
            readfile = True

            if filelocation is None:
                if self.getHash() is None:
                    return False
                elif self.getHash() in Block.getCache():
                    # get the block from cache, if it's in it
                    blockdata = Block.getCache(self.getHash())
                    readfile = False

                # read from file if it's still None
                if blockdata is None:
                    filelocation = self.core.dataDir + 'blocks/%s.dat' % self.getHash()

            if readfile:
                blockdata = onionrstorage.getData(self.core, self.getHash()).decode()
                #with open(filelocation, 'rb') as f:
                #    blockdata = f.read().decode()

            self.blockFile = filelocation
        else:
            self.blockFile = None

        # parse block: first line is the JSON header, the rest is content
        self.raw = str(blockdata)
        self.bheader = json.loads(self.getRaw()[:self.getRaw().index('\n')])
        self.bcontent = self.getRaw()[self.getRaw().index('\n') + 1:]

        # encrypted blocks keep their metadata opaque until decrypt() is called
        if ('encryptType' in self.bheader) and (self.bheader['encryptType'] in ('asym', 'sym')):
            self.bmetadata = self.getHeader('meta', None)
            self.isEncrypted = True
        else:
            self.bmetadata = json.loads(self.getHeader('meta', None))

        self.parent = self.getMetadata('parent', None)
        self.btype = self.getMetadata('type', None)
        self.signed = ('sig' in self.getHeader() and self.getHeader('sig') != '')
        # TODO: detect if signer is hash of pubkey or not
        self.signer = self.getHeader('signer', None)
        self.signature = self.getHeader('sig', None)
        # signed data is jsonMeta + block content (no linebreak)
        self.signedData = (None if not self.isSigned() else self.getHeader('meta') + self.getContent())
        self.date = self.getCore().getBlockDate(self.getHash())
        self.claimedTime = self.getHeader('time', None)

        if not self.getDate() is None:
            self.date = datetime.datetime.fromtimestamp(self.getDate())

        self.valid = True

        # cache the raw data if it is small enough
        if len(self.getRaw()) <= config.get('allocations.blockCache', 500000):
            self.cache()

        return True
    except Exception as e:
        logger.error('Failed to parse block %s.' % self.getHash(), error = e, timestamp = False)

        # if block can't be parsed, it's a waste of precious space. Throw it away.
        if not self.delete():
            logger.error('Failed to delete invalid block %s.' % self.getHash(), error = e)
        else:
            logger.debug('Deleted invalid block %s.' % self.getHash(), timestamp = False)

    self.valid = False
    return False
def delete(self):
    '''
    Remove this block's file and database records, if present.

    Outputs:
    - (bool): whether or not the operation was successful
    '''
    if not self.exists():
        return False
    os.remove(self.getBlockFile())
    self.getCore().removeBlock(self.getHash())
    return True
def save(self, sign = False, recreate = True):
    '''
    Saves a block to file and imports it into Onionr

    Inputs:
    - sign (bool): whether or not to sign the block before saving
    - recreate (bool): if the block already exists, whether or not to recreate the block and save under a new hash

    Outputs:
    - (str/bool): the hash of the saved block on success, False on failure
    '''
    try:
        if self.isValid() is True:
            '''
            if (not self.getBlockFile() is None) and (recreate is True):
                onionrstorage.store(self.core, self.getRaw().encode())
                #with open(self.getBlockFile(), 'wb') as blockFile:
                #    blockFile.write(self.getRaw().encode())
            else:
            '''
            # insert the block via Core; insertBlock returns the new hash or False
            self.hash = self.getCore().insertBlock(self.getRaw(), header = self.getType(), sign = sign, meta = self.getMetadata(), expire = self.getExpire())
            if self.hash != False:
                # reload this object's fields from the stored block
                self.update()

            return self.getHash()
        else:
            logger.warn('Not writing block; it is invalid.')
    except Exception as e:
        logger.error('Failed to save block.', error = e, timestamp = False)

    return False
2018-05-15 06:43:29 +00:00
# getters
2018-10-02 05:02:05 +00:00
def getExpire(self):
'''
Returns the expire time for a block
Outputs:
- (int): the expire time for a block, or None
'''
return self.expire
2018-05-15 06:43:29 +00:00
def getHash(self):
2018-05-16 02:12:23 +00:00
'''
Returns the hash of the block if saved to file
Outputs:
- (str): the hash of the block, or None
'''
2018-05-15 06:43:29 +00:00
return self.hash
def getCore(self):
2018-05-16 02:12:23 +00:00
'''
Returns the Core instance being used by the Block
Outputs:
- (Core): the Core instance
'''
2018-05-15 06:43:29 +00:00
return self.core
def getType(self):
2018-05-16 02:12:23 +00:00
'''
Returns the type of the block
Outputs:
- (str): the type of the block
'''
2018-05-15 06:43:29 +00:00
return self.btype
2018-05-16 01:47:58 +00:00
def getRaw(self):
2018-05-16 02:12:23 +00:00
'''
Returns the raw contents of the block, if saved to file
Outputs:
- (str): the raw contents of the block, or None
'''
2018-05-16 01:47:58 +00:00
return str(self.raw)
2018-06-20 04:36:07 +00:00
def getHeader(self, key = None, default = None):
2018-05-16 02:12:23 +00:00
'''
Returns the header information
Inputs:
- key (str): only returns the value of the key in the header
Outputs:
- (dict/str): either the whole header as a dict, or one value
'''
2018-05-16 01:47:58 +00:00
if not key is None:
2018-06-20 04:36:07 +00:00
if key in self.getHeader():
return self.getHeader()[key]
return default
return self.bheader
2018-05-16 01:47:58 +00:00
2018-06-20 04:36:07 +00:00
def getMetadata(self, key = None, default = None):
2018-05-16 02:12:23 +00:00
'''
Returns the metadata information
Inputs:
- key (str): only returns the value of the key in the metadata
Outputs:
- (dict/str): either the whole metadata as a dict, or one value
'''
2018-05-16 01:47:58 +00:00
if not key is None:
2018-06-20 04:36:07 +00:00
if key in self.getMetadata():
return self.getMetadata()[key]
return default
return self.bmetadata
2018-05-15 06:43:29 +00:00
def getContent(self):
2018-05-16 02:12:23 +00:00
'''
Returns the contents of the block
Outputs:
- (str): the contents of the block
'''
2018-05-16 01:47:58 +00:00
return str(self.bcontent)
2018-06-05 02:26:04 +00:00
def getParent(self):
    '''
    Returns the Block's parent Block, or None

    Outputs:
    - (Block): the Block's parent (self if it is its own parent), or None
    '''
    # lazily resolve a parent stored as a hash string into a Block object;
    # after the first call self.parent is a Block (or None), so this is a no-op
    if type(self.parent) == str:
        if self.parent == self.getHash():
            # a block referencing its own hash is treated as its own parent
            self.parent = self
        elif Block.exists(self.parent):
            self.parent = Block(self.getMetadata('parent'), core = self.getCore())
        else:
            self.parent = None

    return self.parent
def getDate(self):
2018-05-16 02:12:23 +00:00
'''
Returns the date that the block was received, if loaded from file
Outputs:
- (datetime): the date that the block was received
'''
2018-05-15 06:43:29 +00:00
return self.date
2018-05-16 01:47:58 +00:00
def getBlockFile(self):
2018-05-16 02:12:23 +00:00
'''
Returns the location of the block file if it is saved
Outputs:
- (str): the location of the block file, or None
'''
2018-05-16 01:47:58 +00:00
return self.blockFile
2018-05-15 06:43:29 +00:00
def isValid(self):
2018-05-16 02:12:23 +00:00
'''
Checks if the block is valid
Outputs:
- (bool): whether or not the block is valid
'''
2018-05-15 06:43:29 +00:00
return self.valid
def isSigned(self):
2018-05-16 02:12:23 +00:00
'''
Checks if the block was signed
Outputs:
- (bool): whether or not the block is signed
'''
2018-05-15 06:43:29 +00:00
return self.signed
2018-05-16 01:47:58 +00:00
def getSignature(self):
2018-05-16 02:12:23 +00:00
'''
Returns the base64-encoded signature
Outputs:
- (str): the signature, or None
'''
2018-05-16 01:47:58 +00:00
return self.signature
def getSignedData(self):
2018-05-16 02:12:23 +00:00
'''
Returns the data that was signed
Outputs:
- (str): the data that was signed, or None
'''
2018-05-16 01:47:58 +00:00
return self.signedData
def isSigner(self, signer, encodedData = True):
    '''
    Checks if the block was signed by the signer inputted

    Inputs:
    - signer (str): the public key of the signer to check against
    - encodedData (bool): whether or not the `signer` argument is base64 encoded

    Outputs:
    - (bool): whether or not the signer of the block is the signer inputted
    '''
    try:
        # an unsigned block, or an invalid key, can never match
        if (not self.isSigned()) or (not self.getCore()._utils.validatePubKey(signer)):
            return False
        return bool(self.getCore()._crypto.edVerify(self.getSignedData(), signer, self.getSignature(), encodedData = encodedData))
    except Exception:
        # bug fix: was a bare `except:`, which also swallowed
        # SystemExit/KeyboardInterrupt; narrowed to Exception
        return False
# setters
def setType(self, btype):
2018-05-16 02:12:23 +00:00
'''
Sets the type of the block
Inputs:
- btype (str): the type of block to be set to
Outputs:
- (Block): the Block instance
2018-05-16 02:12:23 +00:00
'''
2018-05-15 06:43:29 +00:00
self.btype = btype
return self
2018-06-05 02:26:04 +00:00
def setMetadata(self, key, val):
'''
Sets a custom metadata value
2018-06-05 02:26:04 +00:00
Metadata should not store block-specific data structures.
2018-06-05 02:26:04 +00:00
Inputs:
- key (str): the key
- val: the value (type is irrelevant)
2018-06-05 02:26:04 +00:00
Outputs:
- (Block): the Block instance
'''
2018-06-05 02:26:04 +00:00
if key == 'parent' and (not val is None) and (not val == self.getParent().getHash()):
self.setParent(val)
else:
self.bmetadata[key] = val
return self
2018-05-15 06:43:29 +00:00
def setContent(self, bcontent):
2018-05-16 02:12:23 +00:00
'''
Sets the contents of the block
Inputs:
- bcontent (str): the contents to be set to
Outputs:
- (Block): the Block instance
2018-05-16 02:12:23 +00:00
'''
2018-05-16 01:47:58 +00:00
self.bcontent = str(bcontent)
2018-05-15 06:43:29 +00:00
return self
2018-06-05 02:26:04 +00:00
def setParent(self, parent):
    '''
    Sets the Block's parent

    Inputs:
    - parent (Block/str): the Block's parent, to be stored in metadata

    Outputs:
    - (Block): the Block instance
    '''
    # accept a hash string by wrapping it in a Block object
    if type(parent) == str:
        parent = Block(parent, core = self.getCore())

    self.parent = parent
    # keep the 'parent' metadata field in sync with the resolved parent hash
    self.setMetadata('parent', (None if parent is None else self.getParent().getHash()))
    return self
# static functions
2018-05-15 06:43:29 +00:00
2018-08-04 03:47:56 +00:00
def getBlocks(type = None, signer = None, signed = None, parent = None, reverse = False, limit = None, core = None):
    '''
    Returns a list of Block objects based on supplied filters

    Inputs:
    - type (str): filters by block type
    - signer (str/list): filters by signer (one in the list has to be a signer)
    - signed (bool): filters out by whether or not the block is signed
    - parent (str/Block): filters by parent block
    - reverse (bool): reverses the list if True
    - limit (int): maximum number of blocks to return, or None for no limit
    - core (Core): lets you optionally supply a core instance so one doesn't need to be started

    Outputs:
    - (list): a list of Block objects that match the input
    '''
    try:
        core = (core if not core is None else onionrcore.Core())

        if (not parent is None) and (not isinstance(parent, Block)):
            parent = Block(hash = parent, core = core)

        relevant_blocks = list()
        blocks = (core.getBlockList() if type is None else core.getBlocksByType(type))

        for block in blocks:
            if Block.exists(block):
                block = Block(block, core = core)

                relevant = True

                if (not signed is None) and (block.isSigned() != bool(signed)):
                    relevant = False
                if not signer is None:
                    # normalize a single signer (str or bytes) into a list
                    if isinstance(signer, (str,)):
                        signer = [signer]
                    if isinstance(signer, (bytes,)):
                        signer = [signer.decode()]

                    isSigner = False
                    for key in signer:
                        if block.isSigner(key):
                            isSigner = True
                            break

                    if not isSigner:
                        relevant = False

                if not parent is None:
                    blockParent = block.getParent()

                    if blockParent is None:
                        relevant = False
                    else:
                        relevant = parent.getHash() == blockParent.getHash()

                # bug fix: was `relevant_Blocks` (NameError, so any non-None
                # `limit` silently returned []), and `<=` let limit+1 items in
                if relevant and (limit is None or len(relevant_blocks) < int(limit)):
                    relevant_blocks.append(block)

        if bool(reverse):
            relevant_blocks.reverse()

        return relevant_blocks
    except Exception as e:
        logger.debug('Failed to get blocks.', error = e)

    return list()
2018-05-15 06:43:29 +00:00
2018-06-05 02:26:04 +00:00
def mergeChain(child, file = None, maximumFollows = 32, core = None):
    '''
    Follows a child Block to its root parent Block, merging content

    Inputs:
    - child (str/Block): the child Block to be followed
    - file (str/file): the file to write the content to, instead of returning it
    - maximumFollows (int): the maximum number of Blocks to follow
    - core (Core): optional Core instance to reuse

    Outputs:
    - (str/None): the merged content, or None if written to `file`
    '''
    # validate data and instantiate Core
    core = (core if not core is None else onionrcore.Core())
    maximumFollows = max(0, maximumFollows)

    # type conversions
    if type(child) == list:
        child = child[-1]
    if type(child) == str:
        # bug fix: was Block(child) without `core`, which ignored the
        # caller-supplied Core and spawned a second instance
        child = Block(child, core = core)
    if (not file is None) and (type(file) == str):
        file = open(file, 'ab')

    # only store hashes to avoid intensive memory usage
    blocks = [child.getHash()]

    # generate a list of parent Blocks
    while True:
        # end if the maximum number of follows has been exceeded
        if len(blocks) - 1 >= maximumFollows:
            break

        block = Block(blocks[-1], core = core).getParent()

        # end if there is no parent Block
        if block is None:
            break

        # end if the Block is pointing to a previously parsed Block
        if block.getHash() in blocks:
            break

        # end if the block is not valid
        if not block.isValid():
            break

        blocks.append(block.getHash())

    buffer = ''

    # combine block contents
    for hash in blocks:
        block = Block(hash, core = core)
        contents = block.getContent()
        # chunks are stored base64-encoded; decode back to raw bytes
        contents = base64.b64decode(contents.encode())

        if file is None:
            buffer += contents.decode()
        else:
            file.write(contents)

    return (None if not file is None else buffer)
2018-06-05 02:26:04 +00:00
def createChain(data = None, chunksize = 99800, file = None, type = 'chunk', sign = True, encrypt = False, verbose = False):
    '''
    Creates a chain of blocks to store larger amounts of data

    The chunksize is set to 99800 because it provides the least amount of PoW for the most amount of data.

    Inputs:
    - data (*): if `file` is None, the data to be stored in blocks
    - file (file/str): the filename or file object to read from (or None to read `data` instead)
    - chunksize (int): the number of bytes per block chunk
    - type (str): the type header for each of the blocks
    - sign (bool): whether or not to sign each block
    - encrypt (str): the public key to encrypt to, or False to disable encryption
      (NOTE(review): `encrypt` is never used in this body — confirm intent)
    - verbose (bool): whether or not to return a tuple containing more info

    Outputs:
    - if `verbose`:
      - (tuple):
        - (str): the child block hash
        - (list): all block hashes associated with storing the file
    - if not `verbose`:
      - (str): the child block hash
    '''
    blocks = list()

    # initial datatype checks
    if data is None and file is None:
        return blocks
    elif not (file is None or (isinstance(file, str) and os.path.exists(file))):
        return blocks
    elif isinstance(file, str):
        file = open(file, 'rb')
    if not isinstance(data, str):
        data = str(data)

    if not file is None:
        # read the file in chunks from its end toward its beginning, so the
        # last block created (the child/chain head) holds the start of the file
        filesize = os.stat(file.name).st_size
        offset = filesize % chunksize
        maxtimes = int(filesize / chunksize)

        for times in range(0, maxtimes + 1):
            # read chunksize bytes from the file (end -> beginning)
            if times < maxtimes:
                file.seek(- ((times + 1) * chunksize), 2)
                content = file.read(chunksize)
            else:
                # the leftover partial chunk at the very start of the file
                file.seek(0, 0)
                content = file.read(offset)

            # encode it- python is really bad at handling certain bytes that
            # are often present in binaries.
            content = base64.b64encode(content).decode()

            # if it is the end of the file, exit
            if not content:
                break

            # create block
            block = Block()
            block.setType(type)
            block.setContent(content)
            block.setParent((blocks[-1] if len(blocks) != 0 else None))
            hash = block.save(sign = sign)

            # remember the hash in cache
            blocks.append(hash)
    elif not data is None:
        # chunk the string in reverse so the final block is the chain head
        for content in reversed([data[n:n + chunksize] for n in range(0, len(data), chunksize)]):
            # encode chunk with base64
            content = base64.b64encode(content.encode()).decode()

            # create block
            block = Block()
            block.setType(type)
            block.setContent(content)
            block.setParent((blocks[-1] if len(blocks) != 0 else None))
            hash = block.save(sign = sign)

            # remember the hash in cache
            blocks.append(hash)

    # return different things depending on verbosity
    if verbose:
        return (blocks[-1], blocks)
    return blocks[-1]
def exists(hash):
    '''
    Checks if a block is saved to file or not

    Inputs:
    - hash (str/Block):
      - if (Block): check if this block is saved to file
      - if (str): check if a block by this hash is in file

    Outputs:
    - (bool): whether or not the block file exists
    '''
    # no input data? scrap it.
    if hash is None:
        return False

    # bug fix: previously called hash.getHash() unconditionally, which raised
    # AttributeError for the documented (and actually used) plain-str case
    if isinstance(hash, Block):
        hash = hash.getHash()

    return onionrstorage.getData(onionrcore.Core(), hash) is not None
2018-06-14 04:17:58 +00:00
def getCache(hash = None):
# give a list of the hashes of the cached blocks
if hash is None:
return list(Block.blockCache.keys())
# if they inputted self or a Block, convert to hash
if type(hash) == Block:
hash = hash.getHash()
# just to make sure someone didn't put in a bool or something lol
hash = str(hash)
# if it exists, return its content
if hash in Block.getCache():
return Block.blockCache[hash]
return None
def cache(block, override = False):
    '''
    Cache a Block's raw data in the in-memory cache.

    Inputs:
    - block (Block): the block to cache (must be saved, i.e. have a hash)
    - override (bool): re-cache even if the block is already cached

    Outputs:
    - (bool): whether the block was cached
    '''
    # why even bother if they're giving bad data?
    if not type(block) == Block:
        return False

    # only cache if written to file
    if block.getHash() is None:
        return False

    # if it's already cached, what are we here for?
    if block.getHash() in Block.getCache() and not override:
        return False

    # dump old cached blocks if the size exceeds the maximum
    # NOTE(review): sys.getsizeof on a list is shallow (counts the pointer
    # array, not the cached strings), so this does not measure the real
    # cache size — confirm whether summing entry sizes was intended
    if sys.getsizeof(Block.blockCacheOrder) >= config.get('allocations.block_cache_total', 50000000): # 50MB default cache size
        # bug fix: was unqualified `blockCacheOrder.pop(0)` -> NameError on eviction
        del Block.blockCache[Block.blockCacheOrder.pop(0)]

    # cache block content
    Block.blockCache[block.getHash()] = block.getRaw()
    Block.blockCacheOrder.append(block.getHash())

    return True