2018-05-15 06:43:29 +00:00
|
|
|
'''
|
|
|
|
Onionr - P2P Microblogging Platform & Social network.
|
|
|
|
|
|
|
|
This class contains the OnionrBlocks class which is a class for working with Onionr blocks
|
|
|
|
'''
|
|
|
|
'''
|
|
|
|
This program is free software: you can redistribute it and/or modify
|
|
|
|
it under the terms of the GNU General Public License as published by
|
|
|
|
the Free Software Foundation, either version 3 of the License, or
|
|
|
|
(at your option) any later version.
|
|
|
|
|
|
|
|
This program is distributed in the hope that it will be useful,
|
|
|
|
but WITHOUT ANY WARRANTY; without even the implied warranty of
|
|
|
|
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
|
|
|
GNU General Public License for more details.
|
|
|
|
|
|
|
|
You should have received a copy of the GNU General Public License
|
|
|
|
along with this program. If not, see <https://www.gnu.org/licenses/>.
|
|
|
|
'''
|
|
|
|
|
2018-07-18 07:33:23 +00:00
|
|
|
import core as onionrcore, logger, config, onionrexceptions, nacl.exceptions
|
2018-06-14 04:17:58 +00:00
|
|
|
import json, os, sys, datetime, base64
|
2018-05-15 06:43:29 +00:00
|
|
|
|
|
|
|
class Block:
    # Class-level (shared) cache of raw block data, keyed by block hash.
    # blockCacheOrder records insertion order so the oldest entry can be
    # evicted when the cache grows past its size budget (see Block.cache()).
    blockCacheOrder = list() # NEVER write your own code that writes to this!
    blockCache = dict() # should never be accessed directly, look at Block.getCache()
2018-06-20 04:36:07 +00:00
|
|
|
def __init__(self, hash = None, core = None, type = None, content = None):
    '''
    Initializes a Block, optionally loading its data from file by hash.

    Inputs:
        - hash (str/bytes): the hash of an existing block to load, or None
        - core (Core): a Core instance to reuse; a new one is created if None
        - type (str): the block type (note: shadows the builtin `type`;
          kept for backward compatibility with keyword callers)
        - content (str): the block content

    Raises:
        - onionrexceptions.InvalidHexHash: when `hash` is not a valid hex hash
    '''

    # take from arguments
    # sometimes people input a bytes object instead of str in `hash`
    try:
        hash = hash.decode()
    except AttributeError:
        # already a str (or None) -- leave it alone
        pass

    self.hash = hash
    self.core = core
    self.btype = type
    self.bcontent = content

    # initialize variables
    self.valid = True
    self.raw = None
    self.signed = False
    self.signature = None
    self.signedData = None
    self.blockFile = None
    self.parent = None
    self.bheader = {}
    self.bmetadata = {}
    self.isEncrypted = False
    self.decrypted = False
    self.signer = None
    self.validSig = False

    # handle arguments: lazily create a Core if the caller did not supply one
    if self.getCore() is None:
        self.core = onionrcore.Core()

    # update the blocks' contents if it exists
    if not self.getHash() is None:
        if not self.core._utils.validateHash(self.hash):
            logger.debug('Block hash %s is invalid.' % self.getHash())
            raise onionrexceptions.InvalidHexHash('Block hash is invalid.')
        elif not self.update():
            # update() failed (e.g. block file missing); block stays empty
            logger.debug('Failed to open block %s.' % self.getHash())
        else:
            pass
            #logger.debug('Did not update block.')
def decrypt(self, anonymous=True, encodedData=True):
    '''
    Decrypt a block, loading decrypted data into their vars.

    Only asymmetric ('asym') blocks are supported; symmetric blocks log a
    warning and return False.

    Inputs:
        - anonymous (bool): passed through to pubKeyDecrypt
        - encodedData (bool): passed through to pubKeyDecrypt

    Outputs:
        - (bool): True when decryption succeeded (or already decrypted)
    '''
    if self.decrypted:
        # idempotent: already decrypted on a previous call
        return True
    retData = False
    core = self.getCore()
    # decrypt data
    if self.getHeader('encryptType') == 'asym':
        try:
            self.bcontent = core._crypto.pubKeyDecrypt(self.bcontent, anonymous=anonymous, encodedData=encodedData)
            bmeta = core._crypto.pubKeyDecrypt(self.bmetadata, anonymous=anonymous, encodedData=encodedData)
            try:
                bmeta = bmeta.decode()
            except AttributeError:
                # yet another bytes fix
                pass
            self.bmetadata = json.loads(bmeta)
            self.signature = core._crypto.pubKeyDecrypt(self.signature, anonymous=anonymous, encodedData=encodedData)
            self.signer = core._crypto.pubKeyDecrypt(self.signer, anonymous=anonymous, encodedData=encodedData)
            # rebuild the signed payload: serialized metadata + content
            self.signedData = json.dumps(self.bmetadata) + self.bcontent.decode()
        except nacl.exceptions.CryptoError:
            # wrong key or corrupted data -- leave retData False
            pass
            #logger.debug('Could not decrypt block. Either invalid key or corrupted data')
        else:
            retData = True
            self.decrypted = True
    else:
        logger.warn('symmetric decryption is not yet supported by this API')
    return retData
def verifySig(self):
    '''
    Check whether the block's signature matches its claimed signer.

    Outputs:
        - (bool): True when the signature verifies, False otherwise
    '''
    crypto = self.getCore()._crypto
    verified = crypto.edVerify(data=self.signedData, key=self.signer, sig=self.signature, encodedData=True)
    self.validSig = True if verified else False
    return self.validSig
def update(self, data = None, file = None):
    '''
    Loads data from a block in to the current object.

    Inputs:
        - data (str):
          - if None: will load from file by hash
          - else: will load from `data` string
        - file (str):
          - if None: will load from wherever block is stored by hash
          - else: will load from the file specified in this parameter

    Outputs:
        - (bool): indicates whether or not the operation was successful
    '''

    try:
        # import from string
        blockdata = data

        # import from file
        if blockdata is None:
            filelocation = file

            readfile = True

            if filelocation is None:
                if self.getHash() is None:
                    # no data, no file, no hash: nothing to load
                    return False
                elif self.getHash() in Block.getCache():
                    # get the block from cache, if it's in it
                    blockdata = Block.getCache(self.getHash())
                    readfile = False

                # read from file if it's still None
                if blockdata is None:
                    filelocation = 'data/blocks/%s.dat' % self.getHash()

            if readfile:
                with open(filelocation, 'rb') as f:
                    blockdata = f.read().decode()

            self.blockFile = filelocation
        else:
            # loaded from an in-memory string; there is no backing file
            self.blockFile = None

        # parse block: first line is the JSON header, the rest is content
        self.raw = str(blockdata)
        self.bheader = json.loads(self.getRaw()[:self.getRaw().index('\n')])
        self.bcontent = self.getRaw()[self.getRaw().index('\n') + 1:]
        if self.bheader['encryptType'] in ('asym', 'sym'):
            # encrypted: metadata stays opaque until decrypt() is called
            self.bmetadata = self.getHeader('meta', None)
            self.isEncrypted = True
        else:
            self.bmetadata = json.loads(self.getHeader('meta', None))
        self.parent = self.getMetadata('parent', None)
        self.btype = self.getMetadata('type', None)
        self.signed = ('sig' in self.getHeader() and self.getHeader('sig') != '')
        # TODO: detect if signer is hash of pubkey or not
        self.signer = self.getHeader('signer', None)
        self.signature = self.getHeader('sig', None)
        # signed data is jsonMeta + block content (no linebreak)
        self.signedData = (None if not self.isSigned() else self.getHeader('meta') + self.getContent())
        self.date = self.getCore().getBlockDate(self.getHash())

        if not self.getDate() is None:
            # convert the stored epoch timestamp into a datetime object
            self.date = datetime.datetime.fromtimestamp(self.getDate())

        self.valid = True

        # only cache blocks below the configured size budget
        if len(self.getRaw()) <= config.get('allocations.blockCache', 500000):
            self.cache()

        return True
    except Exception as e:
        logger.error('Failed to update block data.', error = e, timestamp = False)

    self.valid = False
    return False
def delete(self):
|
2018-05-16 02:12:23 +00:00
|
|
|
'''
|
|
|
|
Deletes the block's file and records, if they exist
|
|
|
|
|
|
|
|
Outputs:
|
|
|
|
- (bool): whether or not the operation was successful
|
|
|
|
'''
|
|
|
|
|
2018-05-16 01:47:58 +00:00
|
|
|
if self.exists():
|
|
|
|
os.remove(self.getBlockFile())
|
2018-05-16 02:12:23 +00:00
|
|
|
removeBlock(self.getHash())
|
2018-05-16 01:47:58 +00:00
|
|
|
return True
|
2018-05-15 06:43:29 +00:00
|
|
|
return False
|
|
|
|
|
2018-05-16 02:12:23 +00:00
|
|
|
def save(self, sign = False, recreate = True):
    '''
    Saves a block to file and imports it into Onionr.

    Inputs:
        - sign (bool): whether or not to sign the block before saving
        - recreate (bool): if the block already exists, whether or not to recreate the block and save under a new hash

    Outputs:
        - (str/bool): the block hash on success, False on failure
    '''

    try:
        if self.isValid() is True:
            if (not self.getBlockFile() is None) and (recreate is True):
                # block already has a backing file: rewrite it in place
                with open(self.getBlockFile(), 'wb') as blockFile:
                    blockFile.write(self.getRaw().encode())
                self.update()
            else:
                # no backing file yet: insert through Core, which returns
                # the new block hash
                self.hash = self.getCore().insertBlock(self.getContent(), header = self.getType(), sign = sign)
                self.update()
            return self.getHash()
        else:
            logger.warn('Not writing block; it is invalid.')
    except Exception as e:
        logger.error('Failed to save block.', error = e, timestamp = False)
        return False
def getHash(self):
|
2018-05-16 02:12:23 +00:00
|
|
|
'''
|
|
|
|
Returns the hash of the block if saved to file
|
|
|
|
|
|
|
|
Outputs:
|
|
|
|
- (str): the hash of the block, or None
|
|
|
|
'''
|
|
|
|
|
2018-05-15 06:43:29 +00:00
|
|
|
return self.hash
|
|
|
|
|
|
|
|
def getCore(self):
|
2018-05-16 02:12:23 +00:00
|
|
|
'''
|
|
|
|
Returns the Core instance being used by the Block
|
|
|
|
|
|
|
|
Outputs:
|
|
|
|
- (Core): the Core instance
|
|
|
|
'''
|
|
|
|
|
2018-05-15 06:43:29 +00:00
|
|
|
return self.core
|
|
|
|
|
|
|
|
def getType(self):
|
2018-05-16 02:12:23 +00:00
|
|
|
'''
|
|
|
|
Returns the type of the block
|
|
|
|
|
|
|
|
Outputs:
|
|
|
|
- (str): the type of the block
|
|
|
|
'''
|
2018-05-15 06:43:29 +00:00
|
|
|
return self.btype
|
|
|
|
|
2018-05-16 01:47:58 +00:00
|
|
|
def getRaw(self):
|
2018-05-16 02:12:23 +00:00
|
|
|
'''
|
|
|
|
Returns the raw contents of the block, if saved to file
|
|
|
|
|
|
|
|
Outputs:
|
|
|
|
- (str): the raw contents of the block, or None
|
|
|
|
'''
|
|
|
|
|
2018-05-16 01:47:58 +00:00
|
|
|
return str(self.raw)
|
|
|
|
|
2018-06-20 04:36:07 +00:00
|
|
|
def getHeader(self, key = None, default = None):
|
2018-05-16 02:12:23 +00:00
|
|
|
'''
|
|
|
|
Returns the header information
|
|
|
|
|
|
|
|
Inputs:
|
|
|
|
- key (str): only returns the value of the key in the header
|
|
|
|
|
|
|
|
Outputs:
|
|
|
|
- (dict/str): either the whole header as a dict, or one value
|
|
|
|
'''
|
|
|
|
|
2018-05-16 01:47:58 +00:00
|
|
|
if not key is None:
|
2018-06-20 04:36:07 +00:00
|
|
|
if key in self.getHeader():
|
|
|
|
return self.getHeader()[key]
|
|
|
|
return default
|
2018-06-01 07:02:56 +00:00
|
|
|
return self.bheader
|
2018-05-16 01:47:58 +00:00
|
|
|
|
2018-06-20 04:36:07 +00:00
|
|
|
def getMetadata(self, key = None, default = None):
|
2018-05-16 02:12:23 +00:00
|
|
|
'''
|
|
|
|
Returns the metadata information
|
|
|
|
|
|
|
|
Inputs:
|
|
|
|
- key (str): only returns the value of the key in the metadata
|
|
|
|
|
|
|
|
Outputs:
|
|
|
|
- (dict/str): either the whole metadata as a dict, or one value
|
|
|
|
'''
|
|
|
|
|
2018-05-16 01:47:58 +00:00
|
|
|
if not key is None:
|
2018-06-20 04:36:07 +00:00
|
|
|
if key in self.getMetadata():
|
|
|
|
return self.getMetadata()[key]
|
|
|
|
return default
|
2018-06-01 07:02:56 +00:00
|
|
|
return self.bmetadata
|
2018-05-15 06:43:29 +00:00
|
|
|
|
|
|
|
def getContent(self):
|
2018-05-16 02:12:23 +00:00
|
|
|
'''
|
|
|
|
Returns the contents of the block
|
|
|
|
|
|
|
|
Outputs:
|
|
|
|
- (str): the contents of the block
|
|
|
|
'''
|
|
|
|
|
2018-05-16 01:47:58 +00:00
|
|
|
return str(self.bcontent)
|
2018-06-05 02:26:04 +00:00
|
|
|
|
2018-06-01 07:02:56 +00:00
|
|
|
def getParent(self):
|
|
|
|
'''
|
|
|
|
Returns the Block's parent Block, or None
|
2018-06-05 02:26:04 +00:00
|
|
|
|
2018-06-01 07:02:56 +00:00
|
|
|
Outputs:
|
|
|
|
- (Block): the Block's parent
|
|
|
|
'''
|
2018-06-05 02:26:04 +00:00
|
|
|
|
2018-06-07 01:58:40 +00:00
|
|
|
if type(self.parent) == str:
|
2018-06-07 01:54:35 +00:00
|
|
|
if self.parent == self.getHash():
|
|
|
|
self.parent = self
|
|
|
|
elif Block.exists(self.parent):
|
2018-06-20 04:36:07 +00:00
|
|
|
self.parent = Block(self.getMetadata('parent'), core = self.getCore())
|
2018-06-07 01:54:35 +00:00
|
|
|
else:
|
|
|
|
self.parent = None
|
|
|
|
|
2018-06-01 07:02:56 +00:00
|
|
|
return self.parent
|
2018-05-15 06:43:29 +00:00
|
|
|
|
|
|
|
def getDate(self):
|
2018-05-16 02:12:23 +00:00
|
|
|
'''
|
|
|
|
Returns the date that the block was received, if loaded from file
|
|
|
|
|
|
|
|
Outputs:
|
|
|
|
- (datetime): the date that the block was received
|
|
|
|
'''
|
|
|
|
|
2018-05-15 06:43:29 +00:00
|
|
|
return self.date
|
|
|
|
|
2018-05-16 01:47:58 +00:00
|
|
|
def getBlockFile(self):
|
2018-05-16 02:12:23 +00:00
|
|
|
'''
|
|
|
|
Returns the location of the block file if it is saved
|
|
|
|
|
|
|
|
Outputs:
|
|
|
|
- (str): the location of the block file, or None
|
|
|
|
'''
|
|
|
|
|
2018-05-16 01:47:58 +00:00
|
|
|
return self.blockFile
|
|
|
|
|
2018-05-15 06:43:29 +00:00
|
|
|
def isValid(self):
|
2018-05-16 02:12:23 +00:00
|
|
|
'''
|
|
|
|
Checks if the block is valid
|
|
|
|
|
|
|
|
Outputs:
|
|
|
|
- (bool): whether or not the block is valid
|
|
|
|
'''
|
|
|
|
|
2018-05-15 06:43:29 +00:00
|
|
|
return self.valid
|
|
|
|
|
|
|
|
def isSigned(self):
|
2018-05-16 02:12:23 +00:00
|
|
|
'''
|
|
|
|
Checks if the block was signed
|
|
|
|
|
|
|
|
Outputs:
|
|
|
|
- (bool): whether or not the block is signed
|
|
|
|
'''
|
|
|
|
|
2018-05-15 06:43:29 +00:00
|
|
|
return self.signed
|
|
|
|
|
2018-05-16 01:47:58 +00:00
|
|
|
def getSignature(self):
|
2018-05-16 02:12:23 +00:00
|
|
|
'''
|
|
|
|
Returns the base64-encoded signature
|
|
|
|
|
|
|
|
Outputs:
|
|
|
|
- (str): the signature, or None
|
|
|
|
'''
|
|
|
|
|
2018-05-16 01:47:58 +00:00
|
|
|
return self.signature
|
|
|
|
|
|
|
|
def getSignedData(self):
|
2018-05-16 02:12:23 +00:00
|
|
|
'''
|
|
|
|
Returns the data that was signed
|
|
|
|
|
|
|
|
Outputs:
|
|
|
|
- (str): the data that was signed, or None
|
|
|
|
'''
|
|
|
|
|
2018-05-16 01:47:58 +00:00
|
|
|
return self.signedData
|
|
|
|
|
|
|
|
def isSigner(self, signer, encodedData = True):
|
2018-05-16 02:12:23 +00:00
|
|
|
'''
|
|
|
|
Checks if the block was signed by the signer inputted
|
|
|
|
|
|
|
|
Inputs:
|
|
|
|
- signer (str): the public key of the signer to check against
|
|
|
|
- encodedData (bool): whether or not the `signer` argument is base64 encoded
|
|
|
|
|
|
|
|
Outputs:
|
|
|
|
- (bool): whether or not the signer of the block is the signer inputted
|
|
|
|
'''
|
|
|
|
|
2018-05-16 01:47:58 +00:00
|
|
|
try:
|
|
|
|
if (not self.isSigned()) or (not self.getCore()._utils.validatePubKey(signer)):
|
|
|
|
return False
|
|
|
|
|
|
|
|
return bool(self.getCore()._crypto.edVerify(self.getSignedData(), signer, self.getSignature(), encodedData = encodedData))
|
|
|
|
except:
|
|
|
|
return False
|
2018-05-15 06:43:29 +00:00
|
|
|
|
|
|
|
# setters
|
|
|
|
|
|
|
|
def setType(self, btype):
|
2018-05-16 02:12:23 +00:00
|
|
|
'''
|
|
|
|
Sets the type of the block
|
|
|
|
|
|
|
|
Inputs:
|
|
|
|
- btype (str): the type of block to be set to
|
|
|
|
|
|
|
|
Outputs:
|
2018-06-01 07:02:56 +00:00
|
|
|
- (Block): the Block instance
|
2018-05-16 02:12:23 +00:00
|
|
|
'''
|
|
|
|
|
2018-05-15 06:43:29 +00:00
|
|
|
self.btype = btype
|
|
|
|
return self
|
2018-06-05 02:26:04 +00:00
|
|
|
|
2018-06-01 07:02:56 +00:00
|
|
|
def setMetadata(self, key, val):
|
|
|
|
'''
|
|
|
|
Sets a custom metadata value
|
2018-06-05 02:26:04 +00:00
|
|
|
|
2018-06-01 07:02:56 +00:00
|
|
|
Metadata should not store block-specific data structures.
|
2018-06-05 02:26:04 +00:00
|
|
|
|
2018-06-01 07:02:56 +00:00
|
|
|
Inputs:
|
|
|
|
- key (str): the key
|
|
|
|
- val: the value (type is irrelevant)
|
2018-06-05 02:26:04 +00:00
|
|
|
|
2018-06-01 07:02:56 +00:00
|
|
|
Outputs:
|
|
|
|
- (Block): the Block instance
|
|
|
|
'''
|
2018-06-05 02:26:04 +00:00
|
|
|
|
2018-06-01 07:02:56 +00:00
|
|
|
if key == 'parent' and (not val is None) and (not val == self.getParent().getHash()):
|
|
|
|
self.setParent(val)
|
|
|
|
else:
|
|
|
|
self.bmetadata[key] = val
|
|
|
|
return self
|
2018-05-15 06:43:29 +00:00
|
|
|
|
|
|
|
def setContent(self, bcontent):
|
2018-05-16 02:12:23 +00:00
|
|
|
'''
|
|
|
|
Sets the contents of the block
|
|
|
|
|
|
|
|
Inputs:
|
|
|
|
- bcontent (str): the contents to be set to
|
|
|
|
|
|
|
|
Outputs:
|
2018-06-01 07:02:56 +00:00
|
|
|
- (Block): the Block instance
|
2018-05-16 02:12:23 +00:00
|
|
|
'''
|
|
|
|
|
2018-05-16 01:47:58 +00:00
|
|
|
self.bcontent = str(bcontent)
|
2018-05-15 06:43:29 +00:00
|
|
|
return self
|
2018-06-05 02:26:04 +00:00
|
|
|
|
2018-06-01 07:02:56 +00:00
|
|
|
def setParent(self, parent):
|
|
|
|
'''
|
|
|
|
Sets the Block's parent
|
2018-06-05 02:26:04 +00:00
|
|
|
|
2018-06-01 07:02:56 +00:00
|
|
|
Inputs:
|
|
|
|
- parent (Block/str): the Block's parent, to be stored in metadata
|
2018-06-05 02:26:04 +00:00
|
|
|
|
2018-06-01 07:02:56 +00:00
|
|
|
Outputs:
|
|
|
|
- (Block): the Block instance
|
|
|
|
'''
|
2018-06-05 02:26:04 +00:00
|
|
|
|
2018-06-01 07:02:56 +00:00
|
|
|
if type(parent) == str:
|
|
|
|
parent = Block(parent, core = self.getCore())
|
2018-06-05 02:26:04 +00:00
|
|
|
|
2018-06-01 07:02:56 +00:00
|
|
|
self.parent = parent
|
|
|
|
self.setMetadata('parent', (None if parent is None else self.getParent().getHash()))
|
|
|
|
return self
|
2018-05-15 06:43:29 +00:00
|
|
|
|
2018-06-01 07:02:56 +00:00
|
|
|
# static functions
|
2018-05-15 06:43:29 +00:00
|
|
|
|
2018-05-16 02:12:23 +00:00
|
|
|
def getBlocks(type = None, signer = None, signed = None, reverse = False, core = None):
    '''
    Returns a list of Block objects based on supplied filters.

    Inputs:
        - type (str): filters by block type
        - signer (str/list): filters by signer (one in the list has to be a signer)
        - signed (bool): filters out by whether or not the block is signed
        - reverse (bool): reverses the list if True
        - core (Core): lets you optionally supply a core instance so one doesn't need to be started

    Outputs:
        - (list): a list of Block objects that match the input
    '''

    try:
        core = (core if not core is None else onionrcore.Core())

        relevant_blocks = list()
        # fetch only blocks of the requested type when one is given
        blocks = (core.getBlockList() if type is None else core.getBlocksByType(type))

        for block in blocks:
            if Block.exists(block):
                block = Block(block, core = core)

                relevant = True

                # signed filter: block's signed flag must match the request
                if (not signed is None) and (block.isSigned() != bool(signed)):
                    relevant = False
                if not signer is None:
                    # normalize a single signer (str or bytes) to a list
                    if isinstance(signer, (str,)):
                        signer = [signer]
                    if isinstance(signer, (bytes,)):
                        signer = [signer.decode()]

                    # at least one of the given keys must have signed the block
                    isSigner = False
                    for key in signer:
                        if block.isSigner(key):
                            isSigner = True
                            break

                    if not isSigner:
                        relevant = False

                if relevant:
                    relevant_blocks.append(block)

        if bool(reverse):
            relevant_blocks.reverse()

        return relevant_blocks
    except Exception as e:
        logger.debug('Failed to get blocks.', error = e)

    # any failure yields an empty list rather than propagating
    return list()
def mergeChain(child, file = None, maximumFollows = 32, core = None):
    '''
    Follows a child Block to its root parent Block, merging content.

    Inputs:
        - child (str/Block/list): the child Block to be followed (a list uses its last element)
        - file (str/file): the file to write the content to, instead of returning it
        - maximumFollows (int): the maximum number of Blocks to follow
        - core (Core): optionally reuse an existing Core instance

    Outputs:
        - (str/None): the merged content, or None when written to `file`
    '''

    # validate data and instantiate Core
    core = (core if not core is None else onionrcore.Core())
    maximumFollows = max(0, maximumFollows)

    # type conversions
    if type(child) == list:
        child = child[-1]
    if type(child) == str:
        # FIX: pass the Core instance resolved above instead of letting
        # Block() construct a second one
        child = Block(child, core = core)
    if (not file is None) and (type(file) == str):
        file = open(file, 'ab')

    # only store hashes to avoid intensive memory usage
    blocks = [child.getHash()]

    # generate a list of parent Blocks
    while True:
        # end if the maximum number of follows has been exceeded
        if len(blocks) - 1 >= maximumFollows:
            break

        block = Block(blocks[-1], core = core).getParent()

        # end if there is no parent Block
        if block is None:
            break

        # end if the Block is pointing to a previously parsed Block
        if block.getHash() in blocks:
            break

        # end if the block is not valid
        if not block.isValid():
            break

        blocks.append(block.getHash())

    buffer = ''

    # combine block contents
    for hash in blocks:
        block = Block(hash, core = core)
        contents = block.getContent()
        # chunk contents are stored base64-encoded (see createChain)
        contents = base64.b64decode(contents.encode())

        if file is None:
            buffer += contents.decode()
        else:
            file.write(contents)

    return (None if not file is None else buffer)
def createChain(data = None, chunksize = 99800, file = None, type = 'chunk', sign = True, encrypt = False, verbose = False):
    '''
    Creates a chain of blocks to store larger amounts of data.

    The chunksize is set to 99800 because it provides the least amount of PoW for the most amount of data.

    Inputs:
        - data (*): if `file` is None, the data to be stored in blocks
        - file (file/str): the filename or file object to read from (or None to read `data` instead)
        - chunksize (int): the number of bytes per block chunk
        - type (str): the type header for each of the blocks
        - sign (bool): whether or not to sign each block
        - encrypt (str): the public key to encrypt to, or False to disable encryption
          NOTE(review): this parameter is currently accepted but never used
          in the body -- confirm whether encryption was meant to be wired in
        - verbose (bool): whether or not to return a tuple containing more info

    Outputs:
        - if `verbose`:
          - (tuple):
            - (str): the child block hash
            - (list): all block hashes associated with storing the file
        - if not `verbose`:
          - (str): the child block hash
    '''

    blocks = list()

    # initial datatype checks
    if data is None and file is None:
        return blocks
    elif not (file is None or (isinstance(file, str) and os.path.exists(file))):
        return blocks
    elif isinstance(file, str):
        file = open(file, 'rb')
    if not isinstance(data, str):
        data = str(data)

    if not file is None:
        filesize = os.stat(file.name).st_size
        # the final (partial) chunk is filesize % chunksize bytes long
        offset = filesize % chunksize
        maxtimes = int(filesize / chunksize)

        for times in range(0, maxtimes + 1):
            # read chunksize bytes from the file (end -> beginning)
            if times < maxtimes:
                file.seek(- ((times + 1) * chunksize), 2)
                content = file.read(chunksize)
            else:
                file.seek(0, 0)
                content = file.read(offset)

            # encode it- python is really bad at handling certain bytes that
            # are often present in binaries.
            content = base64.b64encode(content).decode()

            # if it is the end of the file, exit
            if not content:
                break

            # create block; each new block's parent is the previous one
            block = Block()
            block.setType(type)
            block.setContent(content)
            block.setParent((blocks[-1] if len(blocks) != 0 else None))
            hash = block.save(sign = sign)

            # remember the hash in cache
            blocks.append(hash)
    elif not data is None:
        # split data into chunksize pieces, processed last-to-first so that
        # each block can point at the previously created one as its parent
        for content in reversed([data[n:n + chunksize] for n in range(0, len(data), chunksize)]):
            # encode chunk with base64
            content = base64.b64encode(content.encode()).decode()

            # create block
            block = Block()
            block.setType(type)
            block.setContent(content)
            block.setParent((blocks[-1] if len(blocks) != 0 else None))
            hash = block.save(sign = sign)

            # remember the hash in cache
            blocks.append(hash)

    # return different things depending on verbosity
    if verbose:
        return (blocks[-1], blocks)
    return blocks[-1]
def exists(hash):
|
2018-05-16 02:12:23 +00:00
|
|
|
'''
|
|
|
|
Checks if a block is saved to file or not
|
|
|
|
|
|
|
|
Inputs:
|
|
|
|
- hash (str/Block):
|
|
|
|
- if (Block): check if this block is saved to file
|
|
|
|
- if (str): check if a block by this hash is in file
|
|
|
|
|
|
|
|
Outputs:
|
|
|
|
- (bool): whether or not the block file exists
|
|
|
|
'''
|
|
|
|
|
2018-06-14 04:17:58 +00:00
|
|
|
# no input data? scrap it.
|
2018-05-16 01:47:58 +00:00
|
|
|
if hash is None:
|
|
|
|
return False
|
2018-06-14 04:17:58 +00:00
|
|
|
|
|
|
|
if type(hash) == Block:
|
2018-05-16 01:47:58 +00:00
|
|
|
blockfile = hash.getBlockFile()
|
|
|
|
else:
|
|
|
|
blockfile = 'data/blocks/%s.dat' % hash
|
|
|
|
|
|
|
|
return os.path.exists(blockfile) and os.path.isfile(blockfile)
|
2018-06-14 04:17:58 +00:00
|
|
|
|
|
|
|
def getCache(hash = None):
|
|
|
|
# give a list of the hashes of the cached blocks
|
|
|
|
if hash is None:
|
|
|
|
return list(Block.blockCache.keys())
|
|
|
|
|
|
|
|
# if they inputted self or a Block, convert to hash
|
|
|
|
if type(hash) == Block:
|
|
|
|
hash = hash.getHash()
|
|
|
|
|
|
|
|
# just to make sure someone didn't put in a bool or something lol
|
|
|
|
hash = str(hash)
|
|
|
|
|
|
|
|
# if it exists, return its content
|
|
|
|
if hash in Block.getCache():
|
|
|
|
return Block.blockCache[hash]
|
|
|
|
|
|
|
|
return None
|
|
|
|
|
|
|
|
def cache(block, override = False):
|
|
|
|
# why even bother if they're giving bad data?
|
|
|
|
if not type(block) == Block:
|
|
|
|
return False
|
|
|
|
|
|
|
|
# only cache if written to file
|
|
|
|
if block.getHash() is None:
|
|
|
|
return False
|
|
|
|
|
|
|
|
# if it's already cached, what are we here for?
|
|
|
|
if block.getHash() in Block.getCache() and not override:
|
|
|
|
return False
|
|
|
|
|
|
|
|
# dump old cached blocks if the size exeeds the maximum
|
|
|
|
if sys.getsizeof(Block.blockCacheOrder) >= config.get('allocations.blockCacheTotal', 50000000): # 50MB default cache size
|
|
|
|
del Block.blockCache[blockCacheOrder.pop(0)]
|
|
|
|
|
|
|
|
# cache block content
|
|
|
|
Block.blockCache[block.getHash()] = block.getRaw()
|
|
|
|
Block.blockCacheOrder.append(block.getHash())
|
|
|
|
|
|
|
|
return True
|