Onionr/onionr/onionrstorage/__init__.py

91 lines
3.3 KiB
Python
Raw Normal View History

2018-12-31 04:49:27 +00:00
'''
2019-06-16 06:06:32 +00:00
Onionr - Private P2P Communication
2018-12-31 04:49:27 +00:00
This file handles block storage, providing an abstraction for storing blocks between file system and database
'''
'''
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <https://www.gnu.org/licenses/>.
'''
2019-07-19 19:49:56 +00:00
import sys, sqlite3, os
from onionrutils import bytesconverter, stringvalidators
2019-07-19 19:49:56 +00:00
from coredb import dbfiles
import filepaths, onionrcrypto, dbcreator, onionrexceptions
from onionrcrypto import hashers
2019-07-24 17:22:19 +00:00
from . import setdata
2019-01-05 06:15:31 +00:00
DB_ENTRY_SIZE_LIMIT = 10000 # Will be a config option
2019-07-24 17:22:19 +00:00
set_data = setdata.set_data
2019-07-19 19:49:56 +00:00
def _dbInsert(blockHash, data):
    '''Insert a block's raw data into the block data database, keyed by its hash.

    blockHash -- hex block hash string used as the primary key
    data -- the raw block data to store in the blockData table
    '''
    conn = sqlite3.connect(dbfiles.block_data_db, timeout=10)
    try:
        c = conn.cursor()
        # Parameterized query; never build SQL by string concatenation
        c.execute('INSERT INTO blockData (hash, data) VALUES(?, ?);',
                  (blockHash, data))
        conn.commit()
    finally:
        # Close even if the INSERT raises (e.g. duplicate hash constraint),
        # otherwise the connection handle leaks
        conn.close()
2019-07-19 19:49:56 +00:00
def _dbFetch(blockHash):
    '''Return the stored data for blockHash from the block database, or None.

    Returns the first matching row's data column, or None when no row exists.
    '''
    conn = sqlite3.connect(dbfiles.block_data_db, timeout=10)
    try:
        c = conn.cursor()
        # Original returned from inside the loop without closing the
        # connection, leaking a handle on every successful fetch; the
        # try/finally guarantees cleanup on both paths. The commit was
        # dropped because a SELECT has nothing to commit.
        for row in c.execute(
                'SELECT data from blockData where hash = ?', (blockHash,)):
            return row[0]
        return None
    finally:
        conn.close()
2019-07-19 19:49:56 +00:00
def deleteBlock(blockHash):
    '''Delete a block's data from disk or, failing that, from the database.

    Checks the file-system location first (large blocks are stored as
    .dat files); if no file exists the blockData table row is removed.
    Always returns True.
    '''
    # You should call core.removeBlock if you automatically want to remove storage byte count
    blockFile = '%s/%s.dat' % (filepaths.block_data_location, blockHash)
    if os.path.exists(blockFile):
        os.remove(blockFile)
        return True
    conn = sqlite3.connect(dbfiles.block_data_db, timeout=10)
    try:
        c = conn.cursor()
        c.execute('DELETE FROM blockData where hash = ?', (blockHash,))
        conn.commit()
    finally:
        # Ensure the handle is released even if the DELETE raises
        conn.close()
    return True
2019-07-19 19:49:56 +00:00
def store(data, blockHash=''):
    '''Store block data, in the database for small blocks or on disk for large ones.

    data -- raw block content; its sha3 hash must match blockHash when one is given
    blockHash -- optional expected hash; when omitted the computed hash is used
    Raises ValueError on an invalid hash or a hash mismatch.
    '''
    # NOTE(review): this validates blockHash even when the default '' is
    # passed — confirm validate_hash accepts the empty string, otherwise
    # the default argument is unusable
    if not stringvalidators.validate_hash(blockHash):
        raise ValueError
    ourHash = hashers.sha3_hash(data)
    if blockHash == '':
        blockHash = ourHash
    elif ourHash != blockHash:
        raise ValueError('Hash specified does not meet internal hash check')

    # Small payloads go in the database, large ones to a .dat file
    if sys.getsizeof(data) <= DB_ENTRY_SIZE_LIMIT:
        _dbInsert(blockHash, data)
    else:
        with open('%s/%s.dat' % (filepaths.block_data_location, blockHash),
                  'wb') as blockFile:
            blockFile.write(data)
2019-07-19 19:49:56 +00:00
def getData(bHash):
    '''Return the raw data for block bHash.

    Checks the on-disk .dat file first, then the block database.
    Raises ValueError on an invalid hash and
    onionrexceptions.NoDataAvailable when neither location has the block.
    '''
    if not stringvalidators.validate_hash(bHash): raise ValueError
    bHash = bytesconverter.bytes_to_str(bHash)

    # First check DB for data entry by hash
    # if no entry, check disk
    # If no entry in either, raise an exception
    retData = None
    fileLocation = '%s/%s.dat' % (filepaths.block_data_location, bHash)
    if os.path.exists(fileLocation):
        with open(fileLocation, 'rb') as block:
            retData = block.read()
    else:
        retData = _dbFetch(bHash)
        if retData is None:
            # Fixed: original interpolated "% [bHash]", which rendered the
            # hash wrapped in Python list repr (e.g. "['abc']") in the message
            raise onionrexceptions.NoDataAvailable(
                "Block data for %s is not available" % bHash)
    return retData