2020-03-26 08:57:54 +00:00
|
|
|
"""Onionr - Private P2P Communication.
|
2018-12-31 04:49:27 +00:00
|
|
|
|
2020-03-26 08:57:54 +00:00
|
|
|
Handle block storage, providing an abstraction for
|
|
|
|
storing blocks between file system and database
|
|
|
|
"""
|
2019-11-20 10:52:50 +00:00
|
|
|
import sys
|
|
|
|
import sqlite3
|
|
|
|
import os
|
|
|
|
from onionrutils import bytesconverter
|
|
|
|
from onionrutils import stringvalidators
|
|
|
|
from coredb import dbfiles
|
2020-08-17 00:52:50 +00:00
|
|
|
from filepaths import block_data_location
|
2019-11-20 10:52:50 +00:00
|
|
|
import onionrexceptions
|
|
|
|
from onionrcrypto import hashers
|
|
|
|
from . import setdata
|
2020-09-28 08:18:30 +00:00
|
|
|
from etc.onionrvalues import DATABASE_LOCK_TIMEOUT, BLOCK_EXPORT_FILE_EXT
|
2020-03-26 08:57:54 +00:00
|
|
|
"""
|
2018-12-31 04:49:27 +00:00
|
|
|
This program is free software: you can redistribute it and/or modify
|
|
|
|
it under the terms of the GNU General Public License as published by
|
|
|
|
the Free Software Foundation, either version 3 of the License, or
|
|
|
|
(at your option) any later version.
|
|
|
|
|
|
|
|
This program is distributed in the hope that it will be useful,
|
|
|
|
but WITHOUT ANY WARRANTY; without even the implied warranty of
|
|
|
|
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
|
|
|
GNU General Public License for more details.
|
|
|
|
|
|
|
|
You should have received a copy of the GNU General Public License
|
|
|
|
along with this program. If not, see <https://www.gnu.org/licenses/>.
|
2020-03-26 08:57:54 +00:00
|
|
|
"""
|
2019-11-20 10:52:50 +00:00
|
|
|
|
|
|
|
|
|
|
|
# Largest size (in bytes, as measured by sys.getsizeof) a block may be
# and still be stored in the database; larger blocks go to disk files.
DB_ENTRY_SIZE_LIMIT = 10000 # Will be a config option

# Re-export the block data setter so callers can use
# onionrstorage.set_data directly
set_data = setdata.set_data
|
|
|
|
|
2019-11-20 10:52:50 +00:00
|
|
|
|
2020-08-17 00:52:50 +00:00
|
|
|
def _dbInsert(block_hash, data):
    """Insert a block's raw data into the block data database.

    Args:
        block_hash: hash string used as the lookup key
        data: the block's raw contents
    """
    conn = sqlite3.connect(dbfiles.block_data_db,
                           timeout=DATABASE_LOCK_TIMEOUT)
    try:
        c = conn.cursor()
        c.execute('INSERT INTO blockData (hash, data) VALUES(?, ?);',
                  (block_hash, data))
        conn.commit()
    finally:
        # Always release the connection, even if the INSERT raises
        # (the original leaked the connection on execute() failure)
        conn.close()
|
|
|
|
|
2019-11-20 10:52:50 +00:00
|
|
|
|
2020-08-17 00:52:50 +00:00
|
|
|
def _dbFetch(block_hash):
    """Return stored data for block_hash from the database, or None.

    Args:
        block_hash: hash string used as the lookup key
    """
    conn = sqlite3.connect(dbfiles.block_data_db,
                           timeout=DATABASE_LOCK_TIMEOUT)
    try:
        c = conn.cursor()
        c.execute('SELECT data from blockData where hash = ?', (block_hash,))
        row = c.fetchone()
    finally:
        # The original returned from inside the row loop, leaking the
        # connection on every successful fetch; close it unconditionally.
        conn.close()
    if row is not None:
        return row[0]
    return None
|
|
|
|
|
2019-11-20 10:52:50 +00:00
|
|
|
|
2020-08-17 00:52:50 +00:00
|
|
|
def deleteBlock(block_hash):
    """Delete a block's data from the file system or the database.

    Call removeblock.remove_block instead when the stored byte count
    should also be updated.

    Returns:
        True in all cases (file-backed or database-backed block).
    """
    # File-backed blocks (over the db size limit) live as individual
    # files; build the path once instead of twice.
    block_path = f'{block_data_location}/{block_hash}{BLOCK_EXPORT_FILE_EXT}'
    if os.path.exists(block_path):
        os.remove(block_path)
        return True
    conn = sqlite3.connect(dbfiles.block_data_db,
                           timeout=DATABASE_LOCK_TIMEOUT)
    try:
        c = conn.cursor()
        c.execute('DELETE FROM blockData where hash = ?', (block_hash,))
        conn.commit()
    finally:
        # Release the connection even if the DELETE raises
        conn.close()
    return True
|
|
|
|
|
2019-11-20 10:52:50 +00:00
|
|
|
|
2020-08-17 00:52:50 +00:00
|
|
|
def store(data, block_hash=''):
    """Store block data in the database or on disk, keyed by its hash.

    Data whose sys.getsizeof is at most DB_ENTRY_SIZE_LIMIT goes into
    the block data database; anything larger is written to its own file
    in the block data directory.

    Raises:
        ValueError: when block_hash fails validation, or when a
            supplied block_hash does not match the hash of data.
    """
    if not stringvalidators.validate_hash(block_hash):
        raise ValueError

    computed_hash = hashers.sha3_hash(data)
    if block_hash == '':
        # No hash supplied; use the one we just computed
        block_hash = computed_hash
    elif computed_hash != block_hash:
        raise ValueError('Hash specified does not meet internal hash check')

    if sys.getsizeof(data) <= DB_ENTRY_SIZE_LIMIT:
        _dbInsert(block_hash, data)
    else:
        file_path = f'{block_data_location}/{block_hash}{BLOCK_EXPORT_FILE_EXT}'
        with open(file_path, 'wb') as block_file:
            block_file.write(data)
|
2019-01-05 06:15:31 +00:00
|
|
|
|
2019-11-20 10:52:50 +00:00
|
|
|
|
2019-07-19 19:49:56 +00:00
|
|
|
def getData(bHash):
    """Return a block's raw data, checking the file system then the db.

    Raises:
        ValueError: when bHash fails hash validation.
        onionrexceptions.NoDataAvailable: when no data is stored for
            the hash in either location.
    """
    if not stringvalidators.validate_hash(bHash):
        raise ValueError

    bHash = bytesconverter.bytes_to_str(bHash).strip()

    # Blocks over the db entry size limit are stored as individual
    # files; smaller blocks live in the block data database. Check the
    # file first, then fall back to the database; raise if neither has it.
    block_data = None
    file_location = '%s/%s%s' % (
        block_data_location, bHash, BLOCK_EXPORT_FILE_EXT)

    if os.path.exists(file_location):
        with open(file_location, 'rb') as block_file:
            block_data = block_file.read()
    else:
        block_data = _dbFetch(bHash)

    if block_data is None:
        raise onionrexceptions.NoDataAvailable(
            "Block data not found for: " + str(bHash))
    return block_data
|