'''
Onionr - Private P2P Communication
This file handles block storage, providing an abstraction that keeps each block either in a database or on the file system, depending on its size
'''
import sys
import sqlite3
import os
from onionrutils import bytesconverter
from onionrutils import stringvalidators
from coredb import dbfiles
import filepaths
import onionrcrypto
import onionrexceptions
from onionrsetup import dbcreator
from onionrcrypto import hashers
from . import setdata
'''
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <https://www.gnu.org/licenses/>.
'''

DB_ENTRY_SIZE_LIMIT = 10000  # Will be a config option
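
# A sketch of how the size cutoff behaves. Note that sys.getsizeof measures
# the whole Python object, not just the payload; on a 64-bit CPython build an
# empty bytes object already reports 33 bytes of overhead, so the effective
# payload cutoff sits slightly below DB_ENTRY_SIZE_LIMIT:
#
#   >>> import sys
#   >>> sys.getsizeof(b'')
#   33
#   >>> sys.getsizeof(b'x' * 10000) > 10000
#   True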

# Re-export setdata's set_data so it is reachable from this package's top level
set_data = setdata.set_data


def _dbInsert(blockHash, data):
    '''Insert a block's raw data into the block data database'''
    conn = sqlite3.connect(dbfiles.block_data_db, timeout=10)
    c = conn.cursor()
    entry = (blockHash, data)
    c.execute('INSERT INTO blockData (hash, data) VALUES(?, ?);', entry)
    conn.commit()
    conn.close()


def _dbFetch(blockHash):
    '''Fetch a block's raw data from the block data database, or None if it is not stored there'''
    conn = sqlite3.connect(dbfiles.block_data_db, timeout=10)
    c = conn.cursor()
    c.execute('SELECT data FROM blockData WHERE hash = ?', (blockHash,))
    row = c.fetchone()
    conn.close()
    if row is not None:
        return row[0]
    return None


def deleteBlock(blockHash):
    '''Delete a block's data from file storage or the database.

    Call removeblock.remove_block instead if the stored data size count
    should be updated automatically as well.
    '''
    if os.path.exists('%s/%s.dat' % (filepaths.block_data_location, blockHash)):
        os.remove('%s/%s.dat' % (filepaths.block_data_location, blockHash))
        return True
    conn = sqlite3.connect(dbfiles.block_data_db, timeout=10)
    c = conn.cursor()
    data = (blockHash,)
    c.execute('DELETE FROM blockData WHERE hash = ?', data)
    conn.commit()
    conn.close()
    return True
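
# Usage sketch. remove_block lives in onionrstorage.removeblock and its exact
# signature is not shown in this file; block_hash below is hypothetical:
#
#   deleteBlock(block_hash)  # deletes the stored data only
#   # go through removeblock.remove_block to also update the stored byte count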


def store(data, blockHash=''):
    '''Store data as a block, in the database if it is small enough, otherwise in its own file'''
    # Validate an explicitly given hash; an empty value means derive it below
    if blockHash != '' and not stringvalidators.validate_hash(blockHash):
        raise ValueError
    ourHash = hashers.sha3_hash(data)
    if blockHash != '':
        if ourHash != blockHash:
            raise ValueError('Hash specified does not meet internal hash check')
    else:
        blockHash = ourHash

    if DB_ENTRY_SIZE_LIMIT >= sys.getsizeof(data):
        _dbInsert(blockHash, data)
    else:
        with open('%s/%s.dat' % (filepaths.block_data_location, blockHash), 'wb') as blockFile:
            blockFile.write(data)
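
# A minimal usage sketch for store (the payload is hypothetical; assumes the
# block data database and filepaths.block_data_location already exist):
#
#   payload = b'hello onionr'
#   store(payload)  # hash is derived internally; small data goes to the DB
#
#   block_hash = hashers.sha3_hash(payload)
#   store(payload, blockHash=block_hash)  # an explicit hash is checked first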


def getData(bHash):
    '''Return the raw data for a stored block, checking the file system first, then the database'''
    if not stringvalidators.validate_hash(bHash):
        raise ValueError

    bHash = bytesconverter.bytes_to_str(bHash)

    # First check disk for a block file by hash
    # if there is no file, check the database
    # If there is no entry in either, raise an exception
    retData = None
    fileLocation = '%s/%s.dat' % (filepaths.block_data_location, bHash)
    not_found_msg = "Block data not found for: "
    if os.path.exists(fileLocation):
        with open(fileLocation, 'rb') as block:
            retData = block.read()
    else:
        retData = _dbFetch(bHash)
        if retData is None:
            raise onionrexceptions.NoDataAvailable(not_found_msg + str(bHash))
    return retData
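
# Round-trip sketch tying the storage functions together (values hypothetical):
#
#   payload = b'example block'
#   block_hash = hashers.sha3_hash(payload)
#   store(payload, blockHash=block_hash)
#   try:
#       assert getData(block_hash) == payload
#   except onionrexceptions.NoDataAvailable:
#       pass  # raised when neither disk nor the database has the block
#   deleteBlock(block_hash)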