'''
Onionr - Private P2P Communication

This file handles block storage, providing an abstraction for storing blocks between file system and database
'''
'''
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.

This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with this program. If not, see <https://www.gnu.org/licenses/>.
'''
|
import sys, sqlite3, os

from onionrutils import bytesconverter, stringvalidators
from coredb import dbfiles
import filepaths, onionrcrypto, dbcreator, onionrexceptions
from onionrcrypto import hashers

DB_ENTRY_SIZE_LIMIT = 10000 # Will be a config option

def dbCreate():
    """Create the block data database, doing nothing if it already exists."""
    db_creator = dbcreator.DBCreator()
    try:
        db_creator.createBlockDataDB()
    except FileExistsError:
        pass  # DB was already created; this is the normal steady-state case

def _dbInsert(blockHash, data):
    """Insert a block's data into the block data database.

    blockHash: hash string used as the row key
    data: the block's raw data to store alongside the hash
    """
    dbCreate()
    conn = sqlite3.connect(dbfiles.block_data_db, timeout=10)
    try:
        c = conn.cursor()
        # Parameterized query; never interpolate hash/data into the SQL string
        c.execute('INSERT INTO blockData (hash, data) VALUES(?, ?);', (blockHash, data))
        conn.commit()
    finally:
        # Close even when execute/commit raises (e.g. constraint violation);
        # the original leaked the connection on any exception
        conn.close()

def _dbFetch(blockHash):
    """Return the stored data for blockHash from the block data DB, or None if absent."""
    dbCreate()
    conn = sqlite3.connect(dbfiles.block_data_db, timeout=10)
    try:
        c = conn.cursor()
        for row in c.execute('SELECT data from blockData where hash = ?', (blockHash,)):
            return row[0]  # first (only) matching row
        return None
    finally:
        # The original returned from inside the loop without closing, leaking
        # the connection on every successful fetch; finally guarantees cleanup.
        # (The original's commit() after the return was unreachable dead code
        # and is unnecessary for a SELECT anyway.)
        conn.close()

def deleteBlock(blockHash):
    """Delete a block's stored data, from disk if file-stored, otherwise from the DB.

    Returns True in either case.
    """
    # You should call core.removeBlock if you automatically want to remove storage byte count
    blockFileLocation = '%s/%s.dat' % (filepaths.block_data_location, blockHash)
    if os.path.exists(blockFileLocation):
        os.remove(blockFileLocation)
        return True
    dbCreate()
    conn = sqlite3.connect(dbfiles.block_data_db, timeout=10)
    try:
        c = conn.cursor()
        c.execute('DELETE FROM blockData where hash = ?', (blockHash,))
        conn.commit()
    finally:
        # Close even if the DELETE/commit raises; the original leaked the
        # connection on any exception in this path
        conn.close()
    return True

def store(data, blockHash=''):
    """Store a block: small data goes into the DB, larger data onto the file system.

    If blockHash is supplied it must equal the sha3 hash of data; when omitted
    the hash is computed here.
    """
    assert stringvalidators.validate_hash(blockHash)
    computedHash = hashers.sha3_hash(data)
    if blockHash == '':
        blockHash = computedHash
    else:
        # Caller-supplied hash must match the data's actual hash
        assert computedHash == blockHash

    # Storage backend is chosen by in-memory size of the data
    if sys.getsizeof(data) <= DB_ENTRY_SIZE_LIMIT:
        _dbInsert(blockHash, data)
    else:
        targetFile = '%s/%s.dat' % (filepaths.block_data_location, blockHash)
        with open(targetFile, 'wb') as blockFile:
            blockFile.write(data)

def getData(bHash):
    """Return the raw stored data for the block hash bHash.

    Looks on the file system first, then falls back to the block data DB.
    Raises onionrexceptions.NoDataAvailable when neither location has it.
    """
    assert stringvalidators.validate_hash(bHash)

    bHash = bytesconverter.bytes_to_str(bHash)

    # Check disk for a per-block data file first; if absent, check the DB.
    # If no entry in either, raise an exception.
    retData = None
    fileLocation = '%s/%s.dat' % (filepaths.block_data_location, bHash)
    if os.path.exists(fileLocation):
        with open(fileLocation, 'rb') as block:
            retData = block.read()
    else:
        # Bug fix: the original called _dbFetch(coreInst, bHash) with an
        # undefined name coreInst (and wrong arity), raising NameError
        # whenever the DB fallback path was reached
        retData = _dbFetch(bHash)

    if retData is None:
        raise onionrexceptions.NoDataAvailable("Block data for %s is not available" % [bHash])
    return retData