# Onionr/onionr/onionrstorage.py
# (90 lines, 3.0 KiB, Python — last change 2018-12-31 04:49:27 +00:00)
'''
Onionr - P2P Anonymous Storage Network
This file handles block storage, providing an abstraction for storing blocks between file system and database
'''
'''
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <https://www.gnu.org/licenses/>.
'''
import core, sys, sqlite3, os, dbcreator

# Blocks whose in-memory size is at or below this limit live in the database;
# anything larger is written to its own file on disk. Will be a config option.
DB_ENTRY_SIZE_LIMIT = 10000
class BlockCache:
    '''In-memory cache of block data, keyed by block hash.'''

    def __init__(self):
        # Maps block hash -> raw block data
        self.blocks = {}

    def cleanCache(self):
        '''Evict entries (oldest insertion first) while the cache dict exceeds ~100 MB.

        NOTE(review): sys.getsizeof is shallow — it measures the dict object
        itself, not the block data it holds, so the effective limit is much
        looser than 100 MB of payload; confirm this is intended.
        '''
        while sys.getsizeof(self.blocks) > 100000000:
            # dicts preserve insertion order, so this drops the oldest entry
            del self.blocks[next(iter(self.blocks))]
def dbCreate(coreInst):
    '''Create the block data database for coreInst, ignoring the error if it already exists.'''
    try:
        dbcreator.DBCreator(coreInst).createBlockDataDB()
    except FileExistsError:
        # DB was created previously; nothing to do
        pass
def _dbInsert(coreInst, blockHash, data):
    '''Insert a block's data into the block data database, keyed by its hash.

    coreInst: a core.Core instance providing the blockDataDB path
    blockHash: hash string identifying the block
    data: the block payload to store
    '''
    assert isinstance(coreInst, core.Core)
    dbCreate(coreInst)  # ensure the DB exists before connecting
    conn = sqlite3.connect(coreInst.blockDataDB, timeout=10)
    # Fix: close the connection even when execute() raises (e.g. a constraint
    # violation on a duplicate hash) — the original leaked the connection.
    try:
        c = conn.cursor()
        # Parameterized query; never interpolate the hash/data directly
        c.execute('INSERT INTO blockData (hash, data) VALUES(?, ?);', (blockHash, data))
        conn.commit()
    finally:
        conn.close()
def _dbFetch(coreInst, blockHash):
    '''Return the stored data for blockHash from the block data database, or None if absent.

    coreInst: a core.Core instance providing the blockDataDB path
    blockHash: hash string identifying the block
    '''
    assert isinstance(coreInst, core.Core)
    dbCreate(coreInst)  # ensure the DB exists before connecting
    conn = sqlite3.connect(coreInst.blockDataDB, timeout=10)
    # Fix: the original returned from inside the cursor loop, skipping
    # conn.close() whenever a row was found — a connection leak on every
    # successful fetch. try/finally guarantees cleanup on both paths.
    # (The pointless commit() after a SELECT is dropped too.)
    try:
        c = conn.cursor()
        for row in c.execute('SELECT data from blockData where hash = ?', (blockHash,)):
            return row[0]
        return None
    finally:
        conn.close()
def store(coreInst, data, blockHash=''):
    '''Store block data, in the database when small enough, otherwise on disk.

    coreInst: a core.Core instance
    data: the block payload
    blockHash: optional expected hash; when given it is asserted against the
               hash computed from data, otherwise the computed hash is used
    '''
    assert isinstance(coreInst, core.Core)
    assert coreInst._utils.validateHash(blockHash)
    computedHash = coreInst._crypto.sha3Hash(data)
    if blockHash == '':
        blockHash = computedHash
    else:
        # Caller supplied a hash: it must match what the data actually hashes to
        assert computedHash == blockHash

    if sys.getsizeof(data) <= DB_ENTRY_SIZE_LIMIT:
        _dbInsert(coreInst, blockHash, data)
    else:
        # Too big for a DB row — write to its own file named after the hash
        blockPath = '%s/%s.dat' % (coreInst.blockDataLocation, blockHash)
        with open(blockPath, 'wb') as blockFile:
            blockFile.write(data)
    coreInst.blockCache.cleanCache()
def getData(coreInst, bHash):
    '''Return the raw data for a stored block: disk file first, then database.

    coreInst: a core.Core instance
    bHash: block hash (bytes or str; normalized to str)

    Returns the block data, or None when the block is in neither location.
    NOTE(review): the original comment claimed a missing block raises an
    exception, but the code actually returns None — callers must check.
    '''
    assert isinstance(coreInst, core.Core)
    assert coreInst._utils.validateHash(bHash)
    bHash = coreInst._utils.bytesToStr(bHash)

    blockPath = '%s/%s.dat' % (coreInst.blockDataLocation, bHash)
    if not os.path.exists(blockPath):
        # No on-disk file; fall back to the database entry (may be None)
        return _dbFetch(coreInst, bHash)
    with open(blockPath, 'rb') as blockFile:
        return blockFile.read()