work on using vdf instead
parent 8d372cccfb
commit 599f222d43
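Note on the change: the hunks below replace the old proof-of-work check, onionrcrypto.cryptoutils.verify_POW(content), with onionrproofs.vdf.verify(content, hash), and block insertion now derives the block identifier from onionrproofs.vdf.multiprocess_create(data) rather than from a POW payload. The VDF itself lives in onionrproofs.vdf and is not shown in this diff; the sketch below is only a toy stand-in (an iterated SHA3 chain) to illustrate the create-once, verify-against-the-advertised-hash call pattern. A production VDF (the variable names below refer to MIMC) is also cheap to verify, unlike this stand-in.

# Toy illustration of the create/verify flow this commit moves toward.
# Not the onionrproofs.vdf implementation; an iterated SHA3 chain only
# models "sequential work at insert time, checked against the block hash".
from hashlib import sha3_256

ROUNDS = 50_000  # arbitrary work factor for the illustration


def create(data: bytes) -> str:
    """Derive a hex digest from data by repeated hashing (sequential work)."""
    digest = sha3_256(data).digest()
    for _ in range(ROUNDS):
        digest = sha3_256(digest).digest()
    return digest.hex()


def verify(data: bytes, claimed_hash: str) -> bool:
    """Recompute the chain and compare against the hash a peer advertised."""
    return create(data) == claimed_hash


block = b'example block content'
block_hash = create(block)        # done once when a block is inserted
assert verify(block, block_hash)  # done by peers when the block is downloaded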
graphing.py (new Normal file, 30 lines)
@@ -0,0 +1,30 @@
import networkx as nx
import matplotlib.pyplot as plt
import networkx.drawing

import json
from hashlib import sha3_256

def do_hash(data):
    h = sha3_256()
    h.update(data.encode())
    return h.digest()


with open('stats.json', 'r') as raw_data:
    raw_data = raw_data.read()

G = nx.MultiGraph()
js = json.loads(raw_data)

for node in js:
    G.add_node(node[:5] + '.onion')

for node in js:
    data = json.loads(js[node])
    for conn_node in data['peers']:
        G.add_edge(node[:5] + '.onion', conn_node[:5] + '.onion')


nx.draw_spring(G, font_weight='bold', with_labels=True)
plt.show()
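For context, graphing.py expects stats.json to map each node address to a JSON string whose object carries a 'peers' list of addresses; that file is not part of this commit. A minimal, hypothetical generator of that shape (the addresses are placeholders):

# Hypothetical stats.json matching what graphing.py reads above;
# node addresses are made-up placeholders, not real .onion services.
import json

sample = {
    "exampleaddressaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa": json.dumps(
        {"peers": ["exampleaddressbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb"]}),
    "exampleaddressbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb": json.dumps(
        {"peers": ["exampleaddressaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"]}),
}

with open('stats.json', 'w') as f:
    json.dump(sample, f)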
@@ -20,10 +20,12 @@ from onionrutils import validatemetadata
from coredb import blockmetadb
from onionrutils.localcommand import local_command
import onionrcrypto
from onionrcrypto.hashers import sha3_hash
import onionrstorage
from onionrblocks import onionrblacklist
from onionrblocks import storagecounter
from . import shoulddownload
from onionrproofs.vdf import verify
"""
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
@@ -96,6 +98,7 @@ def download_blocks_from_communicator(comm_inst: "OnionrCommunicatorDaemon"):
            realHash = realHash.decode() # bytes on some versions for some reason
        except AttributeError:
            pass

        if realHash == blockHash:
            #content = content.decode() # decode here because sha3Hash needs bytes above
            metas = blockmetadata.get_block_metadata_from_data(content) # returns tuple(metadata, meta), meta is also in metadata
@@ -106,7 +109,7 @@ def download_blocks_from_communicator(comm_inst: "OnionrCommunicatorDaemon"):
        except onionrexceptions.DataExists:
            metadata_validation_result = False
        if metadata_validation_result: # check if metadata is valid, and verify nonce
            if onionrcrypto.cryptoutils.verify_POW(content): # check if POW is enough/correct
            if verify(content, blockHash): # check if POW is enough/correct
                logger.info('Attempting to save block %s...' % blockHash[:12])
                try:
                    onionrstorage.set_data(content)
@@ -23,11 +23,16 @@ from onionrutils.basicrequests import do_post_request, do_get_request
"""


class ConnectedError(Exception): pass



class Client:
    def __init__(self):
        self.peers = []
        self.lookup_time = {}
        self.poll_delay = 10
        self.active_threads: set = set([])

    def get_lookup_time(self, peer):
        try:
@@ -35,9 +40,21 @@ class Client:
        except KeyError:
            return 0

    def peer_thread(self, peer):
        def do_peer_sync(): return
        if peer in self.active_threads:
            raise ConnectedError
        self.active_threads.add(peer)
        do_peer_sync()
        self.active_threads.remove(peer)

    def start(self):
        while True:
            self.peers = random_shuffle(self.peers)
            peers = random_shuffle(list(set(self.peers) ^ self.active_threads))
            try:
                self.peer_thread(peers[0])
            except IndexError:
                pass
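Note on Client.start() above: set(self.peers) ^ self.active_threads is a symmetric difference, so the candidate list holds peers that are not currently being synced (plus any active entries that have dropped out of self.peers). A quick illustration:

# Symmetric difference as used in Client.start()
peers = ['a.onion', 'b.onion', 'c.onion']
active_threads = {'b.onion'}
print(set(peers) ^ active_threads)  # -> {'a.onion', 'c.onion'} in some order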
@@ -13,6 +13,8 @@ from onionrutils import blockmetadata
from coredb import blockmetadb
import onionrstorage
import onionrcrypto as crypto
from onionrcrypto.hashers import sha3_hash
from onionrproofs.vdf import verify
from . import onionrblacklist

'''
@@ -52,7 +54,7 @@ def import_block_from_data(content):
    # check if metadata is valid
    if validatemetadata.validate_metadata(metadata, metas[2]):
        # check if POW is enough/correct
        if crypto.cryptoutils.verify_POW(content):
        if verify(content, sha3_hash(content)):
            logger.info(f'Imported block passed proof, saving: {data_hash}.',
                        terminal=True)
            try:
@@ -31,8 +31,8 @@ import config, onionrcrypto as crypto, onionrexceptions
from onionrusers import onionrusers
from onionrutils import localcommand, blockmetadata, stringvalidators
import coredb
import onionrproofs
from onionrproofs import subprocesspow
from onionrproofs.vdf import multiprocess_create

import logger
from onionrtypes import UserIDSecretKey
@@ -187,14 +187,11 @@ def insert_block(data: Union[str, bytes], header: str = 'txt',
        expire = epoch.get_epoch() + expire
        metadata['expire'] = expire

    # send block data (and metadata) to POW module to get tokenized block data
    if use_subprocess:
        payload = subprocesspow.SubprocessPOW(data, metadata).start()
    else:
        payload = onionrproofs.POW(metadata, data).waitForResult()
    if payload != False:
    data = json.dumps(metadata).encode() + b'\n' + data
    mimc_hash = multiprocess_create(data)
    if mimc_hash != False:
        try:
            retData = onionrstorage.set_data(payload)
            retData = onionrstorage.set_data(data, mimc_hash)
        except onionrexceptions.DiskAllocationReached:
            logger.error(allocationReachedMessage)
            retData = False
@@ -29,7 +29,7 @@ def create(data: bytes) -> str:
    return vdf_create(data, rounds)


def multiproces_create(data: bytes) -> str:
def multiprocess_create(data: bytes) -> str:
    parent_conn, child_conn = Pipe()
    def __do_create(conn, data):
        conn.send(create(data))
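The hunk above ends mid-function; the remainder of multiprocess_create is not shown in this diff. A typical completion of this Pipe pattern, sketched here as an assumption rather than the project's actual code, runs the worker in a child process and reads the result from the parent end of the pipe:

# Sketch of the usual Pipe + Process pattern started in the hunk above.
# create() below is a stand-in for onionrproofs.vdf.create, and the worker
# is defined at module level so the sketch also works with the 'spawn'
# start method (the hunk nests it, which works under 'fork' on Linux).
from multiprocessing import Pipe, Process


def create(data: bytes) -> str:
    return data.hex()  # placeholder for the real, slow VDF computation


def _do_create(conn, data):
    conn.send(create(data))


def multiprocess_create(data: bytes) -> str:
    parent_conn, child_conn = Pipe()
    p = Process(target=_do_create, args=(child_conn, data))
    p.start()                     # compute in a separate process
    result = parent_conn.recv()   # block until the child sends its result
    p.join()
    return result


if __name__ == '__main__':
    print(multiprocess_create(b'test'))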
@@ -28,7 +28,7 @@ from onionrutils import blockmetadata, bytesconverter
"""


def set_data(data) -> str:
def set_data(data, mimc_hash) -> str:
    """Set the data assciated with a hash."""
    storage_counter = storagecounter.StorageCounter()
    data = data
@@ -41,20 +41,20 @@ def set_data(data) -> str:
    if not type(data) is bytes:
        data = data.encode()

    dataHash = crypto.hashers.sha3_hash(data)
    mimc_hash = crypto.hashers.sha3_hash(data)

    if type(dataHash) is bytes:
        dataHash = dataHash.decode()
    if type(mimc_hash) is bytes:
        mimc_hash = mimc_hash.decode()
    try:
        onionrstorage.getData(dataHash)
        onionrstorage.getData(mimc_hash)
    except onionrexceptions.NoDataAvailable:
        if storage_counter.add_bytes(dataSize) is not False:
            onionrstorage.store(data, blockHash=dataHash)
            onionrstorage.store(data, blockHash=mimc_hash)
            conn = sqlite3.connect(dbfiles.block_meta_db, timeout=30)
            c = conn.cursor()
            c.execute(
                "UPDATE hashes SET dataSaved=1 WHERE hash = ?;",
                (dataHash,))
                (mimc_hash,))
            conn.commit()
            conn.close()
            with open(filepaths.data_nonce_file, 'a') as nonceFile:
@@ -63,6 +63,6 @@ def set_data(data) -> str:
            raise onionrexceptions.DiskAllocationReached
    else:
        raise onionrexceptions.DataExists(
            "Data is already set for " + dataHash)
            "Data is already set for " + mimc_hash)

    return dataHash
    return mimc_hash
@@ -1 +1 @@
3msj7fgyxgpfsjvvtcji7a4tkjbna6jmpealv6mun7435jjyptctfxyd.onion,chz7aarrmhxnefa6jx7ai3h3f5oy4sz5x4o5bbhfcq4xr3zbvsynaoad.onion,llqcrrf5cdk7p277eynepnvoo4ggrnybmp2daqtsr2hshitlmvbipdqd.onion,lqyhqt5mtsvu5bdatn4ntaplsfsrfxzp3j6gze77g4nptpxe36q7poad.onion
3msj7fgyxgpfsjvvtcji7a4tkjbna6jmpealv6mun7435jjyptctfxyd.onion,chz7aarrmhxnefa6jx7ai3h3f5oy4sz5x4o5bbhfcq4xr3zbvsynaoad.onion,llqcrrf5cdk7p277eynepnvoo4ggrnybmp2daqtsr2hshitlmvbipdqd.onion,lqyhqt5mtsvu5bdatn4ntaplsfsrfxzp3j6gze77g4nptpxe36q7poad.onion
@@ -1 +0,0 @@
1585619396
@@ -21,14 +21,14 @@ class TestVdf(unittest.TestCase):
        res = vdf.create(b'test')
        int(res, 16)
        if len(res) == 0: raise ValueError
        self.assertEqual(vdf.multiproces_create(b'test'), res)
        self.assertEqual(vdf.multiprocess_create(b'test'), res)
    def test_speed(self):
        t = time()
        vdf.create(b'test')
        self.assertTrue(time() - t <= 10)
        # test 2 kb
        t = time()
        vdf.create(b't'*10000)
        vdf.create(b't'*2000)
        self.assertTrue(time() - t >= 10)
        #timeit(lambda: vdf.create(b'test'))
@@ -3,6 +3,7 @@ import sys, os
sys.path.append(".")
sys.path.append("src/")
import unittest, uuid, hashlib
import secrets

TEST_DIR = 'testdata/%s-%s' % (uuid.uuid4(), os.path.basename(__file__)) + '/'
print("Test directory:", TEST_DIR)
@@ -14,6 +15,8 @@ from utils import createdirs
from onionrutils import bytesconverter
import onionrcrypto
from onionrblocks import onionrblockapi
from onionrproofs.vdf import verify
from onionrcrypto.hashers import sha3_hash

def setup_test():
    TEST_DIR = 'testdata/%s-%s' % (uuid.uuid4(), os.path.basename(__file__)) + '/'
@@ -24,9 +27,8 @@ def setup_test():
class OnionrBlockTests(unittest.TestCase):
    def test_plaintext_insert(self):
        setup_test()
        message = 'hello world'
        message = 'hello world' + secrets.token_hex(5)
        bl = onionrblocks.insert(message)
        self.assertTrue(bl.startswith('0'))
        self.assertIn(bytesconverter.str_to_bytes(message), onionrstorage.getData(bl))

    def test_encrypted_insert(self):
@@ -8,6 +8,12 @@ TEST_DIR = 'testdata/%s-%s' % (uuid.uuid4(), os.path.basename(__file__)) + '/'
print("Test directory:", TEST_DIR)
os.environ["ONIONR_HOME"] = TEST_DIR
import unittest
from utils import identifyhome, createdirs
from onionrsetup import setup_config

createdirs.create_dirs()
setup_config()


from lan.getip import lan_ips, best_ip