Added create block RPC wrapper

Kevin F 2023-01-16 23:29:50 -06:00
parent 8712a1c401
commit ce3a548c70
3 changed files with 58 additions and 24 deletions

View File

@@ -2,20 +2,16 @@
"""
import threading
import os
import time
import traceback
import collections
from typing import Union
import ujson
import jsonrpc
from logger import log as logging
rpc_results = collections.deque(maxlen=10000)
def get_results(id) -> Union[str, None]:
    final = None
    for result in rpc_results:
@@ -29,13 +25,15 @@ def get_results(id) -> Union[str, None]:
def _exec_rpc(rpc_json_str):
    json_resp = jsonrpc.JSONRPCResponseManager.handle(rpc_json_str, jsonrpc.dispatcher)
    json_resp = jsonrpc.JSONRPCResponseManager.handle(
        rpc_json_str, jsonrpc.dispatcher)
    data = json_resp.data
    rpc_results.append(data)
def threaded_rpc(rpc_json_str):
    threading.Thread(
        target=_exec_rpc,
        args=(rpc_json_str,),
        daemon=True,
        name="JSON RPC").start()
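
For reference, a minimal sketch (not from this commit) of how these helpers are meant to be used together, assuming the json-rpc package's dispatcher/JSONRPCResponseManager API shown above and that get_results() matches on the JSON-RPC request id; the ping method and the polling loop are illustrative only.

import json
import time

from jsonrpc import dispatcher

import longrpc


@dispatcher.add_method
def ping():
    # Hypothetical method, registered only for this example.
    return "pong"


# Run the JSON-RPC request on a background thread; when the handler
# finishes, the response dict is appended to longrpc.rpc_results.
request = json.dumps(
    {"jsonrpc": "2.0", "method": "ping", "params": [], "id": 42})
longrpc.threaded_rpc(request)

# Poll the bounded deque for the response matching our request id.
result = None
while result is None:
    time.sleep(0.1)
    result = longrpc.get_results(42)
print(result)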

View File

@@ -57,6 +57,7 @@ import longrpc
plugin_apis['rpc.add_module_to_api'] = add_module_to_api

def _detect_cors_and_add_headers():
    cherrypy.response.headers['Access-Control-Allow-Headers'] = 'Content-Type'
    cherrypy.response.headers['Access-Control-Allow-Origin'] = '*'
@@ -65,12 +66,13 @@ def _detect_cors_and_add_headers():
        return True
    return False
class OnionrRPC(object):
    @cherrypy.expose
    def threaded_rpc(self):
        if _detect_cors_and_add_headers():
            return ''
        rpc_request_json: str = cherrypy.request.body.read().decode('utf-8')
        longrpc.threaded_rpc(rpc_request_json)
        return 'ok'
@@ -79,19 +81,19 @@ class OnionrRPC(object):
    def get_rpc_result(self, id=0):
        if _detect_cors_and_add_headers():
            return ''
        results = longrpc.get_results(id)
        if not results:
            return '"no result"'
        return results

    @cherrypy.expose
    def rpc(self):
        # Basic RPC, intended for small amounts of work
        # Use /queue_rpc for large workloads like creating blocks
        # and getting results with /get_rpc_result?id=<id>
        # Dispatcher is dictionary {<method_name>: callable}
        if _detect_cors_and_add_headers():
            return ''
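
For reference, a client-side sketch (not from this commit) of the two call paths described in the comments above, assuming the requests package and a placeholder bind address; the real host and port come from the rpc.bind_host / rpc.bind_port config used below, and the payload values are illustrative only.

import base64
import json
import time

import requests

RPC_URL = "http://127.0.0.1:8080"  # placeholder; use rpc.bind_host/rpc.bind_port

# Small, quick calls go straight to /rpc (handled inline, per the comment above).
quick = requests.post(
    RPC_URL + "/rpc",
    data=json.dumps(
        {"jsonrpc": "2.0", "method": "get_blocks", "params": [0], "id": 1}))
print(quick.text)

# Heavy calls such as block creation go through the threaded endpoint,
# then the result is polled back by JSON-RPC id.
requests.post(
    RPC_URL + "/threaded_rpc",
    data=json.dumps({
        "jsonrpc": "2.0",
        "method": "create_block",
        "params": [base64.b64encode(b"hello world").decode("utf-8"),
                   "txt", 3600, {}],
        "id": 2}))

result = '"no result"'
while result == '"no result"':
    time.sleep(1)
    result = requests.get(
        RPC_URL + "/get_rpc_result", params={"id": 2}).text
print(result)
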
@@ -146,7 +148,7 @@ def on_settcpsocket_cmd(api, data=None):
    config.set('rpc.bind_port', port, savefile=True)
    logging.info(
        'Set RPC to use TCP socket http://' +
        f'{config.get("rpc.bind_host")}:{config.get("rpc.bind_port")}')

View File

@@ -1,26 +1,39 @@
from secrets import randbits
import base64
from base64 import b85decode
from typing import Union
from onionrblocks import Block
import onionrblocks
from jsonrpc import dispatcher
from gossip.blockqueues import gossip_block_queues
from blockdb import get_blocks_after_timestamp
import blockdb
from utils import multiproc
@dispatcher.add_method
def get_block(block_id: str) -> dict:
    bl = blockdb.get_block(block_id)


@dispatcher.add_method
def get_blocks(timestamp):
    return [block.raw for block in get_blocks_after_timestamp(timestamp)]
    blocks = []
    for block in blockdb.get_blocks_after_timestamp(timestamp):
        blocks.append({
            'id': block.id,
            'raw': base64.b64encode(block.raw).decode('utf-8')
        })
    return blocks
@dispatcher.add_method
def create_block(
        block_data: 'base64', block_type: str, ttl: int, metadata: dict):
    # Wrapper for onionrblocks.create_block (take base64 to be compatible with RPC)
    bl = multiproc.subprocess_compute(
    # Wrapper for onionrblocks.create_block
    # (take base64 to be compatible with RPC)
    bl: Block = multiproc.subprocess_compute(
        onionrblocks.create_anonvdf_block,
        3600,
        base64.b64decode(block_data),
@@ -28,14 +41,35 @@ def create_block(
        ttl,
        **metadata
    )
    try:
        block_id = bl.id.decode('utf-8')
    except AttributeError:
        block_id = bl.id
    bl_json = {
        'id': block_id,
        'raw': base64.b64encode(bl.raw).decode('utf-8')
    }
    return bl_json
    return base64.b85encode(bl.raw).decode('utf-8')


queue_to_use = randbits(1)
@dispatcher.add_method
def insert_block(block):
    block = Block(
        block['id'], b85decode(block['raw']), auto_verify=False)
def create_and_insert_block(
        block_data: 'base64',
        block_type: str, ttl: int, metadata: dict) -> str:
    bl = create_block(block_data, block_type, ttl, metadata)
    insert_block(bl)
    return bl['id']
# As per dandelion++ spec the edge should be the same.
# We keep it the same for each daemon lifetime.
queue_to_use = randbits(1)


@dispatcher.add_method
def insert_block(block: Union[dict, Block]):
    if isinstance(block, dict):
        block = Block(
            block['id'], base64.b64decode(block['raw']), auto_verify=False)
    gossip_block_queues[queue_to_use].put_nowait(block)
    return "ok"
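
For reference, a sketch (not from this commit) of driving the new wrapper end to end through the dispatcher, assuming this module has been imported so its methods are registered and that the onionrblocks/gossip machinery is importable; the block type and empty metadata are illustrative only.

import base64
import json

import jsonrpc

# Encode the block contents as base64, exactly as create_block expects
# over RPC, and ask for creation plus insertion into a gossip queue.
payload = base64.b64encode(b"example block contents").decode("utf-8")
request = json.dumps({
    "jsonrpc": "2.0",
    "method": "create_and_insert_block",
    "params": [payload, "txt", 3600, {}],
    "id": 1,
})

response = jsonrpc.JSONRPCResponseManager.handle(request, jsonrpc.dispatcher)
# create_and_insert_block returns the new block id; the block itself has
# already been placed on gossip_block_queues[queue_to_use] by insert_block().
print(response.data["result"])

Base64 keeps the binary block payload representable inside the JSON-RPC envelope, which is why the wrapper decodes block_data before handing it to the anonvdf block creator.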