Skip to content

Commit

Permalink
Merge pull request #57 from xelis-project/dev
Browse files Browse the repository at this point in the history
v1.10.0 Mainnet
  • Loading branch information
Slixe committed May 5, 2024
2 parents 393fc02 + 45795c2 commit 3a41abf
Show file tree
Hide file tree
Showing 49 changed files with 2,287 additions and 969 deletions.
490 changes: 328 additions & 162 deletions API.md

Large diffs are not rendered by default.

535 changes: 467 additions & 68 deletions Cargo.lock

Large diffs are not rendered by default.

6 changes: 4 additions & 2 deletions xelis_common/Cargo.toml
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
[package]
name = "xelis_common"
version = "1.9.5"
version = "1.10.0"
edition = "2021"
authors = ["Slixe <slixeprivate@gmail.com>"]
build = "build.rs"
Expand All @@ -27,7 +27,7 @@ anyhow = "1.0.81"
log = "0.4"
fern = { version = "0.6", features = ["colored", "date-based"] }
chrono = "0.4.35"
tokio = { version = "1.36", features = ["macros", "signal", "time", "sync", "tracing"], optional = true }
tokio = { version = "1.36", features = ["macros", "signal", "time", "sync"], optional = true }
reqwest = { version = "0.11.25", default-features = false, features = ["json"], optional = true }
clap = { version = "4.5.2", features = ["derive"], optional = true }
crossterm = "0.27.0"
Expand All @@ -45,6 +45,7 @@ serde_regex = "1.1.0"
tokio-tungstenite = { version = "0.21", features = ["rustls-tls-webpki-roots"] }
# Used for U256
primitive-types = { version = "0.12.2", features = ["serde"] }
console-subscriber = { version = "0.2.0", optional = true }

[dev-dependencies]
tokio = { version = "1.36", features = ["full"] }
Expand All @@ -55,3 +56,4 @@ json_rpc = ["dep:reqwest"]
prompt = ["dep:tokio"]
clap = ["dep:clap"]
rpc_server = ["dep:actix-rt", "dep:actix-web", "dep:actix-ws", "dep:futures-util", "dep:tokio", "dep:reqwest"]
tracing = ["dep:console-subscriber", "tokio/tracing"]
53 changes: 53 additions & 0 deletions xelis_common/src/api/daemon.rs
Original file line number Diff line number Diff line change
Expand Up @@ -45,7 +45,12 @@ pub struct RPCBlockResponse<'a> {
pub block_type: BlockType,
pub difficulty: Cow<'a, Difficulty>,
pub supply: Option<u64>,
// Reward can be split into two parts
pub reward: Option<u64>,
// Miner reward (the one that found the block)
pub miner_reward: Option<u64>,
// And Dev Fee reward if enabled
pub dev_reward: Option<u64>,
pub cumulative_difficulty: Cow<'a, CumulativeDifficulty>,
pub total_fees: Option<u64>,
pub total_size_in_bytes: usize,
Expand Down Expand Up @@ -98,6 +103,20 @@ pub struct GetBlockTemplateParams<'a> {
pub address: Cow<'a, Address>
}

// Request parameters to build a MinerWork from an existing block template.
// NOTE(review): presumably backs a `create_miner_work` RPC method — verify
// against the daemon's RPC registration.
#[derive(Serialize, Deserialize)]
pub struct CreateMinerWorkParams<'a> {
    // Block Template in hexadecimal format
    pub template: Cow<'a, String>,
    // Address of the miner, if empty, it will use the address from template
    pub address: Option<Cow<'a, Address>>,
}

// Response payload carrying the serialized work for a miner.
#[derive(Serialize, Deserialize)]
pub struct CreateMinerWorkResult {
    // MinerWork struct in hexadecimal format
    pub miner_work: String
}

#[derive(Serialize, Deserialize)]
pub struct GetBlockTemplateResult {
// block_template is Block Header in hexadecimal format
Expand Down Expand Up @@ -452,6 +471,40 @@ pub struct GetMempoolCacheResult {
balances: HashMap<Hash, CiphertextCache>
}

// Response for the network difficulty query.
#[derive(Serialize, Deserialize)]
pub struct GetDifficultyResult {
    // Current network difficulty
    pub difficulty: Difficulty,
    // Estimated network hashrate, expressed with the same numeric type
    // as the difficulty
    pub hashrate: Difficulty,
    // Human-readable rendering of the hashrate (units chosen by the daemon)
    pub hashrate_formatted: String
}

// Request parameters for address validation.
#[derive(Serialize, Deserialize)]
pub struct ValidateAddressParams<'a> {
    // Address to validate
    pub address: Cow<'a, Address>,
    // Whether integrated addresses are accepted as valid.
    // serde default: false when the field is omitted from the request
    #[serde(default)]
    pub allow_integrated: bool
}

// Response for address validation.
#[derive(Serialize, Deserialize)]
pub struct ValidateAddressResult {
    // True when the address passed validation
    pub is_valid: bool,
    // True when the address is an integrated address
    pub is_integrated: bool
}

// Request parameters to extract the public key embedded in an address.
#[derive(Serialize, Deserialize)]
pub struct ExtractKeyFromAddressParams<'a> {
    // Address to extract the key from
    pub address: Cow<'a, Address>,
    // When true the key is returned hex-encoded instead of as raw bytes.
    // serde default: false when omitted
    #[serde(default)]
    pub as_hex: bool
}

// Key extraction result; the variant mirrors the `as_hex` request flag.
// rename_all makes the JSON tags "bytes" / "hex".
#[derive(Serialize, Deserialize)]
#[serde(rename_all = "snake_case")]
pub enum ExtractKeyFromAddressResult {
    // Raw public key bytes
    Bytes(Vec<u8>),
    // Hex-encoded public key
    Hex(String)
}

#[derive(Clone, Debug, PartialEq, Eq, Hash, Serialize, Deserialize)]
#[serde(rename_all = "snake_case")]
pub enum NotifyEvent {
Expand Down
29 changes: 29 additions & 0 deletions xelis_common/src/block/header.rs
Original file line number Diff line number Diff line change
Expand Up @@ -298,4 +298,33 @@ impl Display for BlockHeader {
}
write!(f, "BlockHeader[height: {}, tips: [{}], timestamp: {}, nonce: {}, extra_nonce: {}, txs: {}]", self.height, tips.join(", "), self.timestamp, self.nonce, hex::encode(self.extra_nonce), self.txs_hashes.len())
}
}

#[cfg(test)]
mod tests {
    use indexmap::IndexSet;
    use crate::{crypto::{Hash, Hashable, KeyPair}, serializer::Serializer};
    use super::BlockHeader;

    // Round-trip a freshly built header through the binary serializer and
    // check that the reported size and the hash are stable.
    #[test]
    fn test_block_template() {
        let mut tips = IndexSet::new();
        tips.insert(Hash::zero());

        let miner = KeyPair::new().get_public_key().compress();
        let header = BlockHeader::new(0, 0, 0, tips, [0u8; 32], miner, IndexSet::new());

        let serialized = header.to_bytes();
        // assert_eq! over assert!(a == b): failures print both sides
        assert_eq!(serialized.len(), header.size());

        let deserialized = BlockHeader::from_bytes(&serialized).unwrap();
        assert_eq!(header.hash(), deserialized.hash());
    }

    // Decode a known-good header from hex and verify that re-encoding
    // reproduces the exact same string (lossless round trip).
    #[test]
    fn test_block_template_from_hex() {
        let serialized = "00000000000000002d0000018f1cbd697000000000000000000eded85557e887b45989a727b6786e1bd250de65042d9381822fa73d01d2c4ff01d3a0154853dbb01dc28c9102e9d94bea355b8ee0d82c3e078ac80841445e86520000d67ad13934337b85c34985491c437386c95de0d97017131088724cfbedebdc55".to_owned();
        let header = BlockHeader::from_hex(serialized.clone()).unwrap();
        assert_eq!(header.to_hex(), serialized);
    }
}
13 changes: 12 additions & 1 deletion xelis_common/src/block/miner.rs
Original file line number Diff line number Diff line change
Expand Up @@ -17,7 +17,7 @@ use crate::{
time::TimestampMillis,
};

use super::{EXTRA_NONCE_SIZE, BLOCK_WORK_SIZE};
use super::{BlockHeader, BLOCK_WORK_SIZE, EXTRA_NONCE_SIZE};

// This structure is used by xelis-miner which allow to compute a valid block POW hash
#[derive(Clone, Debug)]
Expand Down Expand Up @@ -45,6 +45,17 @@ impl<'a> MinerWork<'a> {
}
}

// Build a MinerWork from a full block header: the immutable header part is
// collapsed into its work hash, and mining-mutable fields (nonce, miner,
// extra nonce) are seeded from the header with a fresh nonce of 0.
pub fn from_block(header: BlockHeader) -> Self {
    let header_work_hash = header.get_work_hash();
    let timestamp = header.get_timestamp();
    Self {
        header_work_hash,
        timestamp,
        nonce: 0,
        extra_nonce: header.extra_nonce,
        miner: Some(Cow::Owned(header.miner)),
        cache: None
    }
}

pub fn nonce(&self) -> u64 {
self.nonce
}
Expand Down
2 changes: 2 additions & 0 deletions xelis_common/src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -16,6 +16,8 @@ pub mod queue;
pub mod varuint;
pub mod time;

pub mod thread_pool;

#[cfg(feature = "json_rpc")]
pub mod json_rpc;

Expand Down
15 changes: 15 additions & 0 deletions xelis_common/src/prompt/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -128,6 +128,12 @@ impl<T> From<PoisonError<T>> for PromptError {
}
}

impl From<PromptError> for CommandError {
fn from(err: PromptError) -> Self {
Self::Any(err.into())
}
}

// State used to be shared between stdin thread and Prompt instance
struct State {
prompt: Mutex<Option<String>>,
Expand Down Expand Up @@ -443,6 +449,12 @@ impl Prompt {
};
prompt.setup_logger(level, dir_path, filename_log, disable_file_logging)?;

#[cfg(feature = "tracing")]
{
info!("Tracing enabled");
console_subscriber::init();
}

if prompt.state.is_interactive() {
let (input_sender, input_receiver) = mpsc::unbounded_channel::<String>();
let state = Arc::clone(&prompt.state);
Expand Down Expand Up @@ -744,6 +756,9 @@ impl Prompt {
.level_for("actix_server", log::LevelFilter::Warn)
.level_for("actix_web", log::LevelFilter::Off)
.level_for("actix_http", log::LevelFilter::Off)
.level_for("tracing", log::LevelFilter::Off)
.level_for("runtime", log::LevelFilter::Off)
.level_for("tokio", log::LevelFilter::Off)
.level_for("mio", log::LevelFilter::Warn)
.level_for("tokio_tungstenite", log::LevelFilter::Warn)
.level_for("tungstenite", log::LevelFilter::Warn)
Expand Down
8 changes: 5 additions & 3 deletions xelis_common/src/rpc_server/error.rs
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,8 @@ use thiserror::Error;
use anyhow::Error as AnyError;
use crate::{serializer::ReaderError, rpc_server::JSON_RPC_VERSION};

use super::Id;

#[derive(Error, Debug)]
pub enum InternalRpcError {
#[error("Invalid context")]
Expand Down Expand Up @@ -53,20 +55,20 @@ impl InternalRpcError {

#[derive(Debug)]
pub struct RpcResponseError {
id: Option<usize>,
id: Option<Id>,
error: InternalRpcError
}

impl RpcResponseError {
pub fn new(id: Option<usize>, error: InternalRpcError) -> Self {
pub fn new(id: Option<Id>, error: InternalRpcError) -> Self {
Self {
id,
error
}
}

pub fn get_id(&self) -> Value {
match self.id {
match &self.id {
Some(id) => json!(id),
None => Value::Null
}
Expand Down
13 changes: 10 additions & 3 deletions xelis_common/src/rpc_server/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -16,23 +16,30 @@ use self::websocket::{WebSocketServerShared, WebSocketHandler};

pub const JSON_RPC_VERSION: &str = "2.0";

// JSON-RPC 2.0 request id: the spec allows either a string or a number.
// `untagged` makes serde read/write the raw JSON value directly; variant
// order matters for untagged deserialization, so keep String first.
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(untagged)]
pub enum Id {
    String(String),
    Number(usize),
}

#[derive(Clone, Serialize, Deserialize)]
pub struct RpcRequest {
pub jsonrpc: String,
pub id: Option<usize>,
pub id: Option<Id>,
pub method: String,
pub params: Option<Value>
}

#[derive(Serialize)]
pub struct RpcResponse<'a> {
pub jsonrpc: &'a str,
pub id: Cow<'a, Option<usize>>,
pub id: Cow<'a, Option<Id>>,
pub result: Cow<'a, Value>
}

impl<'a> RpcResponse<'a> {
pub fn new(id: Cow<'a, Option<usize>>, result: Cow<'a, Value>) -> Self {
pub fn new(id: Cow<'a, Option<Id>>, result: Cow<'a, Value>) -> Self {
Self {
jsonrpc: JSON_RPC_VERSION,
id,
Expand Down
72 changes: 55 additions & 17 deletions xelis_common/src/rpc_server/rpc_handler.rs
Original file line number Diff line number Diff line change
@@ -1,12 +1,12 @@
use std::{collections::HashMap, pin::Pin, future::Future};
use serde::de::DeserializeOwned;
use serde_json::{Value, json};
use serde_json::{json, Map, Value};
use crate::context::Context;

use super::{InternalRpcError, RpcResponseError, RpcRequest, JSON_RPC_VERSION};
use log::{error, trace};

pub type Handler = fn(Context, Value) -> Pin<Box<dyn Future<Output = Result<Value, InternalRpcError>> + Send>>;
pub type Handler = fn(&'_ Context, Value) -> Pin<Box<dyn Future<Output = Result<Value, InternalRpcError>> + Send + '_>>;

pub struct RPCHandler<T: Send + Clone + 'static> {
methods: HashMap<String, Handler>, // all RPC methods registered
Expand All @@ -25,16 +25,48 @@ where
}

pub async fn handle_request(&self, body: &[u8]) -> Result<Value, RpcResponseError> {
self.handle_request_with_context(Context::default(), body).await
let mut context = Context::new();

// Add the data
context.store(self.get_data().clone());

self.handle_request_with_context(context, body).await
}

pub async fn handle_request_with_context(&self, context: Context, body: &[u8]) -> Result<Value, RpcResponseError> {
let request = self.parse_request(body)?;
self.execute_method(context, request).await
let request: Value = serde_json::from_slice(body)
.map_err(|_| RpcResponseError::new(None, InternalRpcError::ParseBodyError))?;

match request {
e @ Value::Object(_) => self.execute_method(&context, self.parse_request(e)?).await.map(|e| e.unwrap_or(Value::Null)),
Value::Array(requests) => {
let mut responses = Vec::new();
for value in requests {
if value.is_object() {
let request = self.parse_request(value)?;
let response = match self.execute_method(&context, request).await {
Ok(response) => json!(response),
Err(e) => e.to_json()
};
responses.push(response);
} else {
responses.push(RpcResponseError::new(None, InternalRpcError::InvalidRequest).to_json());
}
}
Ok(serde_json::to_value(responses).map_err(|_| RpcResponseError::new(None, InternalRpcError::CustomStr("error while serializing response")))?)
},
_ => return Err(RpcResponseError::new(None, InternalRpcError::InvalidRequest))
}
}

pub fn parse_request(&self, body: &[u8]) -> Result<RpcRequest, RpcResponseError> {
let request: RpcRequest = serde_json::from_slice(&body).map_err(|_| RpcResponseError::new(None, InternalRpcError::ParseBodyError))?;
pub fn parse_request_from_bytes(&self, body: &[u8]) -> Result<RpcRequest, RpcResponseError> {
let request: Value = serde_json::from_slice(body)
.map_err(|_| RpcResponseError::new(None, InternalRpcError::ParseBodyError))?;
self.parse_request(request)
}

pub fn parse_request(&self, body: Value) -> Result<RpcRequest, RpcResponseError> {
let request: RpcRequest = serde_json::from_value(body).map_err(|_| RpcResponseError::new(None, InternalRpcError::ParseBodyError))?;
if request.jsonrpc != JSON_RPC_VERSION {
return Err(RpcResponseError::new(request.id, InternalRpcError::InvalidVersion));
}
Expand All @@ -45,21 +77,23 @@ where
self.methods.contains_key(method_name)
}

pub async fn execute_method(&self, mut context: Context, mut request: RpcRequest) -> Result<Value, RpcResponseError> {
pub async fn execute_method<'a>(&'a self, context: &'a Context, mut request: RpcRequest) -> Result<Option<Value>, RpcResponseError> {
let handler = match self.methods.get(&request.method) {
Some(handler) => handler,
None => return Err(RpcResponseError::new(request.id, InternalRpcError::MethodNotFound(request.method)))
};
trace!("executing '{}' RPC method", request.method);
let params = request.params.take().unwrap_or(Value::Null);
// Add the data
context.store(self.get_data().clone());
let result = handler(context, params).await.map_err(|err| RpcResponseError::new(request.id, err))?;
Ok(json!({
"jsonrpc": JSON_RPC_VERSION,
"id": request.id,
"result": result
}))
let result = handler(context, params).await.map_err(|err| RpcResponseError::new(request.id.clone(), err))?;
Ok(if request.id.is_some() {
Some(json!({
"jsonrpc": JSON_RPC_VERSION,
"id": request.id,
"result": result
}))
} else {
None
})
}

// register a new RPC method handler
Expand All @@ -74,6 +108,10 @@ where
}
}

pub fn parse_params<P: DeserializeOwned>(value: Value) -> Result<P, InternalRpcError> {
pub fn parse_params<P: DeserializeOwned>(mut value: Value) -> Result<P, InternalRpcError> {
if value.is_null() {
value = Value::Object(Map::new());
}

serde_json::from_value(value).map_err(|e| InternalRpcError::InvalidParams(e))
}

0 comments on commit 3a41abf

Please sign in to comment.