diff --git a/Cargo.lock b/Cargo.lock index 8113613e..884bbd09 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2,6 +2,18 @@ # It is not intended for manual editing. version = 4 +[[package]] +name = "acropolis_codec" +version = "0.1.0" +dependencies = [ + "acropolis_common", + "anyhow", + "pallas 0.33.0", + "pallas-primitives 0.33.0", + "pallas-traverse 0.33.0", + "tracing", +] + [[package]] name = "acropolis_common" version = "0.3.0" @@ -82,7 +94,26 @@ dependencies = [ "anyhow", "caryatid_sdk", "config", - "pallas", + "pallas 0.33.0", + "tokio", + "tracing", +] + +[[package]] +name = "acropolis_module_chain_store" +version = "0.1.0" +dependencies = [ + "acropolis_codec", + "acropolis_common", + "anyhow", + "caryatid_sdk", + "config", + "fjall", + "hex", + "imbl", + "minicbor 0.26.5", + "pallas-traverse 0.33.0", + "tempfile", "tokio", "tracing", ] @@ -130,7 +161,7 @@ dependencies = [ "dashmap", "hex", "imbl", - "pallas", + "pallas 0.33.0", "serde", "serde_json", "tokio", @@ -147,7 +178,7 @@ dependencies = [ "caryatid_sdk", "config", "hex", - "pallas", + "pallas 0.33.0", "reqwest 0.12.23", "serde_json", "tokio", @@ -183,7 +214,7 @@ dependencies = [ "config", "mithril-client", "mithril-common", - "pallas", + "pallas 0.32.1", "reqwest 0.11.27", "serde_json", "tokio", @@ -197,12 +228,13 @@ dependencies = [ "acropolis_common", "anyhow", "async-trait", + "base64 0.22.1", "blake2 0.10.6", "caryatid_sdk", "config", "hex", "num-rational", - "pallas", + "pallas 0.33.0", "reqwest 0.11.27", "serde", "serde_json", @@ -245,7 +277,7 @@ dependencies = [ "config", "fraction", "hex", - "pallas", + "pallas 0.33.0", "serde_json", "tokio", "tracing", @@ -277,7 +309,7 @@ dependencies = [ "dashmap", "hex", "imbl", - "pallas", + "pallas 0.33.0", "rayon", "serde", "serde_json", @@ -297,7 +329,7 @@ dependencies = [ "caryatid_sdk", "config", "hex", - "pallas", + "pallas 0.33.0", "serde", "serde_json", "serde_json_any_key", @@ -310,6 +342,7 @@ dependencies = [ name = "acropolis_module_tx_unpacker" version = "0.2.1" dependencies = [ + "acropolis_codec", "acropolis_common", "anyhow", "async-trait", @@ -317,7 +350,7 @@ dependencies = [ "config", "futures", "hex", - "pallas", + "pallas 0.33.0", "serde", "serde_json", "tokio", @@ -333,7 +366,7 @@ dependencies = [ "caryatid_sdk", "config", "crossbeam", - "pallas", + "pallas 0.33.0", "serde", "serde_json", "tokio", @@ -368,6 +401,7 @@ dependencies = [ "acropolis_module_accounts_state", "acropolis_module_assets_state", "acropolis_module_block_unpacker", + "acropolis_module_chain_store", "acropolis_module_drdd_state", "acropolis_module_drep_state", "acropolis_module_epochs_state", @@ -3806,16 +3840,33 @@ version = "0.32.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6cc85b0d73cc19b7c1e09d6f2c5c4abfad3923671654ba6ef8fd00c9d0ee4c58" dependencies = [ - "pallas-addresses", - "pallas-codec", - "pallas-configs", - "pallas-crypto", + "pallas-addresses 0.32.1", + "pallas-codec 0.32.1", + "pallas-configs 0.32.1", + "pallas-crypto 0.32.1", "pallas-hardano", - "pallas-network", - "pallas-primitives", - "pallas-traverse", - "pallas-txbuilder", - "pallas-utxorpc", + "pallas-network 0.32.1", + "pallas-primitives 0.32.1", + "pallas-traverse 0.32.1", + "pallas-txbuilder 0.32.1", + "pallas-utxorpc 0.32.1", +] + +[[package]] +name = "pallas" +version = "0.33.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "37bba5e9e84978df0d42b72c3b92ead5a3b1c4852f66c08b648a5c057f58717a" +dependencies = [ + "pallas-addresses 0.33.0", + "pallas-codec 
0.33.0", + "pallas-configs 0.33.0", + "pallas-crypto 0.33.0", + "pallas-network 0.33.0", + "pallas-primitives 0.33.0", + "pallas-traverse 0.33.0", + "pallas-txbuilder 0.33.0", + "pallas-utxorpc 0.33.0", ] [[package]] @@ -3829,8 +3880,24 @@ dependencies = [ "crc", "cryptoxide 0.4.4", "hex", - "pallas-codec", - "pallas-crypto", + "pallas-codec 0.32.1", + "pallas-crypto 0.32.1", + "thiserror 1.0.69", +] + +[[package]] +name = "pallas-addresses" +version = "0.33.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "18f5f4dd205316335bf8eef77227e01a8a00b1fd60503d807520e93dd0362d0e" +dependencies = [ + "base58", + "bech32 0.9.1", + "crc", + "cryptoxide 0.4.4", + "hex", + "pallas-codec 0.33.0", + "pallas-crypto 0.33.0", "thiserror 1.0.69", ] @@ -3842,11 +3909,27 @@ checksum = "6a861573364d48ff0952b12d3f139e05a843b8209f134a0c2b028449cb59ed68" dependencies = [ "chrono", "hex", - "pallas-addresses", - "pallas-codec", - "pallas-crypto", - "pallas-primitives", - "pallas-traverse", + "pallas-addresses 0.32.1", + "pallas-codec 0.32.1", + "pallas-crypto 0.32.1", + "pallas-primitives 0.32.1", + "pallas-traverse 0.32.1", + "rand 0.8.5", +] + +[[package]] +name = "pallas-applying" +version = "0.33.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b196174663e1c4eb80a286b8ddca78f75fca5fc57b0baaa5b1143a6dd76ca71b" +dependencies = [ + "chrono", + "hex", + "pallas-addresses 0.33.0", + "pallas-codec 0.33.0", + "pallas-crypto 0.33.0", + "pallas-primitives 0.33.0", + "pallas-traverse 0.33.0", "rand 0.8.5", ] @@ -3862,6 +3945,18 @@ dependencies = [ "thiserror 1.0.69", ] +[[package]] +name = "pallas-codec" +version = "0.33.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8b2737b05f0dbb6d197feeb26ef15d2567e54833184bd469f5655a0537da89fa" +dependencies = [ + "hex", + "minicbor 0.25.1", + "serde", + "thiserror 1.0.69", +] + [[package]] name = "pallas-configs" version = "0.32.1" @@ -3871,10 +3966,28 @@ dependencies = [ "base64 0.22.1", "hex", "num-rational", - "pallas-addresses", - "pallas-codec", - "pallas-crypto", - "pallas-primitives", + "pallas-addresses 0.32.1", + "pallas-codec 0.32.1", + "pallas-crypto 0.32.1", + "pallas-primitives 0.32.1", + "serde", + "serde_json", + "serde_with 3.14.1", +] + +[[package]] +name = "pallas-configs" +version = "0.33.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8a4e63bff98bd71b3057a0986dc72e6ba58afaf063bce3dc8243fda5f0665726" +dependencies = [ + "base64 0.22.1", + "hex", + "num-rational", + "pallas-addresses 0.33.0", + "pallas-codec 0.33.0", + "pallas-crypto 0.33.0", + "pallas-primitives 0.33.0", "serde", "serde_json", "serde_with 3.14.1", @@ -3888,7 +4001,22 @@ checksum = "59c89ea16190a87a1d8bd36923093740a2b659ed6129f4636329319a70cc4db3" dependencies = [ "cryptoxide 0.4.4", "hex", - "pallas-codec", + "pallas-codec 0.32.1", + "rand_core 0.6.4", + "serde", + "thiserror 1.0.69", + "zeroize", +] + +[[package]] +name = "pallas-crypto" +version = "0.33.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0368945cd093e550febe36aef085431b1611c2e9196297cd70f4b21a4add054c" +dependencies = [ + "cryptoxide 0.4.4", + "hex", + "pallas-codec 0.33.0", "rand_core 0.6.4", "serde", "thiserror 1.0.69", @@ -3902,8 +4030,8 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8f980c9e0579642a5c8a902231a499b826fdb8673585be9d3068eb9b04ccc980" dependencies = [ "binary-layout", - "pallas-network", - "pallas-traverse", + 
"pallas-network 0.32.1", + "pallas-traverse 0.32.1", "tap", "thiserror 1.0.69", "tracing", @@ -3918,8 +4046,26 @@ dependencies = [ "byteorder", "hex", "itertools 0.13.0", - "pallas-codec", - "pallas-crypto", + "pallas-codec 0.32.1", + "pallas-crypto 0.32.1", + "rand 0.8.5", + "socket2 0.5.10", + "thiserror 1.0.69", + "tokio", + "tracing", +] + +[[package]] +name = "pallas-network" +version = "0.33.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1244da7a760a08b8a9d9a28a28112f10a7b6476d64192696a269cfd09a7ec55c" +dependencies = [ + "byteorder", + "hex", + "itertools 0.13.0", + "pallas-codec 0.33.0", + "pallas-crypto 0.33.0", "rand 0.8.5", "socket2 0.5.10", "thiserror 1.0.69", @@ -3937,8 +4083,24 @@ dependencies = [ "bech32 0.9.1", "hex", "log", - "pallas-codec", - "pallas-crypto", + "pallas-codec 0.32.1", + "pallas-crypto 0.32.1", + "serde", + "serde_json", +] + +[[package]] +name = "pallas-primitives" +version = "0.33.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cb2acde8875c43446194d387c60fe2d6a127e4f8384bef3dcabd5a04e9422429" +dependencies = [ + "base58", + "bech32 0.9.1", + "hex", + "log", + "pallas-codec 0.33.0", + "pallas-crypto 0.33.0", "serde", "serde_json", ] @@ -3951,10 +4113,27 @@ checksum = "be7fbb1db75a0b6b32d1808b2cc5c7ba6dd261f289491bb86998b987b4716883" dependencies = [ "hex", "itertools 0.13.0", - "pallas-addresses", - "pallas-codec", - "pallas-crypto", - "pallas-primitives", + "pallas-addresses 0.32.1", + "pallas-codec 0.32.1", + "pallas-crypto 0.32.1", + "pallas-primitives 0.32.1", + "paste", + "serde", + "thiserror 1.0.69", +] + +[[package]] +name = "pallas-traverse" +version = "0.33.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ab64895a0d94fed1ef2d99dd37e480ed0483e91eb98dcd2f94cc614fb9575173" +dependencies = [ + "hex", + "itertools 0.13.0", + "pallas-addresses 0.33.0", + "pallas-codec 0.33.0", + "pallas-crypto 0.33.0", + "pallas-primitives 0.33.0", "paste", "serde", "thiserror 1.0.69", @@ -3967,12 +4146,30 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "fff83ae515a88b1ecf5354468d9fd3562d915e5eceb5c9467f6b1cdce60a3e9a" dependencies = [ "hex", - "pallas-addresses", - "pallas-codec", - "pallas-crypto", - "pallas-primitives", - "pallas-traverse", - "pallas-wallet", + "pallas-addresses 0.32.1", + "pallas-codec 0.32.1", + "pallas-crypto 0.32.1", + "pallas-primitives 0.32.1", + "pallas-traverse 0.32.1", + "pallas-wallet 0.32.1", + "serde", + "serde_json", + "thiserror 1.0.69", +] + +[[package]] +name = "pallas-txbuilder" +version = "0.33.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "46ff1f49d99aced71b20daa68577167e1db3f0aaffe92fbc1de6df0b6002a66e" +dependencies = [ + "hex", + "pallas-addresses 0.33.0", + "pallas-codec 0.33.0", + "pallas-crypto 0.33.0", + "pallas-primitives 0.33.0", + "pallas-traverse 0.33.0", + "pallas-wallet 0.33.0", "serde", "serde_json", "thiserror 1.0.69", @@ -3984,11 +4181,26 @@ version = "0.32.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "810ccda35242fef9ea583a0819da7617b6761a86c6070f16aea27ac80ad4da75" dependencies = [ - "pallas-applying", - "pallas-codec", - "pallas-crypto", - "pallas-primitives", - "pallas-traverse", + "pallas-applying 0.32.1", + "pallas-codec 0.32.1", + "pallas-crypto 0.32.1", + "pallas-primitives 0.32.1", + "pallas-traverse 0.32.1", + "prost-types", + "utxorpc-spec", +] + +[[package]] +name = "pallas-utxorpc" +version = "0.33.0" 
+source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2bdf89daca5ebfbcd9b5cf8b480486302ffd3401f6891d3c4f02087fd7687b94" +dependencies = [ + "pallas-applying 0.33.0", + "pallas-codec 0.33.0", + "pallas-crypto 0.33.0", + "pallas-primitives 0.33.0", + "pallas-traverse 0.33.0", "prost-types", "utxorpc-spec", ] @@ -4003,7 +4215,22 @@ dependencies = [ "bip39", "cryptoxide 0.4.4", "ed25519-bip32", - "pallas-crypto", + "pallas-crypto 0.32.1", + "rand 0.8.5", + "thiserror 1.0.69", +] + +[[package]] +name = "pallas-wallet" +version = "0.33.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4d91b48fe1d0d07b425aed4b1c6ac5d962e0a392ccc58e2f3faa8ad250a5c364" +dependencies = [ + "bech32 0.9.1", + "bip39", + "cryptoxide 0.4.4", + "ed25519-bip32", + "pallas-crypto 0.33.0", "rand 0.8.5", "thiserror 1.0.69", ] diff --git a/Cargo.toml b/Cargo.toml index f0533b62..ccf0d323 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -3,6 +3,7 @@ [workspace] members = [ # Global message and common definitions + "codec", "common", # Modules @@ -21,11 +22,12 @@ members = [ "modules/epochs_state", # Tracks fees and blocks minted and epochs history "modules/accounts_state", # Tracks stake and reward accounts "modules/assets_state", # Tracks native asset mints and burns + "modules/chain_store", # Tracks historical information about blocks and TXs # Process builds "processes/omnibus", # All-inclusive omnibus process "processes/replayer", # All-inclusive process to replay messages - "processes/golden_tests", #All-inclusive golden tests process + "processes/golden_tests", # All-inclusive golden tests process ] resolver = "2" @@ -41,9 +43,11 @@ config = "0.15.11" dashmap = "6.1.0" hex = "0.4" imbl = { version = "5.0.0", features = ["serde"] } -pallas = "0.32.1" -pallas-addresses = "0.32.0" -pallas-crypto = "0.32.0" +pallas = "0.33.0" +pallas-addresses = "0.33.0" +pallas-crypto = "0.33.0" +pallas-primitives = "0.33.0" +pallas-traverse = "0.33.0" serde = { version = "1.0.214", features = ["derive"] } serde_json = "1.0.132" serde_with = { version = "3.12.0", features = ["hex"] } diff --git a/codec/Cargo.toml b/codec/Cargo.toml new file mode 100644 index 00000000..e80c45d2 --- /dev/null +++ b/codec/Cargo.toml @@ -0,0 +1,13 @@ +[package] +name = "acropolis_codec" +version = "0.1.0" +edition = "2024" + +[dependencies] +acropolis_common = { path = "../common" } + +anyhow = { workspace = true } +pallas = { workspace = true } +pallas-primitives = { workspace = true } +pallas-traverse = { workspace = true } +tracing = { workspace = true } diff --git a/codec/src/block.rs b/codec/src/block.rs new file mode 100644 index 00000000..b898534c --- /dev/null +++ b/codec/src/block.rs @@ -0,0 +1,43 @@ +use acropolis_common::{ + GenesisDelegate, HeavyDelegate, crypto::keyhash_224, queries::blocks::BlockIssuer, +}; +use pallas_primitives::byron::BlockSig::DlgSig; +use pallas_traverse::MultiEraHeader; +use std::collections::HashMap; + +pub fn map_to_block_issuer( + header: &MultiEraHeader, + byron_heavy_delegates: &HashMap, HeavyDelegate>, + shelley_genesis_delegates: &HashMap, GenesisDelegate>, +) -> Option { + match header.issuer_vkey() { + Some(vkey) => match header { + MultiEraHeader::ShelleyCompatible(_) => { + let digest = keyhash_224(vkey); + if let Some(issuer) = shelley_genesis_delegates + .values() + .find(|v| v.delegate == digest) + .map(|i| BlockIssuer::GenesisDelegate(i.clone())) + { + Some(issuer) + } else { + Some(BlockIssuer::SPO(vkey.to_vec())) + } + } + _ => 
Some(BlockIssuer::SPO(vkey.to_vec())), + }, + None => match header { + MultiEraHeader::Byron(_) => match header.as_byron() { + Some(block_head) => match &block_head.consensus_data.3 { + DlgSig(sig) => byron_heavy_delegates + .values() + .find(|v| v.issuer_pk == *sig.0.issuer) + .map(|i| BlockIssuer::HeavyDelegate(i.clone())), + _ => None, + }, + None => None, + }, + _ => None, + }, + } +} diff --git a/codec/src/lib.rs b/codec/src/lib.rs new file mode 100644 index 00000000..7cb7bc0d --- /dev/null +++ b/codec/src/lib.rs @@ -0,0 +1,2 @@ +pub mod block; +pub mod map_parameters; diff --git a/modules/tx_unpacker/src/map_parameters.rs b/codec/src/map_parameters.rs similarity index 99% rename from modules/tx_unpacker/src/map_parameters.rs rename to codec/src/map_parameters.rs index d7ae1fd8..2d3ebde5 100644 --- a/modules/tx_unpacker/src/map_parameters.rs +++ b/codec/src/map_parameters.rs @@ -1,12 +1,12 @@ //! Acropolis transaction unpacker module for Caryatid //! Performs conversion from Pallas library data to Acropolis -use anyhow::{anyhow, bail, Result}; +use anyhow::{Result, anyhow, bail}; use pallas::ledger::{ primitives::{ - alonzo, babbage, conway, ExUnitPrices as PallasExUnitPrices, Nullable, - ProtocolVersion as PallasProtocolVersion, Relay as PallasRelay, ScriptHash, - StakeCredential as PallasStakeCredential, + ExUnitPrices as PallasExUnitPrices, Nullable, ProtocolVersion as PallasProtocolVersion, + Relay as PallasRelay, ScriptHash, StakeCredential as PallasStakeCredential, alonzo, + babbage, conway, }, traverse::{MultiEraCert, MultiEraPolicyAssets, MultiEraValue}, *, @@ -145,7 +145,7 @@ pub fn map_gov_action_id(pallas_action_id: &conway::GovActionId) -> Result>, pub protocol_consts: ProtocolConsts, pub start_time: u64, + pub heavy_delegation: HashMap, HeavyDelegate>, } // @@ -122,6 +126,9 @@ pub struct ShelleyParams { pub slots_per_kes_period: u32, pub system_start: DateTime, pub update_quorum: u32, + + #[serde_as(as = "HashMap")] + pub gen_delegs: HashMap, GenesisDelegate>, } #[serde_as] @@ -264,6 +271,15 @@ impl From for Nonce { } } +impl From for Nonce { + fn from(hash: BlockHash) -> Self { + Self { + tag: NonceVariant::Nonce, + hash: Some(*hash.deref()), + } + } +} + #[derive( Default, Debug, PartialEq, Eq, PartialOrd, Ord, Clone, serde::Serialize, serde::Deserialize, )] diff --git a/common/src/queries/blocks.rs b/common/src/queries/blocks.rs index 3a0ffec3..9128e3fb 100644 --- a/common/src/queries/blocks.rs +++ b/common/src/queries/blocks.rs @@ -1,64 +1,231 @@ +use crate::{ + queries::misc::Order, + serialization::{Bech32Conversion, Bech32WithHrp}, + Address, BlockHash, GenesisDelegate, HeavyDelegate, KeyHash, TxHash, TxIdentifier, VRFKey, +}; +use cryptoxide::hashing::blake2b::Blake2b; +use serde::ser::{Serialize, SerializeStruct, Serializer}; +use serde_with::{hex::Hex, serde_as}; +use std::collections::HashMap; + +pub const DEFAULT_BLOCKS_QUERY_TOPIC: (&str, &str) = + ("blocks-state-query-topic", "cardano.query.blocks"); + #[derive(Debug, Clone, serde::Serialize, serde::Deserialize)] pub enum BlocksStateQuery { GetLatestBlock, - GetLatestBlockTransactions, - GetLatestBlockTransactionsCBOR, - GetBlockInfo { block_key: Vec }, - GetNextBlocks { block_key: Vec }, - GetPreviousBlocks { block_key: Vec }, - GetBlockBySlot { slot_key: Vec }, - GetBlockByEpochSlot { slot_key: Vec }, - GetBlockTransactions { block_key: Vec }, - GetBlockTransactionsCBOR { block_key: Vec }, - GetBlockInvolvedAddresses { block_key: Vec }, + GetLatestBlockTransactions { + limit: u64, + skip: u64, + order: 
Order, + }, + GetLatestBlockTransactionsCBOR { + limit: u64, + skip: u64, + order: Order, + }, + GetBlockInfo { + block_key: BlockKey, + }, + GetNextBlocks { + block_key: BlockKey, + limit: u64, + skip: u64, + }, + GetPreviousBlocks { + block_key: BlockKey, + limit: u64, + skip: u64, + }, + GetBlockBySlot { + slot: u64, + }, + GetBlockByEpochSlot { + epoch: u64, + slot: u64, + }, + GetBlockTransactions { + block_key: BlockKey, + limit: u64, + skip: u64, + order: Order, + }, + GetBlockTransactionsCBOR { + block_key: BlockKey, + limit: u64, + skip: u64, + order: Order, + }, + GetBlockInvolvedAddresses { + block_key: BlockKey, + limit: u64, + skip: u64, + }, + GetBlockHashes { + block_numbers: Vec, + }, + GetTransactionHashes { + tx_ids: Vec, + }, +} + +#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)] +pub enum BlockKey { + Hash(BlockHash), + Number(u64), } #[derive(Debug, Clone, serde::Serialize, serde::Deserialize)] pub enum BlocksStateQueryResponse { - LatestBlock(LatestBlock), - LatestBlockTransactions(LatestBlockTransactions), - LatestBlockTransactionsCBOR(LatestBlockTransactionsCBOR), + LatestBlock(BlockInfo), + LatestBlockTransactions(BlockTransactions), + LatestBlockTransactionsCBOR(BlockTransactionsCBOR), BlockInfo(BlockInfo), NextBlocks(NextBlocks), PreviousBlocks(PreviousBlocks), - BlockBySlot(BlockBySlot), - BlockByEpochSlot(BlockByEpochSlot), + BlockBySlot(BlockInfo), + BlockByEpochSlot(BlockInfo), BlockTransactions(BlockTransactions), BlockTransactionsCBOR(BlockTransactionsCBOR), BlockInvolvedAddresses(BlockInvolvedAddresses), + BlockHashes(BlockHashes), + TransactionHashes(TransactionHashes), NotFound, Error(String), } #[derive(Debug, Clone, serde::Serialize, serde::Deserialize)] -pub struct LatestBlock {} +pub enum BlockIssuer { + HeavyDelegate(HeavyDelegate), + GenesisDelegate(GenesisDelegate), + SPO(Vec), +} -#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)] -pub struct LatestBlockTransactions {} +#[derive(Debug, Clone, serde::Deserialize)] +pub struct BlockInfo { + pub timestamp: u64, + pub number: u64, + pub hash: BlockHash, + pub slot: u64, + pub epoch: u64, + pub epoch_slot: u64, + pub issuer: Option, + pub size: u64, + pub tx_count: u64, + pub output: Option, + pub fees: Option, + pub block_vrf: Option, + pub op_cert: Option, + pub op_cert_counter: Option, + pub previous_block: Option, + pub next_block: Option, + pub confirmations: u64, +} -#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)] -pub struct LatestBlockTransactionsCBOR {} +impl Serialize for BlockInfo { + fn serialize(&self, serializer: S) -> Result + where + S: Serializer, + { + let mut state = serializer.serialize_struct("BlockInfo", 17)?; + state.serialize_field("time", &self.timestamp)?; + state.serialize_field("height", &self.number)?; + state.serialize_field("hash", &self.hash)?; + state.serialize_field("slot", &self.slot)?; + state.serialize_field("epoch", &self.epoch)?; + state.serialize_field("epoch_slot", &self.epoch_slot)?; + state.serialize_field( + "slot_issuer", + &self.issuer.clone().map(|vkey| -> String { + match vkey { + BlockIssuer::HeavyDelegate(_) => "Byron genesis slot issuer".to_string(), + BlockIssuer::GenesisDelegate(_) => "Shelley genesis slot issuer".to_string(), + BlockIssuer::SPO(vkey) => { + let mut context = Blake2b::<224>::new(); + context.update_mut(&vkey); + let digest = context.finalize().as_slice().to_owned(); + digest.to_bech32_with_hrp("pool").unwrap_or(String::new()) + } + } + }), + )?; + state.serialize_field("size", &self.size)?; + 
state.serialize_field("tx_count", &self.tx_count)?; + state.serialize_field("output", &self.output)?; + state.serialize_field("fees", &self.fees)?; + state.serialize_field( + "block_vrf", + &self.block_vrf.clone().and_then(|vkey| vkey.to_bech32().ok()), + )?; + state.serialize_field("op_cert", &self.op_cert.clone().map(|v| hex::encode(v)))?; + state.serialize_field("op_cert_counter", &self.op_cert_counter)?; + state.serialize_field("previous_block", &self.previous_block)?; + state.serialize_field("next_block", &self.next_block)?; + state.serialize_field("confirmations", &self.confirmations)?; + state.end() + } +} #[derive(Debug, Clone, serde::Serialize, serde::Deserialize)] -pub struct BlockInfo {} +pub struct NextBlocks { + pub blocks: Vec, +} #[derive(Debug, Clone, serde::Serialize, serde::Deserialize)] -pub struct NextBlocks {} +pub struct PreviousBlocks { + pub blocks: Vec, +} #[derive(Debug, Clone, serde::Serialize, serde::Deserialize)] -pub struct PreviousBlocks {} +pub struct BlockTransactions { + pub hashes: Vec, +} #[derive(Debug, Clone, serde::Serialize, serde::Deserialize)] -pub struct BlockBySlot {} +pub struct BlockTransactionsCBOR { + pub txs: Vec, +} +#[serde_as] #[derive(Debug, Clone, serde::Serialize, serde::Deserialize)] -pub struct BlockByEpochSlot {} +pub struct BlockTransaction { + pub hash: TxHash, + #[serde_as(as = "Hex")] + pub cbor: Vec, +} #[derive(Debug, Clone, serde::Serialize, serde::Deserialize)] -pub struct BlockTransactions {} +pub struct BlockInvolvedAddresses { + pub addresses: Vec, +} + +#[derive(Debug, Clone, serde::Deserialize)] +pub struct BlockInvolvedAddress { + pub address: Address, + pub txs: Vec, +} + +impl Serialize for BlockInvolvedAddress { + fn serialize(&self, serializer: S) -> Result + where + S: Serializer, + { + let mut state = serializer.serialize_struct("BlockInvolvedAddress", 2)?; + state.serialize_field( + "address", + &self.address.to_string().unwrap_or("".to_string()), + )?; + state.serialize_field("transactions", &self.txs)?; + state.end() + } +} #[derive(Debug, Clone, serde::Serialize, serde::Deserialize)] -pub struct BlockTransactionsCBOR {} +pub struct BlockHashes { + pub block_hashes: HashMap, +} #[derive(Debug, Clone, serde::Serialize, serde::Deserialize)] -pub struct BlockInvolvedAddresses {} +pub struct TransactionHashes { + pub tx_hashes: HashMap, +} diff --git a/common/src/queries/misc.rs b/common/src/queries/misc.rs new file mode 100644 index 00000000..aec47185 --- /dev/null +++ b/common/src/queries/misc.rs @@ -0,0 +1,19 @@ +use std::str::FromStr; + +#[derive(Debug, Clone, serde::Serialize, serde::Deserialize, PartialEq)] +pub enum Order { + Asc, + Desc, +} + +impl FromStr for Order { + type Err = (); + + fn from_str(s: &str) -> Result { + match s.to_lowercase().as_str() { + "asc" => Ok(Order::Asc), + "desc" => Ok(Order::Desc), + _ => Err(()), + } + } +} diff --git a/common/src/queries/mod.rs b/common/src/queries/mod.rs index 8aafd35f..b9d9d6f9 100644 --- a/common/src/queries/mod.rs +++ b/common/src/queries/mod.rs @@ -11,6 +11,7 @@ pub mod governance; pub mod ledger; pub mod mempool; pub mod metadata; +pub mod misc; pub mod network; pub mod parameters; pub mod pools; diff --git a/common/src/queries/utils.rs b/common/src/queries/utils.rs index 43ef1932..d385f11c 100644 --- a/common/src/queries/utils.rs +++ b/common/src/queries/utils.rs @@ -1,8 +1,9 @@ use anyhow::Result; use caryatid_sdk::Context; +use serde::Serialize; use std::sync::Arc; -use crate::messages::Message; +use crate::messages::{Message, RESTResponse}; pub 
async fn query_state( context: &Arc>, @@ -20,3 +21,41 @@ where Ok(extractor(message)?) } + +/// The outer option in the extractor return value is whether the response was handled by F +pub async fn rest_query_state( + context: &Arc>, + topic: &str, + request_msg: Arc, + extractor: F, +) -> Result +where + F: FnOnce(Message) -> Option, anyhow::Error>>, + T: Serialize, +{ + let result = query_state(&context, topic, request_msg, |response| { + match extractor(response) { + Some(response) => response, + None => Err(anyhow::anyhow!( + "Unexpected response message type while calling {topic}" + )), + } + }) + .await; + match result { + Ok(result) => match result { + Some(result) => match serde_json::to_string(&result) { + Ok(json) => Ok(RESTResponse::with_json(200, &json)), + Err(e) => Ok(RESTResponse::with_text( + 500, + &format!("Internal server error while calling {topic}: {e}"), + )), + }, + None => Ok(RESTResponse::with_text(404, "Not found")), + }, + Err(e) => Ok(RESTResponse::with_text( + 500, + &format!("Internal server error while calling {topic}: {e}"), + )), + } +} diff --git a/common/src/rest_helper.rs b/common/src/rest_helper.rs index c3ea34e5..135652cb 100644 --- a/common/src/rest_helper.rs +++ b/common/src/rest_helper.rs @@ -112,6 +112,40 @@ where }) } +// Handle a REST request with path and query parameters +pub fn handle_rest_with_path_and_query_parameters( + context: Arc>, + topic: &str, + handler: F, +) -> JoinHandle<()> +where + F: Fn(&[&str], HashMap) -> Fut + Send + Sync + Clone + 'static, + Fut: Future> + Send + 'static, +{ + let topic_owned = topic.to_string(); + context.handle(topic, move |message: Arc| { + let handler = handler.clone(); + let topic_owned = topic_owned.clone(); + async move { + let response = match message.as_ref() { + Message::RESTRequest(request) => { + let params_vec = + extract_params_from_topic_and_path(&topic_owned, &request.path_elements); + let params_slice: Vec<&str> = params_vec.iter().map(|s| s.as_str()).collect(); + let query_params = request.query_parameters.clone(); + match handler(¶ms_slice, query_params).await { + Ok(response) => response, + Err(error) => RESTResponse::with_text(500, &format!("{error:?}")), + } + } + _ => RESTResponse::with_text(500, "Unexpected message in REST request"), + }; + + Arc::new(Message::RESTResponse(response)) + } + }) +} + /// Extract parameters from the request path based on the topic pattern. 
/// Skips the first 3 parts of the topic as these are never parameters fn extract_params_from_topic_and_path(topic: &str, path_elements: &[String]) -> Vec { diff --git a/common/src/serialization.rs b/common/src/serialization.rs index 94f44510..261b058c 100644 --- a/common/src/serialization.rs +++ b/common/src/serialization.rs @@ -26,6 +26,13 @@ where } } +pub trait Bech32Conversion { + fn to_bech32(&self) -> Result; + fn from_bech32(s: &str) -> Result + where + Self: Sized; +} + pub trait Bech32WithHrp { fn to_bech32_with_hrp(&self, hrp: &str) -> Result; fn from_bech32_with_hrp(s: &str, expected_hrp: &str) -> Result, anyhow::Error>; diff --git a/common/src/types.rs b/common/src/types.rs index 898ca34a..87dec7e6 100644 --- a/common/src/types.rs +++ b/common/src/types.rs @@ -6,6 +6,7 @@ use crate::{ address::{Address, ShelleyAddress, StakeAddress}, protocol_params, rational_number::RationalNumber, + serialization::{Bech32Conversion, Bech32WithHrp}, }; use anyhow::{anyhow, bail, Error, Result}; use bech32::{Bech32, Hrp}; @@ -15,6 +16,7 @@ use serde::{Deserialize, Serialize}; use serde_with::{hex::Hex, serde_as}; use std::collections::{HashMap, HashSet}; use std::fmt::{Display, Formatter}; +use std::ops::Deref; use std::ops::Neg; use std::{cmp::Ordering, fmt}; @@ -90,6 +92,79 @@ pub enum BlockStatus { RolledBack, // Volatile, restarted after rollback } +macro_rules! declare_byte_array_type { + ($name:ident, $size:expr) => { + /// $name + #[serde_as] + #[derive( + Default, Debug, Clone, Copy, PartialEq, Eq, Hash, serde::Serialize, serde::Deserialize, + )] + pub struct $name(#[serde_as(as = "Hex")] pub [u8; $size]); + + impl From<[u8; $size]> for $name { + fn from(bytes: [u8; $size]) -> Self { + Self(bytes) + } + } + + impl TryFrom> for $name { + type Error = Vec; + fn try_from(vec: Vec) -> Result { + Ok($name(vec.try_into()?)) + } + } + + impl TryFrom<&[u8]> for $name { + type Error = std::array::TryFromSliceError; + fn try_from(arr: &[u8]) -> Result { + Ok($name(arr.try_into()?)) + } + } + + impl AsRef<[u8]> for $name { + fn as_ref(&self) -> &[u8] { + &self.0 + } + } + + impl Deref for $name { + type Target = [u8; $size]; + fn deref(&self) -> &Self::Target { + &self.0 + } + } + }; +} + +macro_rules! declare_byte_array_type_with_bech32 { + ($name:ident, $size:expr, $hrp:expr) => { + declare_byte_array_type!($name, $size); + impl Bech32Conversion for $name { + fn to_bech32(&self) -> Result { + self.0.to_vec().to_bech32_with_hrp($hrp) + } + fn from_bech32(s: &str) -> Result { + match Vec::::from_bech32_with_hrp(s, $hrp) { + Ok(v) => match Self::try_from(v) { + Ok(s) => Ok(s), + Err(_) => Err(Error::msg(format!( + "Bad vector input to {}", + stringify!($name) + ))), + }, + Err(e) => Err(e), + } + } + } + }; +} + +declare_byte_array_type!(BlockHash, 32); + +declare_byte_array_type!(TxHash, 32); + +declare_byte_array_type_with_bech32!(VRFKey, 32, "vrf_vk"); + /// Block info, shared across multiple messages #[derive(Debug, Clone, PartialEq, Eq, serde::Serialize, serde::Deserialize)] pub struct BlockInfo { @@ -329,9 +404,6 @@ pub type GenesisKeyhash = Vec; /// Data hash used for metadata, anchors (SHA256) pub type DataHash = Vec; -/// Transaction hash -pub type TxHash = [u8; 32]; - /// Compact transaction identifier (block_number, tx_index). 
#[derive( Debug, Default, Clone, Copy, PartialEq, Eq, Hash, serde::Serialize, serde::Deserialize, @@ -427,9 +499,6 @@ impl TxOutRef { } } -/// Block Hash -pub type BlockHash = [u8; 32]; - /// Amount of Ada, in Lovelace pub type Lovelace = u64; pub type LovelaceDelta = i64; @@ -1213,6 +1282,22 @@ pub struct BlockVersionData { pub update_vote_thd: u64, } +#[derive(Debug, Clone, PartialEq, serde::Serialize, serde::Deserialize)] +pub struct HeavyDelegate { + pub cert: Vec, + pub delegate_pk: Vec, + pub issuer_pk: Vec, +} + +#[serde_as] +#[derive(Debug, Clone, PartialEq, serde::Serialize, serde::Deserialize)] +pub struct GenesisDelegate { + #[serde_as(as = "Hex")] + pub delegate: Vec, + #[serde_as(as = "Hex")] + pub vrf: Vec, +} + #[derive(Debug, Clone, PartialEq, serde::Serialize, serde::Deserialize)] pub struct ProtocolConsts { pub k: usize, diff --git a/modules/accounts_state/src/monetary.rs b/modules/accounts_state/src/monetary.rs index 7e33851c..bb3c9485 100644 --- a/modules/accounts_state/src/monetary.rs +++ b/modules/accounts_state/src/monetary.rs @@ -107,12 +107,14 @@ fn calculate_monetary_expansion( #[cfg(test)] mod tests { use super::*; - use acropolis_common::protocol_params::{ - Nonce, NonceVariant, ProtocolVersion, ShelleyProtocolParams, - }; use acropolis_common::rational_number::rational_number_from_f32; use acropolis_common::NetworkId; + use acropolis_common::{ + protocol_params::{Nonce, NonceVariant, ProtocolVersion, ShelleyProtocolParams}, + GenesisDelegate, + }; use chrono::{DateTime, Utc}; + use std::collections::HashMap; // Known values at start of Shelley - from Java reference and DBSync const EPOCH_208_RESERVES: Lovelace = 13_888_022_852_926_644; @@ -173,6 +175,7 @@ mod tests { slots_per_kes_period: 129600, system_start: DateTime::::default(), update_quorum: 5, + gen_delegs: HashMap::, GenesisDelegate>::new(), } } diff --git a/modules/chain_store/Cargo.toml b/modules/chain_store/Cargo.toml new file mode 100644 index 00000000..3ed9fc55 --- /dev/null +++ b/modules/chain_store/Cargo.toml @@ -0,0 +1,27 @@ +[package] +name = "acropolis_module_chain_store" +version = "0.1.0" +edition = "2021" +authors = ["Simon Gellis "] +description = "Chain Store Tracker" +license = "Apache-2.0" + +[dependencies] +caryatid_sdk = "0.12" +acropolis_common = { path = "../../common" } +acropolis_codec = { path = "../../codec" } +anyhow = "1.0" +config = "0.15.11" +fjall = "2.7.0" +hex = "0.4" +minicbor = { version = "0.26.0", features = ["std", "half", "derive"] } +pallas-traverse = { workspace = true } +tracing = "0.1.40" +tokio.workspace = true +imbl.workspace = true + +[dev-dependencies] +tempfile = "3" + +[lib] +path = "src/chain_store.rs" diff --git a/modules/chain_store/src/chain_store.rs b/modules/chain_store/src/chain_store.rs new file mode 100644 index 00000000..1942bd94 --- /dev/null +++ b/modules/chain_store/src/chain_store.rs @@ -0,0 +1,566 @@ +mod stores; + +use acropolis_codec::{block::map_to_block_issuer, map_parameters}; +use acropolis_common::{ + crypto::keyhash_224, + messages::{CardanoMessage, Message, StateQuery, StateQueryResponse}, + queries::blocks::{ + BlockHashes, BlockInfo, BlockInvolvedAddress, BlockInvolvedAddresses, BlockKey, + BlockTransaction, BlockTransactions, BlockTransactionsCBOR, BlocksStateQuery, + BlocksStateQueryResponse, NextBlocks, PreviousBlocks, TransactionHashes, + DEFAULT_BLOCKS_QUERY_TOPIC, + }, + queries::misc::Order, + state_history::{StateHistory, StateHistoryStore}, + Address, BlockHash, GenesisDelegate, HeavyDelegate, TxHash, VRFKey, +}; +use 
anyhow::{bail, Result}; +use caryatid_sdk::{module, Context, Module}; +use config::Config; +use std::cmp::Ordering; +use std::collections::{BTreeMap, HashMap}; +use std::sync::Arc; +use tokio::sync::Mutex; +use tracing::error; + +use crate::stores::{fjall::FjallStore, Block, Store}; + +const DEFAULT_BLOCKS_TOPIC: &str = "cardano.block.body"; +const DEFAULT_PROTOCOL_PARAMETERS_TOPIC: &str = "cardano.protocol.parameters"; +const DEFAULT_STORE: &str = "fjall"; + +#[module( + message_type(Message), + name = "chain-store", + description = "Block and TX state" +)] +pub struct ChainStore; + +impl ChainStore { + pub async fn init(&self, context: Arc>, config: Arc) -> Result<()> { + let new_blocks_topic = + config.get_string("blocks-topic").unwrap_or(DEFAULT_BLOCKS_TOPIC.to_string()); + let params_topic = config + .get_string("protocol-parameters-topic") + .unwrap_or(DEFAULT_PROTOCOL_PARAMETERS_TOPIC.to_string()); + let block_queries_topic = config + .get_string(DEFAULT_BLOCKS_QUERY_TOPIC.0) + .unwrap_or(DEFAULT_BLOCKS_QUERY_TOPIC.1.to_string()); + + let store_type = config.get_string("store").unwrap_or(DEFAULT_STORE.to_string()); + let store: Arc = match store_type.as_str() { + "fjall" => Arc::new(FjallStore::new(config.clone())?), + _ => bail!("Unknown store type {store_type}"), + }; + + let history = Arc::new(Mutex::new(StateHistory::::new( + "chain_store", + StateHistoryStore::default_epoch_store(), + ))); + history.lock().await.commit_forced(State::new()); + + let query_store = store.clone(); + let query_history = history.clone(); + context.handle(&block_queries_topic, move |req| { + let query_store = query_store.clone(); + let query_history = query_history.clone(); + async move { + let Message::StateQuery(StateQuery::Blocks(query)) = req.as_ref() else { + return Arc::new(Message::StateQueryResponse(StateQueryResponse::Blocks( + BlocksStateQueryResponse::Error("Invalid message for blocks-state".into()), + ))); + }; + let Some(state) = query_history.lock().await.current().map(|s| s.clone()) else { + return Arc::new(Message::StateQueryResponse(StateQueryResponse::Blocks( + BlocksStateQueryResponse::Error("unitialised state".to_string()), + ))); + }; + let res = Self::handle_blocks_query(&query_store, &state, &query) + .unwrap_or_else(|err| BlocksStateQueryResponse::Error(err.to_string())); + Arc::new(Message::StateQueryResponse(StateQueryResponse::Blocks(res))) + } + }); + + let mut new_blocks_subscription = context.subscribe(&new_blocks_topic).await?; + let mut params_subscription = context.subscribe(¶ms_topic).await?; + context.run(async move { + // Get promise of params message so the params queue is cleared and + // the message is ready as soon as possible when we need it + let mut params_message = params_subscription.read(); + loop { + let Ok((_, message)) = new_blocks_subscription.read().await else { + return; + }; + + if let Err(err) = Self::handle_new_block(&store, &message) { + error!("Could not insert block: {err}"); + } + + match message.as_ref() { + Message::Cardano((block_info, _)) => { + if block_info.new_epoch { + let Ok((_, message)) = params_message.await else { + return; + }; + let mut history = history.lock().await; + let mut state = history.get_current_state(); + if !Self::handle_new_params(&mut state, message).is_ok() { + return; + }; + history.commit(block_info.number, state); + // Have the next params message ready for the next epoch + params_message = params_subscription.read(); + } + } + _ => (), + } + } + }); + + Ok(()) + } + + fn handle_new_block(store: &Arc, message: 
&Message) -> Result<()> { + let Message::Cardano((info, CardanoMessage::BlockBody(body))) = message else { + bail!("Unexpected message type: {message:?}"); + }; + + store.insert_block(info, &body.raw) + } + + fn handle_blocks_query( + store: &Arc, + state: &State, + query: &BlocksStateQuery, + ) -> Result { + match query { + BlocksStateQuery::GetLatestBlock => { + let Some(block) = store.get_latest_block()? else { + return Ok(BlocksStateQueryResponse::NotFound); + }; + let info = Self::to_block_info(block, store, &state, true)?; + Ok(BlocksStateQueryResponse::LatestBlock(info)) + } + BlocksStateQuery::GetLatestBlockTransactions { limit, skip, order } => { + let Some(block) = store.get_latest_block()? else { + return Ok(BlocksStateQueryResponse::NotFound); + }; + let txs = Self::to_block_transactions(block, limit, skip, order)?; + Ok(BlocksStateQueryResponse::LatestBlockTransactions(txs)) + } + BlocksStateQuery::GetLatestBlockTransactionsCBOR { limit, skip, order } => { + let Some(block) = store.get_latest_block()? else { + return Ok(BlocksStateQueryResponse::NotFound); + }; + let txs = Self::to_block_transactions_cbor(block, limit, skip, order)?; + Ok(BlocksStateQueryResponse::LatestBlockTransactionsCBOR(txs)) + } + BlocksStateQuery::GetBlockInfo { block_key } => { + let Some(block) = Self::get_block_by_key(store, block_key)? else { + return Ok(BlocksStateQueryResponse::NotFound); + }; + let info = Self::to_block_info(block, store, &state, false)?; + Ok(BlocksStateQueryResponse::BlockInfo(info)) + } + BlocksStateQuery::GetBlockBySlot { slot } => { + let Some(block) = store.get_block_by_slot(*slot)? else { + return Ok(BlocksStateQueryResponse::NotFound); + }; + let info = Self::to_block_info(block, store, &state, false)?; + Ok(BlocksStateQueryResponse::BlockBySlot(info)) + } + BlocksStateQuery::GetBlockByEpochSlot { epoch, slot } => { + let Some(block) = store.get_block_by_epoch_slot(*epoch, *slot)? else { + return Ok(BlocksStateQueryResponse::NotFound); + }; + let info = Self::to_block_info(block, store, &state, false)?; + Ok(BlocksStateQueryResponse::BlockByEpochSlot(info)) + } + BlocksStateQuery::GetNextBlocks { + block_key, + limit, + skip, + } => { + if *limit == 0 { + return Ok(BlocksStateQueryResponse::NextBlocks(NextBlocks { + blocks: vec![], + })); + } + let Some(block) = Self::get_block_by_key(store, &block_key)? else { + return Ok(BlocksStateQueryResponse::NotFound); + }; + let number = match block_key { + BlockKey::Number(number) => *number, + _ => Self::get_block_number(&block)?, + }; + let min_number = number + 1 + skip; + let max_number = min_number + limit - 1; + let blocks = store.get_blocks_by_number_range(min_number, max_number)?; + let info = Self::to_block_info_bulk(blocks, store, &state, false)?; + Ok(BlocksStateQueryResponse::NextBlocks(NextBlocks { + blocks: info, + })) + } + BlocksStateQuery::GetPreviousBlocks { + block_key, + limit, + skip, + } => { + if *limit == 0 { + return Ok(BlocksStateQueryResponse::PreviousBlocks(PreviousBlocks { + blocks: vec![], + })); + } + let Some(block) = Self::get_block_by_key(store, &block_key)? 
else { + return Ok(BlocksStateQueryResponse::NotFound); + }; + let number = match block_key { + BlockKey::Number(number) => *number, + _ => Self::get_block_number(&block)?, + }; + let Some(max_number) = number.checked_sub(1 + skip) else { + return Ok(BlocksStateQueryResponse::PreviousBlocks(PreviousBlocks { + blocks: vec![], + })); + }; + let min_number = max_number.saturating_sub(limit - 1); + let blocks = store.get_blocks_by_number_range(min_number, max_number)?; + let info = Self::to_block_info_bulk(blocks, store, &state, false)?; + Ok(BlocksStateQueryResponse::PreviousBlocks(PreviousBlocks { + blocks: info, + })) + } + BlocksStateQuery::GetBlockTransactions { + block_key, + limit, + skip, + order, + } => { + let Some(block) = Self::get_block_by_key(store, block_key)? else { + return Ok(BlocksStateQueryResponse::NotFound); + }; + let txs = Self::to_block_transactions(block, limit, skip, order)?; + Ok(BlocksStateQueryResponse::BlockTransactions(txs)) + } + BlocksStateQuery::GetBlockTransactionsCBOR { + block_key, + limit, + skip, + order, + } => { + let Some(block) = Self::get_block_by_key(store, block_key)? else { + return Ok(BlocksStateQueryResponse::NotFound); + }; + let txs = Self::to_block_transactions_cbor(block, limit, skip, order)?; + Ok(BlocksStateQueryResponse::BlockTransactionsCBOR(txs)) + } + BlocksStateQuery::GetBlockInvolvedAddresses { + block_key, + limit, + skip, + } => { + let Some(block) = Self::get_block_by_key(store, block_key)? else { + return Ok(BlocksStateQueryResponse::NotFound); + }; + let addresses = Self::to_block_involved_addresses(block, limit, skip)?; + Ok(BlocksStateQueryResponse::BlockInvolvedAddresses(addresses)) + } + BlocksStateQuery::GetBlockHashes { block_numbers } => { + let mut block_hashes = HashMap::new(); + for block_number in block_numbers { + if let Ok(Some(block)) = store.get_block_by_number(*block_number) { + if let Ok(hash) = Self::get_block_hash(&block) { + block_hashes.insert(*block_number, hash); + } + } + } + Ok(BlocksStateQueryResponse::BlockHashes(BlockHashes { + block_hashes, + })) + } + BlocksStateQuery::GetTransactionHashes { tx_ids } => { + let mut block_ids: HashMap<_, Vec<_>> = HashMap::new(); + for tx_id in tx_ids { + block_ids.entry(tx_id.block_number()).or_default().push(tx_id); + } + let mut tx_hashes = HashMap::new(); + for (block_number, tx_ids) in block_ids { + if let Ok(Some(block)) = store.get_block_by_number(block_number.into()) { + for tx_id in tx_ids { + if let Ok(hashes) = Self::to_block_transaction_hashes(&block) { + if let Some(hash) = hashes.get(tx_id.tx_index() as usize) { + tx_hashes.insert(*tx_id, *hash); + } + } + } + } + } + Ok(BlocksStateQueryResponse::TransactionHashes( + TransactionHashes { tx_hashes }, + )) + } + } + } + + fn get_block_by_key(store: &Arc, block_key: &BlockKey) -> Result> { + match block_key { + BlockKey::Hash(hash) => store.get_block_by_hash(hash.as_ref()), + BlockKey::Number(number) => store.get_block_by_number(*number), + } + } + + fn get_block_number(block: &Block) -> Result { + Ok(pallas_traverse::MultiEraBlock::decode(&block.bytes)?.number()) + } + + fn get_block_hash(block: &Block) -> Result { + Ok(BlockHash( + *pallas_traverse::MultiEraBlock::decode(&block.bytes)?.hash(), + )) + } + + fn to_block_info( + block: Block, + store: &Arc, + state: &State, + is_latest: bool, + ) -> Result { + let blocks = vec![block]; + let mut info = Self::to_block_info_bulk(blocks, store, &state, is_latest)?; + Ok(info.remove(0)) + } + + fn to_block_info_bulk( + blocks: Vec, + store: &Arc, + state: 
&State, + final_block_is_latest: bool, + ) -> Result> { + if blocks.is_empty() { + return Ok(vec![]); + } + let mut decoded_blocks = vec![]; + for block in &blocks { + decoded_blocks.push(pallas_traverse::MultiEraBlock::decode(&block.bytes)?); + } + + let (latest_number, latest_hash) = if final_block_is_latest { + let latest = decoded_blocks.last().unwrap(); + (latest.number(), latest.hash()) + } else { + let raw_latest = store.get_latest_block()?.unwrap(); + let latest = pallas_traverse::MultiEraBlock::decode(&raw_latest.bytes)?; + (latest.number(), latest.hash()) + }; + + let mut next_hash = if final_block_is_latest { + None + } else { + let next_number = decoded_blocks.last().unwrap().number() + 1; + if next_number > latest_number { + None + } else if next_number == latest_number { + Some(latest_hash) + } else { + let raw_next = store.get_block_by_number(next_number)?; + if let Some(raw_next) = raw_next { + let next = pallas_traverse::MultiEraBlock::decode(&raw_next.bytes)?; + Some(next.hash()) + } else { + None + } + } + }; + + let mut block_info = vec![]; + for (block, decoded) in blocks.iter().zip(decoded_blocks).rev() { + let header = decoded.header(); + let mut output = None; + let mut fees = None; + for tx in decoded.txs() { + if let Some(new_fee) = tx.fee() { + fees = Some(fees.unwrap_or_default() + new_fee); + } + for o in tx.outputs() { + output = Some(output.unwrap_or_default() + o.value().coin()) + } + } + let (op_cert_hot_vkey, op_cert_counter) = match &header { + pallas_traverse::MultiEraHeader::BabbageCompatible(h) => { + let cert = &h.header_body.operational_cert; + ( + Some(&cert.operational_cert_hot_vkey), + Some(cert.operational_cert_sequence_number), + ) + } + pallas_traverse::MultiEraHeader::ShelleyCompatible(h) => ( + Some(&h.header_body.operational_cert_hot_vkey), + Some(h.header_body.operational_cert_sequence_number), + ), + _ => (None, None), + }; + let op_cert = op_cert_hot_vkey.map(|vkey| keyhash_224(vkey)); + + block_info.push(BlockInfo { + timestamp: block.extra.timestamp, + number: header.number(), + hash: BlockHash(*header.hash()), + slot: header.slot(), + epoch: block.extra.epoch, + epoch_slot: block.extra.epoch_slot, + issuer: map_to_block_issuer( + &header, + &state.byron_heavy_delegates, + &state.shelley_genesis_delegates, + ), + size: block.bytes.len() as u64, + tx_count: decoded.tx_count() as u64, + output, + fees, + block_vrf: header.vrf_vkey().map(|key| VRFKey::try_from(key).ok().unwrap()), + op_cert, + op_cert_counter, + previous_block: header.previous_hash().map(|h| BlockHash(*h)), + next_block: next_hash.map(|h| BlockHash(*h)), + confirmations: latest_number - header.number(), + }); + + next_hash = Some(header.hash()); + } + + block_info.reverse(); + Ok(block_info) + } + + fn to_block_transaction_hashes(block: &Block) -> Result> { + let decoded = pallas_traverse::MultiEraBlock::decode(&block.bytes)?; + let txs = decoded.txs(); + Ok(txs.iter().map(|tx| TxHash(*tx.hash())).collect()) + } + + fn to_block_transactions( + block: Block, + limit: &u64, + skip: &u64, + order: &Order, + ) -> Result { + let decoded = pallas_traverse::MultiEraBlock::decode(&block.bytes)?; + let txs = decoded.txs(); + let txs_iter: Box> = match *order { + Order::Asc => Box::new(txs.iter()), + Order::Desc => Box::new(txs.iter().rev()), + }; + let hashes = txs_iter + .skip(*skip as usize) + .take(*limit as usize) + .map(|tx| TxHash(*tx.hash())) + .collect(); + Ok(BlockTransactions { hashes }) + } + + fn to_block_transactions_cbor( + block: Block, + limit: &u64, + skip: &u64, + 
order: &Order, + ) -> Result { + let decoded = pallas_traverse::MultiEraBlock::decode(&block.bytes)?; + let txs = decoded.txs(); + let txs_iter: Box> = match *order { + Order::Asc => Box::new(txs.iter()), + Order::Desc => Box::new(txs.iter().rev()), + }; + let txs = txs_iter + .skip(*skip as usize) + .take(*limit as usize) + .map(|tx| { + let hash = TxHash(*tx.hash()); + let cbor = tx.encode(); + BlockTransaction { hash, cbor } + }) + .collect(); + Ok(BlockTransactionsCBOR { txs }) + } + + fn to_block_involved_addresses( + block: Block, + limit: &u64, + skip: &u64, + ) -> Result { + let decoded = pallas_traverse::MultiEraBlock::decode(&block.bytes)?; + let mut addresses = BTreeMap::new(); + for tx in decoded.txs() { + let hash = TxHash(*tx.hash()); + for output in tx.outputs() { + match output.address() { + Ok(pallas_address) => match map_parameters::map_address(&pallas_address) { + Ok(address) => { + addresses + .entry(BechOrdAddress(address)) + .or_insert_with(Vec::new) + .push(hash.clone()); + } + _ => (), + }, + _ => (), + } + } + } + let addresses: Vec = addresses + .into_iter() + .skip(*skip as usize) + .take(*limit as usize) + .map(|(address, txs)| BlockInvolvedAddress { + address: address.0, + txs, + }) + .collect(); + Ok(BlockInvolvedAddresses { addresses }) + } + + fn handle_new_params(state: &mut State, message: Arc) -> Result<()> { + match message.as_ref() { + Message::Cardano((_, CardanoMessage::ProtocolParams(params))) => { + if let Some(byron) = ¶ms.params.byron { + state.byron_heavy_delegates = byron.heavy_delegation.clone(); + } + if let Some(shelley) = ¶ms.params.shelley { + state.shelley_genesis_delegates = shelley.gen_delegs.clone(); + } + } + _ => (), + } + Ok(()) + } +} + +#[derive(Default, Debug, Clone)] +pub struct State { + // Keyed on cert + pub byron_heavy_delegates: HashMap, HeavyDelegate>, + pub shelley_genesis_delegates: HashMap, GenesisDelegate>, +} + +impl State { + pub fn new() -> Self { + Self { + byron_heavy_delegates: HashMap::new(), + shelley_genesis_delegates: HashMap::new(), + } + } +} + +#[derive(Eq, PartialEq)] +struct BechOrdAddress(Address); + +impl Ord for BechOrdAddress { + fn cmp(&self, other: &Self) -> Ordering { + self.0.to_string().into_iter().cmp(other.0.to_string()) + } +} + +impl PartialOrd for BechOrdAddress { + fn partial_cmp(&self, other: &Self) -> Option { + Some(self.cmp(other)) + } +} diff --git a/modules/chain_store/src/stores/fjall.rs b/modules/chain_store/src/stores/fjall.rs new file mode 100644 index 00000000..5dc32e37 --- /dev/null +++ b/modules/chain_store/src/stores/fjall.rs @@ -0,0 +1,397 @@ +use std::{fs, path::Path, sync::Arc}; + +use acropolis_common::{BlockInfo, TxHash}; +use anyhow::Result; +use config::Config; +use fjall::{Batch, Keyspace, Partition}; + +use crate::stores::{Block, ExtraBlockData}; + +pub struct FjallStore { + keyspace: Keyspace, + blocks: FjallBlockStore, + txs: FjallTXStore, +} + +const DEFAULT_DATABASE_PATH: &str = "fjall-blocks"; +const DEFAULT_CLEAR_ON_START: bool = true; +const BLOCKS_PARTITION: &str = "blocks"; +const BLOCK_HASHES_BY_SLOT_PARTITION: &str = "block-hashes-by-slot"; +const BLOCK_HASHES_BY_NUMBER_PARTITION: &str = "block-hashes-by-number"; +const BLOCK_HASHES_BY_EPOCH_SLOT_PARTITION: &str = "block-hashes-by-epoch-slot"; +const TXS_PARTITION: &str = "txs"; + +impl FjallStore { + pub fn new(config: Arc) -> Result { + let path = config.get_string("database-path").unwrap_or(DEFAULT_DATABASE_PATH.to_string()); + let clear = 
config.get_bool("clear-on-start").unwrap_or(DEFAULT_CLEAR_ON_START); + let path = Path::new(&path); + if clear && path.exists() { + fs::remove_dir_all(path)?; + } + let fjall_config = fjall::Config::new(path); + let keyspace = fjall_config.open()?; + let blocks = FjallBlockStore::new(&keyspace)?; + let txs = FjallTXStore::new(&keyspace)?; + Ok(Self { + keyspace, + blocks, + txs, + }) + } +} + +impl super::Store for FjallStore { + fn insert_block(&self, info: &BlockInfo, block: &[u8]) -> Result<()> { + let extra = ExtraBlockData { + epoch: info.epoch, + epoch_slot: info.epoch_slot, + timestamp: info.timestamp, + }; + let tx_hashes = super::extract_tx_hashes(block)?; + let raw = Block { + bytes: block.to_vec(), + extra, + }; + + let mut batch = self.keyspace.batch(); + self.blocks.insert(&mut batch, info, &raw); + for (index, hash) in tx_hashes.iter().enumerate() { + let block_ref = BlockReference { + block_hash: info.hash.to_vec(), + index, + }; + self.txs.insert_tx(&mut batch, *hash, block_ref); + } + + batch.commit()?; + + Ok(()) + } + + fn get_block_by_hash(&self, hash: &[u8]) -> Result> { + self.blocks.get_by_hash(hash) + } + + fn get_block_by_slot(&self, slot: u64) -> Result> { + self.blocks.get_by_slot(slot) + } + + fn get_block_by_number(&self, number: u64) -> Result> { + self.blocks.get_by_number(number) + } + + fn get_blocks_by_number_range(&self, min_number: u64, max_number: u64) -> Result> { + self.blocks.get_by_number_range(min_number, max_number) + } + + fn get_block_by_epoch_slot(&self, epoch: u64, epoch_slot: u64) -> Result> { + self.blocks.get_by_epoch_slot(epoch, epoch_slot) + } + + fn get_latest_block(&self) -> Result> { + self.blocks.get_latest() + } +} + +struct FjallBlockStore { + blocks: Partition, + block_hashes_by_slot: Partition, + block_hashes_by_number: Partition, + block_hashes_by_epoch_slot: Partition, +} + +impl FjallBlockStore { + fn new(keyspace: &Keyspace) -> Result { + let blocks = + keyspace.open_partition(BLOCKS_PARTITION, fjall::PartitionCreateOptions::default())?; + let block_hashes_by_slot = keyspace.open_partition( + BLOCK_HASHES_BY_SLOT_PARTITION, + fjall::PartitionCreateOptions::default(), + )?; + let block_hashes_by_number = keyspace.open_partition( + BLOCK_HASHES_BY_NUMBER_PARTITION, + fjall::PartitionCreateOptions::default(), + )?; + let block_hashes_by_epoch_slot = keyspace.open_partition( + BLOCK_HASHES_BY_EPOCH_SLOT_PARTITION, + fjall::PartitionCreateOptions::default(), + )?; + Ok(Self { + blocks, + block_hashes_by_slot, + block_hashes_by_number, + block_hashes_by_epoch_slot, + }) + } + + fn insert(&self, batch: &mut Batch, info: &BlockInfo, raw: &Block) { + let encoded = { + let mut bytes = vec![]; + minicbor::encode(raw, &mut bytes).expect("infallible"); + bytes + }; + batch.insert(&self.blocks, &*info.hash, encoded); + batch.insert( + &self.block_hashes_by_slot, + info.slot.to_be_bytes(), + &*info.hash, + ); + batch.insert( + &self.block_hashes_by_number, + info.number.to_be_bytes(), + &*info.hash, + ); + batch.insert( + &self.block_hashes_by_epoch_slot, + epoch_slot_key(info.epoch, info.epoch_slot), + &*info.hash, + ); + } + + fn get_by_hash(&self, hash: &[u8]) -> Result> { + let Some(block) = self.blocks.get(hash)? else { + return Ok(None); + }; + Ok(minicbor::decode(&block)?) + } + + fn get_by_slot(&self, slot: u64) -> Result> { + let Some(hash) = self.block_hashes_by_slot.get(slot.to_be_bytes())? 
else { + return Ok(None); + }; + self.get_by_hash(&hash) + } + + fn get_by_number(&self, number: u64) -> Result> { + let Some(hash) = self.block_hashes_by_number.get(number.to_be_bytes())? else { + return Ok(None); + }; + self.get_by_hash(&hash) + } + + fn get_by_number_range(&self, min_number: u64, max_number: u64) -> Result> { + let min_number_bytes = min_number.to_be_bytes(); + let max_number_bytes = max_number.to_be_bytes(); + let mut blocks = vec![]; + for res in self.block_hashes_by_number.range(min_number_bytes..=max_number_bytes) { + let (_, hash) = res?; + if let Some(block) = self.get_by_hash(&hash)? { + blocks.push(block); + } + } + Ok(blocks) + } + + fn get_by_epoch_slot(&self, epoch: u64, epoch_slot: u64) -> Result> { + let Some(hash) = self.block_hashes_by_epoch_slot.get(epoch_slot_key(epoch, epoch_slot))? + else { + return Ok(None); + }; + self.get_by_hash(&hash) + } + + fn get_latest(&self) -> Result> { + let Some((_, hash)) = self.block_hashes_by_slot.last_key_value()? else { + return Ok(None); + }; + self.get_by_hash(&hash) + } +} + +fn epoch_slot_key(epoch: u64, epoch_slot: u64) -> [u8; 16] { + let mut key = [0; 16]; + key[..8].copy_from_slice(epoch.to_be_bytes().as_slice()); + key[8..].copy_from_slice(epoch_slot.to_be_bytes().as_slice()); + key +} + +struct FjallTXStore { + txs: Partition, +} +impl FjallTXStore { + fn new(keyspace: &Keyspace) -> Result { + let txs = + keyspace.open_partition(TXS_PARTITION, fjall::PartitionCreateOptions::default())?; + Ok(Self { txs }) + } + + fn insert_tx(&self, batch: &mut Batch, hash: TxHash, block_ref: BlockReference) { + let bytes = minicbor::to_vec(block_ref).expect("infallible"); + batch.insert(&self.txs, hash.as_ref(), bytes); + } +} + +#[derive(minicbor::Decode, minicbor::Encode)] +struct BlockReference { + #[n(0)] + block_hash: Vec, + #[n(1)] + index: usize, +} + +#[cfg(test)] +mod tests { + use crate::stores::Store; + + use super::*; + use acropolis_common::BlockHash; + use pallas_traverse::{wellknown::GenesisValues, MultiEraBlock}; + use tempfile::TempDir; + + const TEST_BLOCK: &str = 
"820785828a1a0010afaa1a0150d7925820a22f65265e7a71cfc3b637d6aefe8f8241d562f5b1b787ff36697ae4c3886f185820e856c84a3d90c8526891bd58d957afadc522de37b14ae04c395db8a7a1b08c4a582015587d5633be324f8de97168399ab59d7113f0a74bc7412b81f7cc1007491671825840af9ff8cb146880eba1b12beb72d86be46fbc98f6b88110cd009bd6746d255a14bb0637e3a29b7204bff28236c1b9f73e501fed1eb5634bd741be120332d25e5e5850a9f1de24d01ba43b025a3351b25de50cc77f931ed8cdd0be632ad1a437ec9cf327b24eb976f91dbf68526f15bacdf8f0c1ea4a2072df9412796b34836a816760f4909b98c0e76b160d9aec6b2da060071903705820b5858c659096fcc19f2f3baef5fdd6198641a623bd43e792157b5ea3a2ecc85c8458200ca1ec2c1c2af308bd9e7a86eb12d603a26157752f3f71c337781c456e6ed0c90018a558408e554b644a2b25cb5892d07a26c273893829f1650ec33bf6809d953451c519c32cfd48d044cd897a17cdef154d5f5c9b618d9b54f8c49e170082c08c236524098209005901c05a96b747789ef6678b2f4a2a7caca92e270f736e9b621686f95dd1332005102faee21ed50cf6fa6c67e38b33df686c79c91d55f30769f7c964d98aa84cbefe0a808ee6f45faaf9badcc3f746e6a51df1aa979195871fd5ffd91037ea216803be7e7fccbf4c13038c459c7a14906ab57f3306fe155af7877c88866eede7935f642f6a72f1368c33ed5cc7607c995754af787a5af486958edb531c0ae65ce9fdce423ad88925e13ef78700950093ae707bb1100299a66a5bb15137f7ba62132ba1c9b74495aac50e1106bacb5db2bed4592f66b610c2547f485d061c6c149322b0c92bdde644eb672267fdab5533157ff398b9e16dd6a06edfd67151e18a3ac93fc28a51f9a73f8b867f5f432b1d9b5ae454ef63dea7e1a78631cf3fee1ba82db61726701ac5db1c4fee4bb6316768c82c0cdc4ebd58ccc686be882f9608592b3c718e4b5d356982a6b83433fe76d37394eff9f3a8e4773e3bab9a8b93b4ea90fa33bfbcf0dc5a21bfe64be2eefaa82c0494ab729e50596110f60ae9ad64b3eb9ddb54001b03cc264b65634c071d3b24a44322f39a9eae239fd886db8d429969433cb2d0a82d7877f174b0e154262f1af44ce5bc053b62daadd2926f957440ff3981a600d9010281825820af09d312a642fecb47da719156517bec678469c15789bcf002ce2ef563edf54200018182581d6052e63f22c5107ed776b70f7b92248b02552fd08f3e747bc745099441821b00000001373049f4a1581c34250edd1e9836f5378702fbf9416b709bc140e04f668cc355208518a1494154414441636f696e1953a6021a000306b5031a01525e0209a1581c34250edd1e9836f5378702fbf9416b709bc140e04f668cc355208518a1494154414441636f696e010758206cf243cc513691d9edc092b1030c6d1e5f9a8621a4d4383032b3d292d4679d5c81a200d90102828258201287e9ce9e00a603d250b557146aa0581fc4edf277a244ce39d3b2f2ced5072f5840d40fbe736892d8dab09e864a25f2e59fb7bfe445d960bbace30996965dc12a34c59746febf9d32ade65b6a9e1a1a6efc53830a3acaab699972cd4f240c024c0f825820742d8af3543349b5b18f3cba28f23b2d6e465b9c136c42e1fae6b2390f565427584005637b5645784bd998bb8ed837021d520200211fdd958b9a4d4b3af128fa6e695fb86abad7a9ddad6f1db946f8b812113fa16cfb7025e2397277b14e8c9bed0a01d90102818200581c45d70e54f3b5e9c5a2b0cd417028197bd6f5fa5378c2f5eba896678da100d90103a100a11902a2a1636d73678f78264175746f2d4c6f6f702d5472616e73616374696f6e202336323733363820627920415441444160783c4c6976652045706f6368203235352c207765206861766520303131682035396d20323573206c65667420756e74696c20746865206e657874206f6e6578344974277320536f6e6e746167202d20323520466562727561722032303234202d2031333a33303a333520696e20417573747269616060607820412072616e646f6d205a656e2d51756f746520666f7220796f753a20f09f998f78344974206973206e6576657220746f6f206c61746520746f206265207768617420796f75206d696768742068617665206265656e2e6f202d2047656f72676520456c696f746078374e6f64652d5265766973696f6e3a203462623230343864623737643632336565366533363738363138633264386236633436373633333360782953616e63686f4e657420697320617765736f6d652c206861766520736f6d652066756e2120f09f988d7819204265737420726567617264732c204d617274696e203a2d2980"; + + // Mainnet blocks 1-9 + const TEST_BLOCKS: [&str; 9] = [ + 
"820183851a2d964a09582089d9b5a5b8ddc8d7e5a6795e9774d97faf1efea59b2caf7eaf9f8c5b32059df484830058200e5751c026e543b2e8ab2eb06099daa1d1e5df47778f7787faab45cdf12fe3a85820afc0da64183bf2664f3d4eec7238d524ba607faeeab24fc100eb861dba69971b8300582025777aca9e4a73d48fc73b4f961d345b06d4a6f349cb7916570d35537d53479f5820d36a2619a672494604e11bb447cbcf5231e9f2ba25c2169177edc941bd50ad6c5820afc0da64183bf2664f3d4eec7238d524ba607faeeab24fc100eb861dba69971b58204e66280cd94d591072349bec0a3090a53aa945562efb6d08d56e53654b0e40988482000058401bc97a2fe02c297880ce8ecfd997fe4c1ec09ee10feeee9f686760166b05281d6283468ffd93becb0c956ccddd642df9b1244c915911185fa49355f6f22bfab98101820282840058401bc97a2fe02c297880ce8ecfd997fe4c1ec09ee10feeee9f686760166b05281d6283468ffd93becb0c956ccddd642df9b1244c915911185fa49355f6f22bfab9584061261a95b7613ee6bf2067dad77b70349729b0c50d57bc1cf30de0db4a1e73a885d0054af7c23fc6c37919dba41c602a57e2d0f9329a7954b867338d6fb2c9455840e03e62f083df5576360e60a32e22bbb07b3c8df4fcab8079f1d6f61af3954d242ba8a06516c395939f24096f3df14e103a7d9c2b80a68a9363cf1f27c7a4e307584044f18ef23db7d2813415cb1b62e8f3ead497f238edf46bb7a97fd8e9105ed9775e8421d18d47e05a2f602b700d932c181e8007bbfb231d6f1a050da4ebeeba048483000000826a63617264616e6f2d736c00a058204ba92aa320c60acc9ad7b9a64f2eda55c4d2ec28e604faf186708b4f0c4e8edf849fff8300d9010280d90102809fff82809fff81a0", + "820183851a2d964a095820f0f7892b5c333cffc4b3c4344de48af4cc63f55e44936196f365a9ef2244134f84830058200e5751c026e543b2e8ab2eb06099daa1d1e5df47778f7787faab45cdf12fe3a85820afc0da64183bf2664f3d4eec7238d524ba607faeeab24fc100eb861dba69971b8300582025777aca9e4a73d48fc73b4f961d345b06d4a6f349cb7916570d35537d53479f5820d36a2619a672494604e11bb447cbcf5231e9f2ba25c2169177edc941bd50ad6c5820afc0da64183bf2664f3d4eec7238d524ba607faeeab24fc100eb861dba69971b58204e66280cd94d591072349bec0a3090a53aa945562efb6d08d56e53654b0e409884820001584050733161fdafb6c8cb6fae0e25bdf9555105b3678efb08f1775b9e90de4f5c77bcc8cefff8d9011cb278b28fddc86d9bab099656d77a7856c7619108cbf6575281028202828400584050733161fdafb6c8cb6fae0e25bdf9555105b3678efb08f1775b9e90de4f5c77bcc8cefff8d9011cb278b28fddc86d9bab099656d77a7856c7619108cbf657525840e8c03a03c0b2ddbea4195caf39f41e669f7d251ecf221fbb2f275c0a5d7e05d190dcc246f56c8e33ac0037066e2f664ddaa985ea5284082643308dde4f5bfedf5840c8b39f094dc00608acb2d20ff274cb3e0c022ccb0ce558ea7c1a2d3a32cd54b42cc30d32406bcfbb7f2f86d05d2032848be15b178e3ad776f8b1bc56a671400d5840923c7714af7fe4b1272fc042111ece6fd08f5f16298d62bae755c70c1e1605697cbaed500e196330f40813128250d9ede9c8557b33f48e8a5f32f765929e4a0d8483000000826a63617264616e6f2d736c00a058204ba92aa320c60acc9ad7b9a64f2eda55c4d2ec28e604faf186708b4f0c4e8edf849fff8300d9010280d90102809fff82809fff81a0", + 
"820183851a2d964a0958201dbc81e3196ba4ab9dcb07e1c37bb28ae1c289c0707061f28b567c2f48698d5084830058200e5751c026e543b2e8ab2eb06099daa1d1e5df47778f7787faab45cdf12fe3a85820afc0da64183bf2664f3d4eec7238d524ba607faeeab24fc100eb861dba69971b8300582025777aca9e4a73d48fc73b4f961d345b06d4a6f349cb7916570d35537d53479f5820d36a2619a672494604e11bb447cbcf5231e9f2ba25c2169177edc941bd50ad6c5820afc0da64183bf2664f3d4eec7238d524ba607faeeab24fc100eb861dba69971b58204e66280cd94d591072349bec0a3090a53aa945562efb6d08d56e53654b0e409884820002584050733161fdafb6c8cb6fae0e25bdf9555105b3678efb08f1775b9e90de4f5c77bcc8cefff8d9011cb278b28fddc86d9bab099656d77a7856c7619108cbf6575281038202828400584050733161fdafb6c8cb6fae0e25bdf9555105b3678efb08f1775b9e90de4f5c77bcc8cefff8d9011cb278b28fddc86d9bab099656d77a7856c7619108cbf657525840e8c03a03c0b2ddbea4195caf39f41e669f7d251ecf221fbb2f275c0a5d7e05d190dcc246f56c8e33ac0037066e2f664ddaa985ea5284082643308dde4f5bfedf5840c8b39f094dc00608acb2d20ff274cb3e0c022ccb0ce558ea7c1a2d3a32cd54b42cc30d32406bcfbb7f2f86d05d2032848be15b178e3ad776f8b1bc56a671400d584094966ae05c576724fd892aa91959fc191833fade8e118c36a12eb453003b634ccc9bb7808bcf950c5da9145cffad9e26061bfe9853817706008f75a464c814038483000000826a63617264616e6f2d736c00a058204ba92aa320c60acc9ad7b9a64f2eda55c4d2ec28e604faf186708b4f0c4e8edf849fff8300d9010280d90102809fff82809fff81a0", + "820183851a2d964a09582052b7912de176ab76c233d6e08ccdece53ac1863c08cc59d3c5dec8d924d9b53684830058200e5751c026e543b2e8ab2eb06099daa1d1e5df47778f7787faab45cdf12fe3a85820afc0da64183bf2664f3d4eec7238d524ba607faeeab24fc100eb861dba69971b8300582025777aca9e4a73d48fc73b4f961d345b06d4a6f349cb7916570d35537d53479f5820d36a2619a672494604e11bb447cbcf5231e9f2ba25c2169177edc941bd50ad6c5820afc0da64183bf2664f3d4eec7238d524ba607faeeab24fc100eb861dba69971b58204e66280cd94d591072349bec0a3090a53aa945562efb6d08d56e53654b0e409884820003584026566e86fc6b9b177c8480e275b2b112b573f6d073f9deea53b8d99c4ed976b335b2b3842f0e380001f090bc923caa9691ed9115e286da9421e2745c7acc87f181048202828400584026566e86fc6b9b177c8480e275b2b112b573f6d073f9deea53b8d99c4ed976b335b2b3842f0e380001f090bc923caa9691ed9115e286da9421e2745c7acc87f15840f14f712dc600d793052d4842d50cefa4e65884ea6cf83707079eb8ce302efc85dae922d5eb3838d2b91784f04824d26767bfb65bd36a36e74fec46d09d98858d58408ab43e904b06e799c1817c5ced4f3a7bbe15cdbf422dea9d2d5dc2c6105ce2f4d4c71e5d4779f6c44b770a133636109949e1f7786acb5a732bcdea0470fea4065840273c97ffc6e16c86772bdb9cb52bfe99585917f901ee90ce337a9654198fb09ca6bc51d74a492261c169ca5a196a04938c740ba6629254fe566a590370cc9b0f8483000000826a63617264616e6f2d736c00a058204ba92aa320c60acc9ad7b9a64f2eda55c4d2ec28e604faf186708b4f0c4e8edf849fff8300d9010280d90102809fff82809fff81a0", + 
"820183851a2d964a095820be06c81f4ad34d98578b67840d8e65b2aeb148469b290f6b5235e41b75d3857284830058200e5751c026e543b2e8ab2eb06099daa1d1e5df47778f7787faab45cdf12fe3a85820afc0da64183bf2664f3d4eec7238d524ba607faeeab24fc100eb861dba69971b8300582025777aca9e4a73d48fc73b4f961d345b06d4a6f349cb7916570d35537d53479f5820d36a2619a672494604e11bb447cbcf5231e9f2ba25c2169177edc941bd50ad6c5820afc0da64183bf2664f3d4eec7238d524ba607faeeab24fc100eb861dba69971b58204e66280cd94d591072349bec0a3090a53aa945562efb6d08d56e53654b0e4098848200045840d2965c869901231798c5d02d39fca2a79aa47c3e854921b5855c82fd1470891517e1fa771655ec8cad13ecf6e5719adc5392fc057e1703d5f583311e837462f1810582028284005840d2965c869901231798c5d02d39fca2a79aa47c3e854921b5855c82fd1470891517e1fa771655ec8cad13ecf6e5719adc5392fc057e1703d5f583311e837462f158409180d818e69cd997e34663c418a648c076f2e19cd4194e486e159d8580bc6cda81344440c6ad0e5306fd035bef9281da5d8fbd38f59f588f7081016ee61113d25840cf6ddc111545f61c2442b68bd7864ea952c428d145438948ef48a4af7e3f49b175564007685be5ae3c9ece0ab27de09721db0cb63aa67dc081a9f82d7e84210d58409f9649c57d902a9fe94208b40eb31ffb4d703e5692c16bcd3a4370b448b4597edaa66f3e4f3bd5858d8e6a57cc0734ec04174d13cbc62eabe64af49271245f068483000000826a63617264616e6f2d736c00a058204ba92aa320c60acc9ad7b9a64f2eda55c4d2ec28e604faf186708b4f0c4e8edf849fff8300d9010280d90102809fff82809fff81a0", + "820183851a2d964a09582046debe49b4fe0bc8c07cfe650de89632ca1ab5d58f04f8c88d8102da7ef79b7f84830058200e5751c026e543b2e8ab2eb06099daa1d1e5df47778f7787faab45cdf12fe3a85820afc0da64183bf2664f3d4eec7238d524ba607faeeab24fc100eb861dba69971b8300582025777aca9e4a73d48fc73b4f961d345b06d4a6f349cb7916570d35537d53479f5820d36a2619a672494604e11bb447cbcf5231e9f2ba25c2169177edc941bd50ad6c5820afc0da64183bf2664f3d4eec7238d524ba607faeeab24fc100eb861dba69971b58204e66280cd94d591072349bec0a3090a53aa945562efb6d08d56e53654b0e4098848200055840993a8f056d2d3e50b0ac60139f10df8f8123d5f7c4817b40dac2b5dd8aa94a82e8536832e6312ddfc0787d7b5310c815655ada4fdbcf6b12297d4458eccc2dfb810682028284005840993a8f056d2d3e50b0ac60139f10df8f8123d5f7c4817b40dac2b5dd8aa94a82e8536832e6312ddfc0787d7b5310c815655ada4fdbcf6b12297d4458eccc2dfb584089c29f8c4af27b7accbe589747820134ebbaa1caf3ce949270a3d0c7dcfd541b1def326d2ef0db780341c9e261f04890cdeef1f9c99f6d90b8edca7d3cfc09885840496b29b5c57e8ac7cffc6e8b5e40b3d260e407ad4d09792decb0a22d54da7f8828265688a18aa1a5c76d9e7477a5f4a650501409fdcd3855b300fd2e2bc3c6055840b3bea437aa37a2abdc1a35d9ff01cddb387c543d8034c565dc18525ccd16a0f761d3556d8b90add263db77ee6200aebd6ec2fcc2ec20153f9227b07053a7a50a8483000000826a63617264616e6f2d736c00a058204ba92aa320c60acc9ad7b9a64f2eda55c4d2ec28e604faf186708b4f0c4e8edf849fff8300d9010280d90102809fff82809fff81a0", + 
"820183851a2d964a095820365201e928da50760fce4bdad09a7338ba43a43aff1c0e8d3ec458388c932ec884830058200e5751c026e543b2e8ab2eb06099daa1d1e5df47778f7787faab45cdf12fe3a85820afc0da64183bf2664f3d4eec7238d524ba607faeeab24fc100eb861dba69971b8300582025777aca9e4a73d48fc73b4f961d345b06d4a6f349cb7916570d35537d53479f5820d36a2619a672494604e11bb447cbcf5231e9f2ba25c2169177edc941bd50ad6c5820afc0da64183bf2664f3d4eec7238d524ba607faeeab24fc100eb861dba69971b58204e66280cd94d591072349bec0a3090a53aa945562efb6d08d56e53654b0e409884820006584050733161fdafb6c8cb6fae0e25bdf9555105b3678efb08f1775b9e90de4f5c77bcc8cefff8d9011cb278b28fddc86d9bab099656d77a7856c7619108cbf6575281078202828400584050733161fdafb6c8cb6fae0e25bdf9555105b3678efb08f1775b9e90de4f5c77bcc8cefff8d9011cb278b28fddc86d9bab099656d77a7856c7619108cbf657525840e8c03a03c0b2ddbea4195caf39f41e669f7d251ecf221fbb2f275c0a5d7e05d190dcc246f56c8e33ac0037066e2f664ddaa985ea5284082643308dde4f5bfedf5840c8b39f094dc00608acb2d20ff274cb3e0c022ccb0ce558ea7c1a2d3a32cd54b42cc30d32406bcfbb7f2f86d05d2032848be15b178e3ad776f8b1bc56a671400d584077ddc2fe0557a5c0454a7af6f29e39e603907b927aeeab23e18abe0022cf219197a9a359ab07986a6b42a6e970139edd4a36555661274ae3ac27d4e7c509790e8483000000826a63617264616e6f2d736c00a058204ba92aa320c60acc9ad7b9a64f2eda55c4d2ec28e604faf186708b4f0c4e8edf849fff8300d9010280d90102809fff82809fff81a0", + "820183851a2d964a095820e39d988dd815fc2cb234c2abef0d7f57765eeffb67331814bdb01c590359325e84830058200e5751c026e543b2e8ab2eb06099daa1d1e5df47778f7787faab45cdf12fe3a85820afc0da64183bf2664f3d4eec7238d524ba607faeeab24fc100eb861dba69971b8300582025777aca9e4a73d48fc73b4f961d345b06d4a6f349cb7916570d35537d53479f5820d36a2619a672494604e11bb447cbcf5231e9f2ba25c2169177edc941bd50ad6c5820afc0da64183bf2664f3d4eec7238d524ba607faeeab24fc100eb861dba69971b58204e66280cd94d591072349bec0a3090a53aa945562efb6d08d56e53654b0e409884820007584050733161fdafb6c8cb6fae0e25bdf9555105b3678efb08f1775b9e90de4f5c77bcc8cefff8d9011cb278b28fddc86d9bab099656d77a7856c7619108cbf6575281088202828400584050733161fdafb6c8cb6fae0e25bdf9555105b3678efb08f1775b9e90de4f5c77bcc8cefff8d9011cb278b28fddc86d9bab099656d77a7856c7619108cbf657525840e8c03a03c0b2ddbea4195caf39f41e669f7d251ecf221fbb2f275c0a5d7e05d190dcc246f56c8e33ac0037066e2f664ddaa985ea5284082643308dde4f5bfedf5840c8b39f094dc00608acb2d20ff274cb3e0c022ccb0ce558ea7c1a2d3a32cd54b42cc30d32406bcfbb7f2f86d05d2032848be15b178e3ad776f8b1bc56a671400d58405b2f5d0f55ec53bf74a09e2154f7ad56f437a1a9198041e3ec96f5f17a0cfa8c7d71a7871efabd990184b5166b2ac83af0b63bb727fd7157541db7a232ffdc048483000000826a63617264616e6f2d736c00a058204ba92aa320c60acc9ad7b9a64f2eda55c4d2ec28e604faf186708b4f0c4e8edf849fff8300d9010280d90102809fff82809fff81a0", + 
"820183851a2d964a0958202d9136c363c69ad07e1a918de2ff5aeeba4361e33b9c2597511874f211ca26e984830058200e5751c026e543b2e8ab2eb06099daa1d1e5df47778f7787faab45cdf12fe3a85820afc0da64183bf2664f3d4eec7238d524ba607faeeab24fc100eb861dba69971b8300582025777aca9e4a73d48fc73b4f961d345b06d4a6f349cb7916570d35537d53479f5820d36a2619a672494604e11bb447cbcf5231e9f2ba25c2169177edc941bd50ad6c5820afc0da64183bf2664f3d4eec7238d524ba607faeeab24fc100eb861dba69971b58204e66280cd94d591072349bec0a3090a53aa945562efb6d08d56e53654b0e4098848200085840d2965c869901231798c5d02d39fca2a79aa47c3e854921b5855c82fd1470891517e1fa771655ec8cad13ecf6e5719adc5392fc057e1703d5f583311e837462f1810982028284005840d2965c869901231798c5d02d39fca2a79aa47c3e854921b5855c82fd1470891517e1fa771655ec8cad13ecf6e5719adc5392fc057e1703d5f583311e837462f158409180d818e69cd997e34663c418a648c076f2e19cd4194e486e159d8580bc6cda81344440c6ad0e5306fd035bef9281da5d8fbd38f59f588f7081016ee61113d25840cf6ddc111545f61c2442b68bd7864ea952c428d145438948ef48a4af7e3f49b175564007685be5ae3c9ece0ab27de09721db0cb63aa67dc081a9f82d7e84210d58407b26babee8ad96bf5cdd20cac799ca56c90b6ff9df1f1140f50f021063f719e3791f22be92353a8ae16045b0d52a51c8b1219ce782fd4198cf15b745348021018483000000826a63617264616e6f2d736c00a058204ba92aa320c60acc9ad7b9a64f2eda55c4d2ec28e604faf186708b4f0c4e8edf849fff8300d9010280d90102809fff82809fff81a0", + ]; + + fn test_block_info(bytes: &[u8]) -> BlockInfo { + let block = MultiEraBlock::decode(bytes).unwrap(); + let genesis = GenesisValues::mainnet(); + let (epoch, epoch_slot) = block.epoch(&genesis); + let timestamp = block.wallclock(&genesis); + BlockInfo { + status: acropolis_common::BlockStatus::Immutable, + slot: block.slot(), + number: block.number(), + hash: BlockHash(*block.hash()), + epoch, + epoch_slot, + new_epoch: false, + timestamp, + era: acropolis_common::Era::Conway, + } + } + + fn test_block_bytes() -> Vec { + hex::decode(TEST_BLOCK).unwrap() + } + + fn test_block_range_bytes(count: usize) -> Vec> { + TEST_BLOCKS[0..count].iter().map(|b| hex::decode(b).unwrap()).collect() + } + + fn build_block(info: &BlockInfo, bytes: &[u8]) -> Block { + let extra = ExtraBlockData { + epoch: info.epoch, + epoch_slot: info.epoch_slot, + timestamp: info.timestamp, + }; + Block { + bytes: bytes.to_vec(), + extra, + } + } + + struct TestState { + #[expect(unused)] + dir: TempDir, + store: FjallStore, + } + + fn init_state() -> TestState { + let dir = tempfile::tempdir().unwrap(); + let dir_name = dir.path().to_str().expect("dir_name cannot be stored as string"); + let config = + Config::builder().set_default("database-path", dir_name).unwrap().build().unwrap(); + let store = FjallStore::new(Arc::new(config)).unwrap(); + TestState { dir, store } + } + + #[test] + fn should_get_block_by_hash() { + let state = init_state(); + let bytes = test_block_bytes(); + let info = test_block_info(&bytes); + let block = build_block(&info, &bytes); + + state.store.insert_block(&info, &bytes).unwrap(); + + let new_block = state.store.get_block_by_hash(&info.hash.as_ref()).unwrap(); + assert_eq!(block, new_block.unwrap()); + } + + #[test] + fn should_not_error_when_block_not_found() { + let state = init_state(); + let new_block = state.store.get_block_by_hash(&[0xfa, 0x15, 0xe]).unwrap(); + assert_eq!(new_block, None); + } + + #[test] + fn should_get_block_by_slot() { + let state = init_state(); + let bytes = test_block_bytes(); + let info = test_block_info(&bytes); + let block = build_block(&info, &bytes); + + state.store.insert_block(&info, &bytes).unwrap(); + + let new_block = 
state.store.get_block_by_slot(info.slot).unwrap(); + assert_eq!(block, new_block.unwrap()); + } + + #[test] + fn should_get_block_by_number() { + let state = init_state(); + let bytes = test_block_bytes(); + let info = test_block_info(&bytes); + let block = build_block(&info, &bytes); + + state.store.insert_block(&info, &bytes).unwrap(); + + let new_block = state.store.get_block_by_number(info.number).unwrap(); + assert_eq!(block, new_block.unwrap()); + } + + #[test] + fn should_get_blocks_by_number_range() { + let state = init_state(); + let blocks_bytes = test_block_range_bytes(6); + let mut blocks = Vec::new(); + for bytes in blocks_bytes { + let info = test_block_info(&bytes); + blocks.push(build_block(&info, &bytes)); + state.store.insert_block(&info, &bytes).unwrap(); + } + let new_blocks = state.store.get_blocks_by_number_range(2, 4).unwrap(); + assert_eq!(blocks[1], new_blocks[0]); + assert_eq!(blocks[2], new_blocks[1]); + assert_eq!(blocks[3], new_blocks[2]); + } + + #[test] + fn should_get_block_by_epoch_slot() { + let state = init_state(); + let bytes = test_block_bytes(); + let info = test_block_info(&bytes); + let block = build_block(&info, &bytes); + + state.store.insert_block(&info, &bytes).unwrap(); + + let new_block = state.store.get_block_by_epoch_slot(info.epoch, info.epoch_slot).unwrap(); + assert_eq!(block, new_block.unwrap()); + } + + #[test] + fn should_get_latest_block() { + let state = init_state(); + let bytes = test_block_bytes(); + let info = test_block_info(&bytes); + let block = build_block(&info, &bytes); + + state.store.insert_block(&info, &bytes).unwrap(); + + let new_block = state.store.get_latest_block().unwrap(); + assert_eq!(block, new_block.unwrap()); + } +} diff --git a/modules/chain_store/src/stores/mod.rs b/modules/chain_store/src/stores/mod.rs new file mode 100644 index 00000000..866c6199 --- /dev/null +++ b/modules/chain_store/src/stores/mod.rs @@ -0,0 +1,38 @@ +use acropolis_common::{BlockInfo, TxHash}; +use anyhow::{Context, Result}; + +pub mod fjall; + +pub trait Store: Send + Sync { + fn insert_block(&self, info: &BlockInfo, block: &[u8]) -> Result<()>; + + fn get_block_by_hash(&self, hash: &[u8]) -> Result<Option<Block>>; + fn get_block_by_slot(&self, slot: u64) -> Result<Option<Block>>; + fn get_block_by_number(&self, number: u64) -> Result<Option<Block>>; + fn get_blocks_by_number_range(&self, min_number: u64, max_number: u64) -> Result<Vec<Block>>; + fn get_block_by_epoch_slot(&self, epoch: u64, epoch_slot: u64) -> Result<Option<Block>>; + fn get_latest_block(&self) -> Result<Option<Block>>; +} + +#[derive(Debug, PartialEq, Eq, minicbor::Decode, minicbor::Encode)] +pub struct Block { + #[n(0)] + pub bytes: Vec<u8>, + #[n(1)] + pub extra: ExtraBlockData, +} + +#[derive(Debug, PartialEq, Eq, minicbor::Decode, minicbor::Encode)] +pub struct ExtraBlockData { + #[n(0)] + pub epoch: u64, + #[n(1)] + pub epoch_slot: u64, + #[n(2)] + pub timestamp: u64, +} + +pub(crate) fn extract_tx_hashes(block: &[u8]) -> Result<Vec<TxHash>> { + let block = pallas_traverse::MultiEraBlock::decode(block).context("could not decode block")?; + Ok(block.txs().into_iter().map(|tx| TxHash(*tx.hash())).collect()) +} diff --git a/modules/epochs_state/src/state.rs b/modules/epochs_state/src/state.rs index 0a46c07a..417d8804 100644 --- a/modules/epochs_state/src/state.rs +++ b/modules/epochs_state/src/state.rs @@ -134,7 +134,7 @@ impl State { let evolving = Nonces::evolve(&current_nonces.evolving, &nonce_vrf_output)?; // there must be parent hash - let Some(parent_hash) = header.previous_hash().map(|h| *h as BlockHash) else { + let Some(parent_hash) = 
header.previous_hash().map(|h| BlockHash(*h)) else { return Err(anyhow::anyhow!("Header Parent hash error")); }; diff --git a/modules/genesis_bootstrapper/src/genesis_bootstrapper.rs b/modules/genesis_bootstrapper/src/genesis_bootstrapper.rs index b256954c..06b7fe88 100644 --- a/modules/genesis_bootstrapper/src/genesis_bootstrapper.rs +++ b/modules/genesis_bootstrapper/src/genesis_bootstrapper.rs @@ -8,7 +8,7 @@ use acropolis_common::{ UTXODeltasMessage, }, Address, BlockHash, BlockInfo, BlockStatus, ByronAddress, Era, Lovelace, LovelaceDelta, Pot, - PotDelta, TxIdentifier, TxOutRef, TxOutput, UTXODelta, UTxOIdentifier, Value, + PotDelta, TxHash, TxIdentifier, TxOutRef, TxOutput, UTXODelta, UTxOIdentifier, Value, }; use anyhow::Result; use blake2::{digest::consts::U32, Blake2b, Digest}; @@ -141,7 +141,7 @@ impl GenesisBootstrapper { let mut total_allocated: u64 = 0; for (tx_index, (hash, address, amount)) in gen_utxos.iter().enumerate() { let tx_identifier = TxIdentifier::new(0, tx_index as u16); - let tx_ref = TxOutRef::new(**hash, 0); + let tx_ref = TxOutRef::new(TxHash(**hash), 0); gen_utxo_identifiers.push((tx_ref, tx_identifier)); diff --git a/modules/mithril_snapshot_fetcher/src/mithril_snapshot_fetcher.rs b/modules/mithril_snapshot_fetcher/src/mithril_snapshot_fetcher.rs index 20804f2f..0b9fabad 100644 --- a/modules/mithril_snapshot_fetcher/src/mithril_snapshot_fetcher.rs +++ b/modules/mithril_snapshot_fetcher/src/mithril_snapshot_fetcher.rs @@ -4,7 +4,7 @@ use acropolis_common::{ genesis_values::GenesisValues, messages::{BlockBodyMessage, BlockHeaderMessage, CardanoMessage, Message}, - BlockInfo, BlockStatus, Era, + BlockHash, BlockInfo, BlockStatus, Era, }; use anyhow::{anyhow, bail, Result}; use caryatid_sdk::{module, Context, Module}; @@ -324,7 +324,7 @@ impl MithrilSnapshotFetcher { status: BlockStatus::Immutable, slot, number, - hash: *block.hash(), + hash: BlockHash(*block.hash()), epoch, epoch_slot, new_epoch, diff --git a/modules/parameters_state/Cargo.toml b/modules/parameters_state/Cargo.toml index 3b36030e..f020a06b 100644 --- a/modules/parameters_state/Cargo.toml +++ b/modules/parameters_state/Cargo.toml @@ -26,6 +26,7 @@ serde_json = { workspace = true } serde_with = { workspace = true } tokio = { workspace = true } tracing = { workspace = true } +base64 = "0.22.1" [build-dependencies] reqwest = { version = "0.11", features = ["blocking"] } diff --git a/modules/parameters_state/src/genesis_params.rs b/modules/parameters_state/src/genesis_params.rs index 67251a96..d8327c8f 100644 --- a/modules/parameters_state/src/genesis_params.rs +++ b/modules/parameters_state/src/genesis_params.rs @@ -3,9 +3,10 @@ use acropolis_common::{ protocol_params::{AlonzoParams, BabbageParams, ByronParams, ConwayParams, ShelleyParams}, rational_number::{rational_number_from_f32, RationalNumber}, Anchor, BlockVersionData, Committee, Constitution, CostModel, Credential, DRepVotingThresholds, - Era, PoolVotingThresholds, ProtocolConsts, SoftForkRule, TxFeePolicy, + Era, HeavyDelegate, PoolVotingThresholds, ProtocolConsts, SoftForkRule, TxFeePolicy, }; use anyhow::{anyhow, bail, Result}; +use base64::prelude::*; use hex::decode; use pallas::ledger::configs::*; use serde::Deserialize; @@ -178,11 +179,25 @@ fn map_protocol_consts(c: &byron::ProtocolConsts) -> Result { } fn map_byron(genesis: &byron::GenesisFile) -> Result { + let heavy_delegation = genesis + .heavy_delegation + .iter() + .map(|(k, v)| { + let k = hex::decode(k)?; + let v = HeavyDelegate { + cert: hex::decode(v.cert.clone())?, + 
delegate_pk: BASE64_STANDARD.decode(v.delegate_pk.clone())?, + issuer_pk: BASE64_STANDARD.decode(v.issuer_pk.clone())?, + }; + Ok::<(Vec<u8>, HeavyDelegate), anyhow::Error>((k, v)) + }) + .collect::<Result<_, _>>()?; Ok(ByronParams { block_version_data: map_block_version_data(&genesis.block_version_data)?, fts_seed: genesis.fts_seed.as_ref().map(|s| decode_hex_string(s, 42)).transpose()?, protocol_consts: map_protocol_consts(&genesis.protocol_consts)?, start_time: genesis.start_time, + heavy_delegation, }) } diff --git a/modules/rest_blockfrost/src/handlers/blocks.rs b/modules/rest_blockfrost/src/handlers/blocks.rs new file mode 100644 index 00000000..205fdab5 --- /dev/null +++ b/modules/rest_blockfrost/src/handlers/blocks.rs @@ -0,0 +1,578 @@ +//! REST handlers for Acropolis Blockfrost /blocks endpoints +use acropolis_common::{ + extract_strict_query_params, + messages::{Message, RESTResponse, StateQuery, StateQueryResponse}, + queries::{ + blocks::{BlockKey, BlocksStateQuery, BlocksStateQueryResponse}, + misc::Order, + utils::rest_query_state, + }, + BlockHash, +}; +use anyhow::{anyhow, Result}; +use caryatid_sdk::Context; +use std::collections::HashMap; +use std::sync::Arc; + +use crate::handlers_config::HandlersConfig; +use crate::types::BlockInfoREST; + +fn parse_block_key(key: &str) -> Result<BlockKey> { + match key.len() { + 64 => match hex::decode(key) { + Ok(key) => match BlockHash::try_from(key) { + Ok(block_hash) => Ok(BlockKey::Hash(block_hash)), + Err(_) => Err(anyhow::Error::msg("Invalid block hash")), + }, + Err(error) => Err(error.into()), + }, + _ => match key.parse::<u64>() { + Ok(key) => Ok(BlockKey::Number(key)), + Err(error) => Err(error.into()), + }, + } +} + +/// Handle `/blocks/latest`, `/blocks/{hash_or_number}` +pub async fn handle_blocks_latest_hash_number_blockfrost( + context: Arc<Context<Message>>, + params: Vec<String>, + handlers_config: Arc<HandlersConfig>, +) -> Result<RESTResponse> { + let param = match params.as_slice() { + [param] => param, + _ => return Ok(RESTResponse::with_text(400, "Invalid parameters")), + }; + + match param.as_str() { + "latest" => handle_blocks_latest_blockfrost(context, handlers_config).await, + _ => handle_blocks_hash_number_blockfrost(context, param, handlers_config).await, + } +} + +/// Handle `/blocks/latest` +async fn handle_blocks_latest_blockfrost( + context: Arc<Context<Message>>, + handlers_config: Arc<HandlersConfig>, +) -> Result<RESTResponse> { + let blocks_latest_msg = Arc::new(Message::StateQuery(StateQuery::Blocks( + BlocksStateQuery::GetLatestBlock, + ))); + rest_query_state( + &context, + &handlers_config.blocks_query_topic, + blocks_latest_msg, + |message| match message { + Message::StateQueryResponse(StateQueryResponse::Blocks( + BlocksStateQueryResponse::LatestBlock(blocks_latest), + )) => Some(Ok(Some(BlockInfoREST(blocks_latest)))), + Message::StateQueryResponse(StateQueryResponse::Blocks( + BlocksStateQueryResponse::Error(e), + )) => Some(Err(anyhow!(e))), + _ => None, + }, + ) + .await +} + +/// Handle `/blocks/{hash_or_number}` +async fn handle_blocks_hash_number_blockfrost( + context: Arc<Context<Message>>, + hash_or_number: &str, + handlers_config: Arc<HandlersConfig>, +) -> Result<RESTResponse> { + let block_key = match parse_block_key(hash_or_number) { + Ok(block_key) => block_key, + Err(error) => return Err(error), + }; + + let block_info_msg = Arc::new(Message::StateQuery(StateQuery::Blocks( + BlocksStateQuery::GetBlockInfo { block_key }, + ))); + rest_query_state( + &context, + &handlers_config.blocks_query_topic, + block_info_msg, + |message| match message { + Message::StateQueryResponse(StateQueryResponse::Blocks( + BlocksStateQueryResponse::BlockInfo(block_info), + )) => 
Some(Ok(Some(BlockInfoREST(block_info)))), + Message::StateQueryResponse(StateQueryResponse::Blocks( + BlocksStateQueryResponse::NotFound, + )) => Some(Ok(None)), + Message::StateQueryResponse(StateQueryResponse::Blocks( + BlocksStateQueryResponse::Error(e), + )) => Some(Err(anyhow!(e))), + _ => None, + }, + ) + .await +} + +/// Handle `/blocks/latest/txs`, `/blocks/{hash_or_number}/txs` +pub async fn handle_blocks_latest_hash_number_transactions_blockfrost( + context: Arc>, + params: Vec, + query_params: HashMap, + handlers_config: Arc, +) -> Result { + let param = match params.as_slice() { + [param] => param, + _ => return Ok(RESTResponse::with_text(400, "Invalid parameters")), + }; + + extract_strict_query_params!(query_params, { + "count" => limit: Option, + "page" => page: Option, + "order" => order: Option, + }); + let limit = limit.unwrap_or(100); + let skip = (page.unwrap_or(1) - 1) * limit; + let order = order.unwrap_or(Order::Asc); + + match param.as_str() { + "latest" => { + handle_blocks_latest_transactions_blockfrost( + context, + limit, + skip, + order, + handlers_config, + ) + .await + } + _ => { + handle_blocks_hash_number_transactions_blockfrost( + context, + param, + limit, + skip, + order, + handlers_config, + ) + .await + } + } +} + +/// Handle `/blocks/latest/txs` +async fn handle_blocks_latest_transactions_blockfrost( + context: Arc>, + limit: u64, + skip: u64, + order: Order, + handlers_config: Arc, +) -> Result { + let blocks_latest_txs_msg = Arc::new(Message::StateQuery(StateQuery::Blocks( + BlocksStateQuery::GetLatestBlockTransactions { limit, skip, order }, + ))); + rest_query_state( + &context, + &handlers_config.blocks_query_topic, + blocks_latest_txs_msg, + |message| match message { + Message::StateQueryResponse(StateQueryResponse::Blocks( + BlocksStateQueryResponse::LatestBlockTransactions(blocks_txs), + )) => Some(Ok(Some(blocks_txs))), + Message::StateQueryResponse(StateQueryResponse::Blocks( + BlocksStateQueryResponse::Error(e), + )) => Some(Err(anyhow!(e))), + _ => None, + }, + ) + .await +} + +/// Handle `/blocks/{hash_or_number}/txs` +async fn handle_blocks_hash_number_transactions_blockfrost( + context: Arc>, + hash_or_number: &str, + limit: u64, + skip: u64, + order: Order, + handlers_config: Arc, +) -> Result { + let block_key = match parse_block_key(hash_or_number) { + Ok(block_key) => block_key, + Err(error) => return Err(error), + }; + + let block_txs_msg = Arc::new(Message::StateQuery(StateQuery::Blocks( + BlocksStateQuery::GetBlockTransactions { + block_key, + limit, + skip, + order, + }, + ))); + rest_query_state( + &context, + &handlers_config.blocks_query_topic, + block_txs_msg, + |message| match message { + Message::StateQueryResponse(StateQueryResponse::Blocks( + BlocksStateQueryResponse::BlockTransactions(block_txs), + )) => Some(Ok(Some(block_txs))), + Message::StateQueryResponse(StateQueryResponse::Blocks( + BlocksStateQueryResponse::NotFound, + )) => Some(Ok(None)), + Message::StateQueryResponse(StateQueryResponse::Blocks( + BlocksStateQueryResponse::Error(e), + )) => Some(Err(anyhow!(e))), + _ => None, + }, + ) + .await +} + +/// Handle `/blocks/latest/txs/cbor`, `/blocks/{hash_or_number}/txs/cbor` +pub async fn handle_blocks_latest_hash_number_transactions_cbor_blockfrost( + context: Arc>, + params: Vec, + query_params: HashMap, + handlers_config: Arc, +) -> Result { + let param = match params.as_slice() { + [param] => param, + _ => return Ok(RESTResponse::with_text(400, "Invalid parameters")), + }; + + 
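+ // Blockfrost-style pagination: `count` is the page size (default 100) and `page` is 1-based,
+ // so `skip = (page - 1) * count` entries are passed over before results are returned.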
extract_strict_query_params!(query_params, { + "count" => limit: Option, + "page" => page: Option, + "order" => order: Option, + }); + let limit = limit.unwrap_or(100); + let skip = (page.unwrap_or(1) - 1) * limit; + let order = order.unwrap_or(Order::Asc); + + match param.as_str() { + "latest" => { + handle_blocks_latest_transactions_cbor_blockfrost( + context, + limit, + skip, + order, + handlers_config, + ) + .await + } + _ => { + handle_blocks_hash_number_transactions_cbor_blockfrost( + context, + param, + limit, + skip, + order, + handlers_config, + ) + .await + } + } +} + +/// Handle `/blocks/latest/txs/cbor` +async fn handle_blocks_latest_transactions_cbor_blockfrost( + context: Arc>, + limit: u64, + skip: u64, + order: Order, + handlers_config: Arc, +) -> Result { + let blocks_latest_txs_msg = Arc::new(Message::StateQuery(StateQuery::Blocks( + BlocksStateQuery::GetLatestBlockTransactionsCBOR { limit, skip, order }, + ))); + rest_query_state( + &context, + &handlers_config.blocks_query_topic, + blocks_latest_txs_msg, + |message| match message { + Message::StateQueryResponse(StateQueryResponse::Blocks( + BlocksStateQueryResponse::LatestBlockTransactionsCBOR(blocks_txs), + )) => Some(Ok(Some(blocks_txs))), + Message::StateQueryResponse(StateQueryResponse::Blocks( + BlocksStateQueryResponse::Error(e), + )) => Some(Err(anyhow!(e))), + _ => None, + }, + ) + .await +} + +/// Handle `/blocks/{hash_or_number}/txs/cbor` +async fn handle_blocks_hash_number_transactions_cbor_blockfrost( + context: Arc>, + hash_or_number: &str, + limit: u64, + skip: u64, + order: Order, + handlers_config: Arc, +) -> Result { + let block_key = match parse_block_key(hash_or_number) { + Ok(block_key) => block_key, + Err(error) => return Err(error), + }; + + let block_txs_cbor_msg = Arc::new(Message::StateQuery(StateQuery::Blocks( + BlocksStateQuery::GetBlockTransactionsCBOR { + block_key, + limit, + skip, + order, + }, + ))); + rest_query_state( + &context, + &handlers_config.blocks_query_topic, + block_txs_cbor_msg, + |message| match message { + Message::StateQueryResponse(StateQueryResponse::Blocks( + BlocksStateQueryResponse::BlockTransactionsCBOR(block_txs_cbor), + )) => Some(Ok(Some(block_txs_cbor))), + Message::StateQueryResponse(StateQueryResponse::Blocks( + BlocksStateQueryResponse::NotFound, + )) => Some(Ok(None)), + Message::StateQueryResponse(StateQueryResponse::Blocks( + BlocksStateQueryResponse::Error(e), + )) => Some(Err(anyhow!(e))), + _ => None, + }, + ) + .await +} + +/// Handle `/blocks/{hash_or_number}/next` +pub async fn handle_blocks_hash_number_next_blockfrost( + context: Arc>, + params: Vec, + query_params: HashMap, + handlers_config: Arc, +) -> Result { + let param = match params.as_slice() { + [param] => param, + _ => return Ok(RESTResponse::with_text(400, "Invalid parameters")), + }; + + let block_key = match parse_block_key(param) { + Ok(block_key) => block_key, + Err(error) => return Err(error), + }; + + extract_strict_query_params!(query_params, { + "count" => limit: Option, + "page" => page: Option, + }); + let limit = limit.unwrap_or(100); + let skip = (page.unwrap_or(1) - 1) * limit; + + let blocks_next_msg = Arc::new(Message::StateQuery(StateQuery::Blocks( + BlocksStateQuery::GetNextBlocks { + block_key, + limit, + skip, + }, + ))); + rest_query_state( + &context, + &handlers_config.blocks_query_topic, + blocks_next_msg, + |message| match message { + Message::StateQueryResponse(StateQueryResponse::Blocks( + BlocksStateQueryResponse::NextBlocks(blocks_next), + )) => 
Some(Ok(Some(blocks_next))), + Message::StateQueryResponse(StateQueryResponse::Blocks( + BlocksStateQueryResponse::NotFound, + )) => Some(Ok(None)), + Message::StateQueryResponse(StateQueryResponse::Blocks( + BlocksStateQueryResponse::Error(e), + )) => Some(Err(anyhow!(e))), + _ => None, + }, + ) + .await +} + +/// Handle `/blocks/{hash_or_number}/previous` +pub async fn handle_blocks_hash_number_previous_blockfrost( + context: Arc>, + params: Vec, + query_params: HashMap, + handlers_config: Arc, +) -> Result { + let param = match params.as_slice() { + [param] => param, + _ => return Ok(RESTResponse::with_text(400, "Invalid parameters")), + }; + + let block_key = match parse_block_key(param) { + Ok(block_key) => block_key, + Err(error) => return Err(error), + }; + + extract_strict_query_params!(query_params, { + "count" => limit: Option, + "page" => page: Option, + }); + let limit = limit.unwrap_or(100); + let skip = (page.unwrap_or(1) - 1) * limit; + + let blocks_previous_msg = Arc::new(Message::StateQuery(StateQuery::Blocks( + BlocksStateQuery::GetPreviousBlocks { + block_key, + limit, + skip, + }, + ))); + rest_query_state( + &context, + &handlers_config.blocks_query_topic, + blocks_previous_msg, + |message| match message { + Message::StateQueryResponse(StateQueryResponse::Blocks( + BlocksStateQueryResponse::PreviousBlocks(blocks_previous), + )) => Some(Ok(Some(blocks_previous))), + Message::StateQueryResponse(StateQueryResponse::Blocks( + BlocksStateQueryResponse::NotFound, + )) => Some(Ok(None)), + Message::StateQueryResponse(StateQueryResponse::Blocks( + BlocksStateQueryResponse::Error(e), + )) => Some(Err(anyhow!(e))), + _ => None, + }, + ) + .await +} + +/// Handle `/blocks/slot/{slot_number}` +pub async fn handle_blocks_slot_blockfrost( + context: Arc>, + params: Vec, + handlers_config: Arc, +) -> Result { + let slot = match params.as_slice() { + [param] => param, + _ => return Ok(RESTResponse::with_text(400, "Invalid parameters")), + }; + + let slot = match slot.parse::() { + Ok(slot) => slot, + Err(error) => return Err(error.into()), + }; + + let block_slot_msg = Arc::new(Message::StateQuery(StateQuery::Blocks( + BlocksStateQuery::GetBlockBySlot { slot }, + ))); + rest_query_state( + &context, + &handlers_config.blocks_query_topic, + block_slot_msg, + |message| match message { + Message::StateQueryResponse(StateQueryResponse::Blocks( + BlocksStateQueryResponse::BlockBySlot(block_info), + )) => Some(Ok(Some(block_info))), + Message::StateQueryResponse(StateQueryResponse::Blocks( + BlocksStateQueryResponse::NotFound, + )) => Some(Ok(None)), + Message::StateQueryResponse(StateQueryResponse::Blocks( + BlocksStateQueryResponse::Error(e), + )) => Some(Err(anyhow!(e))), + _ => None, + }, + ) + .await +} + +/// Handle `/blocks/epoch/{epoch_number}/slot/{slot_number}` +pub async fn handle_blocks_epoch_slot_blockfrost( + context: Arc>, + params: Vec, + handlers_config: Arc, +) -> Result { + let (epoch, slot) = match params.as_slice() { + [param1, param2] => (param1, param2), + _ => return Ok(RESTResponse::with_text(400, "Invalid parameters")), + }; + + let epoch = match epoch.parse::() { + Ok(epoch) => epoch, + Err(error) => return Err(error.into()), + }; + + let slot = match slot.parse::() { + Ok(slot) => slot, + Err(error) => return Err(error.into()), + }; + + let block_epoch_slot_msg = Arc::new(Message::StateQuery(StateQuery::Blocks( + BlocksStateQuery::GetBlockByEpochSlot { epoch, slot }, + ))); + rest_query_state( + &context, + &handlers_config.blocks_query_topic, + 
block_epoch_slot_msg, + |message| match message { + Message::StateQueryResponse(StateQueryResponse::Blocks( + BlocksStateQueryResponse::BlockByEpochSlot(block_info), + )) => Some(Ok(Some(block_info))), + Message::StateQueryResponse(StateQueryResponse::Blocks( + BlocksStateQueryResponse::NotFound, + )) => Some(Ok(None)), + Message::StateQueryResponse(StateQueryResponse::Blocks( + BlocksStateQueryResponse::Error(e), + )) => Some(Err(anyhow!(e))), + _ => None, + }, + ) + .await +} + +/// Handle `/blocks/{hash_or_number}/addresses` +pub async fn handle_blocks_hash_number_addresses_blockfrost( + context: Arc>, + params: Vec, + query_params: HashMap, + handlers_config: Arc, +) -> Result { + let param = match params.as_slice() { + [param] => param, + _ => return Ok(RESTResponse::with_text(400, "Invalid parameters")), + }; + + let block_key = match parse_block_key(param) { + Ok(block_key) => block_key, + Err(error) => return Err(error), + }; + + extract_strict_query_params!(query_params, { + "count" => limit: Option, + "page" => page: Option, + }); + let limit = limit.unwrap_or(100); + let skip = (page.unwrap_or(1) - 1) * limit; + + let block_involved_addresses_msg = Arc::new(Message::StateQuery(StateQuery::Blocks( + BlocksStateQuery::GetBlockInvolvedAddresses { + block_key, + limit, + skip, + }, + ))); + rest_query_state( + &context, + &handlers_config.blocks_query_topic, + block_involved_addresses_msg, + |message| match message { + Message::StateQueryResponse(StateQueryResponse::Blocks( + BlocksStateQueryResponse::BlockInvolvedAddresses(block_addresses), + )) => Some(Ok(Some(block_addresses))), + Message::StateQueryResponse(StateQueryResponse::Blocks( + BlocksStateQueryResponse::NotFound, + )) => Some(Ok(None)), + Message::StateQueryResponse(StateQueryResponse::Blocks( + BlocksStateQueryResponse::Error(e), + )) => Some(Err(anyhow!(e))), + _ => None, + }, + ) + .await +} diff --git a/modules/rest_blockfrost/src/handlers/mod.rs b/modules/rest_blockfrost/src/handlers/mod.rs index 008683c4..0ef2a94c 100644 --- a/modules/rest_blockfrost/src/handlers/mod.rs +++ b/modules/rest_blockfrost/src/handlers/mod.rs @@ -1,5 +1,6 @@ pub mod accounts; pub mod assets; +pub mod blocks; pub mod epochs; pub mod governance; pub mod pools; diff --git a/modules/rest_blockfrost/src/handlers_config.rs b/modules/rest_blockfrost/src/handlers_config.rs index 53ea6b79..5eb0d57f 100644 --- a/modules/rest_blockfrost/src/handlers_config.rs +++ b/modules/rest_blockfrost/src/handlers_config.rs @@ -3,6 +3,7 @@ use std::sync::Arc; use acropolis_common::queries::{ accounts::DEFAULT_ACCOUNTS_QUERY_TOPIC, assets::{DEFAULT_ASSETS_QUERY_TOPIC, DEFAULT_OFFCHAIN_TOKEN_REGISTRY_URL}, + blocks::DEFAULT_BLOCKS_QUERY_TOPIC, epochs::DEFAULT_EPOCHS_QUERY_TOPIC, governance::{DEFAULT_DREPS_QUERY_TOPIC, DEFAULT_GOVERNANCE_QUERY_TOPIC}, parameters::DEFAULT_PARAMETERS_QUERY_TOPIC, @@ -17,6 +18,7 @@ const DEFAULT_EXTERNAL_API_TIMEOUT: (&str, i64) = ("external_api_timeout", 3); / pub struct HandlersConfig { pub accounts_query_topic: String, pub assets_query_topic: String, + pub blocks_query_topic: String, pub pools_query_topic: String, pub dreps_query_topic: String, pub governance_query_topic: String, @@ -37,6 +39,10 @@ impl From> for HandlersConfig { .get_string(DEFAULT_ASSETS_QUERY_TOPIC.0) .unwrap_or(DEFAULT_ASSETS_QUERY_TOPIC.1.to_string()); + let blocks_query_topic = config + .get_string(DEFAULT_BLOCKS_QUERY_TOPIC.0) + .unwrap_or(DEFAULT_BLOCKS_QUERY_TOPIC.1.to_string()); + let pools_query_topic = config .get_string(DEFAULT_POOLS_QUERY_TOPIC.0) 
.unwrap_or(DEFAULT_POOLS_QUERY_TOPIC.1.to_string()); @@ -72,6 +78,7 @@ impl From> for HandlersConfig { Self { accounts_query_topic, assets_query_topic, + blocks_query_topic, pools_query_topic, dreps_query_topic, governance_query_topic, diff --git a/modules/rest_blockfrost/src/rest_blockfrost.rs b/modules/rest_blockfrost/src/rest_blockfrost.rs index 745bcaa5..f90d955a 100644 --- a/modules/rest_blockfrost/src/rest_blockfrost.rs +++ b/modules/rest_blockfrost/src/rest_blockfrost.rs @@ -1,10 +1,10 @@ //! Acropolis Blockfrost-Compatible REST Module -use std::{future::Future, sync::Arc}; +use std::{collections::HashMap, future::Future, sync::Arc}; use acropolis_common::{ messages::{Message, RESTResponse}, - rest_helper::handle_rest_with_path_parameter, + rest_helper::{handle_rest_with_path_and_query_parameters, handle_rest_with_path_parameter}, }; use anyhow::Result; use caryatid_sdk::{module, Context, Module}; @@ -21,6 +21,14 @@ use handlers::{ handle_asset_addresses_blockfrost, handle_asset_history_blockfrost, handle_asset_transactions_blockfrost, handle_assets_list_blockfrost, }, + blocks::{ + handle_blocks_epoch_slot_blockfrost, handle_blocks_hash_number_addresses_blockfrost, + handle_blocks_hash_number_next_blockfrost, handle_blocks_hash_number_previous_blockfrost, + handle_blocks_latest_hash_number_blockfrost, + handle_blocks_latest_hash_number_transactions_blockfrost, + handle_blocks_latest_hash_number_transactions_cbor_blockfrost, + handle_blocks_slot_blockfrost, + }, epochs::{ handle_epoch_info_blockfrost, handle_epoch_next_blockfrost, handle_epoch_params_blockfrost, handle_epoch_pool_blocks_blockfrost, handle_epoch_pool_stakes_blockfrost, @@ -53,6 +61,32 @@ use crate::{ const DEFAULT_HANDLE_SINGLE_ACCOUNT_TOPIC: (&str, &str) = ("handle-topic-account-single", "rest.get.accounts.*"); +// Blocks topics +const DEFAULT_HANDLE_BLOCKS_LATEST_HASH_NUMBER_TOPIC: (&str, &str) = + ("handle-blocks-latest-hash-number", "rest.get.blocks.*"); +const DEFAULT_HANDLE_BLOCKS_LATEST_HASH_NUMBER_TRANSACTIONS_TOPIC: (&str, &str) = ( + "handle-blocks-latest-hash-number-transactions", + "rest.get.blocks.*.txs", +); +const DEFAULT_HANDLE_BLOCKS_LATEST_HASH_NUMBER_TRANSACTIONS_CBOR_TOPIC: (&str, &str) = ( + "handle-blocks-latest-hash-number-transactions-cbor", + "rest.get.blocks.*.txs.cbor", +); +const DEFAULT_HANDLE_BLOCKS_HASH_NUMBER_NEXT_TOPIC: (&str, &str) = + ("handle-blocks-hash-number-next", "rest.get.blocks.*.next"); +const DEFAULT_HANDLE_BLOCKS_HASH_NUMBER_PREVIOUS_TOPIC: (&str, &str) = ( + "handle-blocks-hash-number-previous", + "rest.get.blocks.*.previous", +); +const DEFAULT_HANDLE_BLOCKS_SLOT_TOPIC: (&str, &str) = + ("handle-blocks-slot", "rest.get.blocks.slot.*"); +const DEFAULT_HANDLE_BLOCKS_EPOCH_SLOT_TOPIC: (&str, &str) = + ("handle-blocks-epoch-slot", "rest.get.blocks.epoch.*.slot.*"); +const DEFAULT_HANDLE_BLOCKS_HASH_NUMBER_ADDRESSES_TOPIC: (&str, &str) = ( + "handle-blocks-hash-number-addresses", + "rest.get.blocks.*.addresses", +); + // Governance topics const DEFAULT_HANDLE_DREPS_LIST_TOPIC: (&str, &str) = ("handle-topic-dreps-list", "rest.get.governance.dreps"); @@ -193,6 +227,70 @@ impl BlockfrostREST { handle_single_account_blockfrost, ); + // Handler for /blocks/latest, /blocks/{hash_or_number} + register_handler( + context.clone(), + DEFAULT_HANDLE_BLOCKS_LATEST_HASH_NUMBER_TOPIC, + handlers_config.clone(), + handle_blocks_latest_hash_number_blockfrost, + ); + + // Handler for /blocks/latest/txs, /blocks/{hash_or_number}/txs + register_handler_with_query( + context.clone(), + 
DEFAULT_HANDLE_BLOCKS_LATEST_HASH_NUMBER_TRANSACTIONS_TOPIC, + handlers_config.clone(), + handle_blocks_latest_hash_number_transactions_blockfrost, + ); + + // Handler for /blocks/latest/txs/cbor, /blocks/{hash_or_number}/txs/cbor + register_handler_with_query( + context.clone(), + DEFAULT_HANDLE_BLOCKS_LATEST_HASH_NUMBER_TRANSACTIONS_CBOR_TOPIC, + handlers_config.clone(), + handle_blocks_latest_hash_number_transactions_cbor_blockfrost, + ); + + // Handler for /blocks/{hash_or_number}/next + register_handler_with_query( + context.clone(), + DEFAULT_HANDLE_BLOCKS_HASH_NUMBER_NEXT_TOPIC, + handlers_config.clone(), + handle_blocks_hash_number_next_blockfrost, + ); + + // Handler for /blocks/{hash_or_number}/previous + register_handler_with_query( + context.clone(), + DEFAULT_HANDLE_BLOCKS_HASH_NUMBER_PREVIOUS_TOPIC, + handlers_config.clone(), + handle_blocks_hash_number_previous_blockfrost, + ); + + // Handler for /blocks/slot/{slot_number} + register_handler( + context.clone(), + DEFAULT_HANDLE_BLOCKS_SLOT_TOPIC, + handlers_config.clone(), + handle_blocks_slot_blockfrost, + ); + + // Handler for /blocks/epoch/{epoch_number}/slot/{slot_number} + register_handler( + context.clone(), + DEFAULT_HANDLE_BLOCKS_EPOCH_SLOT_TOPIC, + handlers_config.clone(), + handle_blocks_epoch_slot_blockfrost, + ); + + // Handler for /blocks/{hash_or_number}/addresses + register_handler_with_query( + context.clone(), + DEFAULT_HANDLE_BLOCKS_HASH_NUMBER_ADDRESSES_TOPIC, + handlers_config.clone(), + handle_blocks_hash_number_addresses_blockfrost, + ); + // Handler for /governance/dreps register_handler( context.clone(), @@ -503,3 +601,34 @@ fn register_handler( async move { handler_fn(context, params, handlers_config).await } }); } + +fn register_handler_with_query( + context: Arc>, + topic: (&str, &str), + handlers_config: Arc, + handler_fn: F, +) where + F: Fn(Arc>, Vec, HashMap, Arc) -> Fut + + Send + + Sync + + Clone + + 'static, + Fut: Future> + Send + 'static, +{ + let topic_name = context.config.get_string(topic.0).unwrap_or_else(|_| topic.1.to_string()); + + tracing::info!("Creating request handler on '{}'", topic_name); + + handle_rest_with_path_and_query_parameters( + context.clone(), + &topic_name, + move |params, query_params| { + let context = context.clone(); + let handler_fn = handler_fn.clone(); + let params: Vec = params.iter().map(|s| s.to_string()).collect(); + let handlers_config = handlers_config.clone(); + + async move { handler_fn(context, params, query_params, handlers_config).await } + }, + ); +} diff --git a/modules/rest_blockfrost/src/types.rs b/modules/rest_blockfrost/src/types.rs index 6730eaf7..3f505c18 100644 --- a/modules/rest_blockfrost/src/types.rs +++ b/modules/rest_blockfrost/src/types.rs @@ -2,6 +2,7 @@ use crate::cost_models::{PLUTUS_V1, PLUTUS_V2, PLUTUS_V3}; use acropolis_common::{ messages::EpochActivityMessage, protocol_params::{Nonce, NonceVariant, ProtocolParams}, + queries::blocks::BlockInfo, queries::governance::DRepActionUpdate, rest_helper::ToCheckedF64, AssetAddressEntry, AssetMetadataStandard, AssetMintRecord, KeyHash, PolicyAsset, @@ -50,6 +51,10 @@ impl From for EpochActivityRest { } } +// REST response structure for /blocks/latest +#[derive(Serialize)] +pub struct BlockInfoREST(pub BlockInfo); + // REST response structure for /governance/dreps #[derive(Serialize)] pub struct DRepsListREST { diff --git a/modules/tx_unpacker/Cargo.toml b/modules/tx_unpacker/Cargo.toml index f4faf903..8619cb78 100644 --- a/modules/tx_unpacker/Cargo.toml +++ 
b/modules/tx_unpacker/Cargo.toml @@ -10,6 +10,7 @@ license = "Apache-2.0" [dependencies] acropolis_common = { path = "../../common" } +acropolis_codec = { path = "../../codec" } caryatid_sdk = { workspace = true } diff --git a/modules/tx_unpacker/src/tx_unpacker.rs b/modules/tx_unpacker/src/tx_unpacker.rs index 03f28759..d091fe5b 100644 --- a/modules/tx_unpacker/src/tx_unpacker.rs +++ b/modules/tx_unpacker/src/tx_unpacker.rs @@ -1,6 +1,7 @@ //! Acropolis transaction unpacker module for Caryatid //! Unpacks transaction bodies into UTXO events +use acropolis_codec::*; use acropolis_common::{ messages::{ AssetDeltasMessage, BlockTxsMessage, CardanoMessage, GovernanceProceduresMessage, Message, @@ -19,7 +20,6 @@ use pallas::ledger::primitives::KeyValuePairs; use pallas::ledger::{primitives, traverse, traverse::MultiEraTx}; use tracing::{debug, error, info, info_span, Instrument}; -mod map_parameters; mod utxo_registry; use crate::utxo_registry::UTxORegistry; @@ -180,7 +180,7 @@ impl TxUnpacker { for input in inputs { // MultiEraInput // Lookup and remove UTxOIdentifier from registry let oref = input.output_ref(); - let tx_ref = TxOutRef::new(**oref.hash(), oref.index() as u16); + let tx_ref = TxOutRef::new(TxHash(**oref.hash()), oref.index() as u16); match utxo_registry.consume(&tx_ref) { Ok(tx_identifier) => { @@ -267,7 +267,7 @@ impl TxUnpacker { if publish_certificates_topic.is_some() { let tx_hash = tx.hash(); for ( cert_index, cert) in certs.iter().enumerate() { - match map_parameters::map_certificate(&cert, *tx_hash, tx_index, cert_index) { + match map_parameters::map_certificate(&cert, TxHash(*tx_hash), tx_index, cert_index) { Ok(tx_cert) => { certificates.push(tx_cert); }, @@ -340,7 +340,7 @@ impl TxUnpacker { if publish_governance_procedures_topic.is_some() { if let Some(pp) = props { // Nonempty set -- governance_message.proposal_procedures will not be empty - let mut proc_id = GovActionId { transaction_id: *tx.hash(), action_index: 0 }; + let mut proc_id = GovActionId { transaction_id: TxHash(*tx.hash()), action_index: 0 }; for (action_index, pallas_governance_proposals) in pp.iter().enumerate() { match proc_id.set_action_index(action_index) .and_then (|proc_id| map_parameters::map_governance_proposals_procedures(&proc_id, &pallas_governance_proposals)) @@ -354,7 +354,7 @@ impl TxUnpacker { if let Some(pallas_vp) = votes { // Nonempty set -- governance_message.voting_procedures will not be empty match map_parameters::map_all_governance_voting_procedures(pallas_vp) { - Ok(vp) => voting_procedures.push((*tx.hash(), vp)), + Ok(vp) => voting_procedures.push((TxHash(*tx.hash()), vp)), Err(e) => error!("Cannot decode governance voting procedures in slot {}: {e}", block.slot) } } diff --git a/modules/tx_unpacker/src/utxo_registry.rs b/modules/tx_unpacker/src/utxo_registry.rs index be186e4a..d10de175 100644 --- a/modules/tx_unpacker/src/utxo_registry.rs +++ b/modules/tx_unpacker/src/utxo_registry.rs @@ -174,7 +174,7 @@ mod tests { use anyhow::Result; fn make_hash(byte: u8) -> TxHash { - [byte; 32] + TxHash([byte; 32]) } impl UTxORegistry { /// Lookup unspent tx output diff --git a/modules/upstream_chain_fetcher/src/body_fetcher.rs b/modules/upstream_chain_fetcher/src/body_fetcher.rs index afa2a5ee..555df36f 100644 --- a/modules/upstream_chain_fetcher/src/body_fetcher.rs +++ b/modules/upstream_chain_fetcher/src/body_fetcher.rs @@ -3,7 +3,7 @@ use acropolis_common::{ messages::{BlockBodyMessage, BlockHeaderMessage}, - BlockInfo, BlockStatus, Era, + BlockHash, BlockInfo, BlockStatus, Era, }; 
use anyhow::{bail, Result}; use crossbeam::channel::{Receiver, TryRecvError}; @@ -120,7 +120,7 @@ impl BodyFetcher { }, // TODO vary with 'k' slot, number, - hash, + hash: BlockHash(hash), epoch, epoch_slot, new_epoch, diff --git a/processes/omnibus/Cargo.toml b/processes/omnibus/Cargo.toml index 52a81cf6..86d38012 100644 --- a/processes/omnibus/Cargo.toml +++ b/processes/omnibus/Cargo.toml @@ -26,6 +26,7 @@ acropolis_module_rest_blockfrost = { path = "../../modules/rest_blockfrost" } acropolis_module_spdd_state = { path = "../../modules/spdd_state" } acropolis_module_drdd_state = { path = "../../modules/drdd_state" } acropolis_module_assets_state = { path = "../../modules/assets_state" } +acropolis_module_chain_store = { path = "../../modules/chain_store" } caryatid_process = { workspace = true } caryatid_sdk = { workspace = true } diff --git a/processes/omnibus/omnibus.toml b/processes/omnibus/omnibus.toml index 4fa7998e..93097705 100644 --- a/processes/omnibus/omnibus.toml +++ b/processes/omnibus/omnibus.toml @@ -102,6 +102,10 @@ store-addresses = false # Enables /assets/{asset} endpoint (requires store-assets to be enabled) index-by-policy = false +[module.chain-store] +# Clear state on start up (default true) +clear-on-start = true + [module.clock] [module.rest-server] diff --git a/processes/omnibus/src/main.rs b/processes/omnibus/src/main.rs index ecbebffe..4904427d 100644 --- a/processes/omnibus/src/main.rs +++ b/processes/omnibus/src/main.rs @@ -12,6 +12,7 @@ use tracing_subscriber; use acropolis_module_accounts_state::AccountsState; use acropolis_module_assets_state::AssetsState; use acropolis_module_block_unpacker::BlockUnpacker; +use acropolis_module_chain_store::ChainStore; use acropolis_module_drdd_state::DRDDState; use acropolis_module_drep_state::DRepState; use acropolis_module_epochs_state::EpochsState; @@ -103,6 +104,7 @@ pub async fn main() -> Result<()> { BlockfrostREST::register(&mut process); SPDDState::register(&mut process); DRDDState::register(&mut process); + ChainStore::register(&mut process); Clock::::register(&mut process); RESTServer::::register(&mut process);
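Usage sketch (not part of the patch above): the unit tests in modules/chain_store/src/stores/fjall.rs already exercise the new chain store end to end, so the following is only a minimal illustration of driving the `Store` API from Rust. It assumes `FjallStore`, the `Store` trait and `Block` type from `crate::stores`, and an `acropolis_common::BlockInfo` describing the same raw block bytes are visible from the calling code; the helper name `store_and_fetch` is illustrative.

use std::sync::Arc;

use anyhow::Result;
use config::Config;

use crate::stores::{fjall::FjallStore, Store};
use acropolis_common::BlockInfo;

// Illustrative helper: persist one raw block and read it back through the Store trait.
// `info` must describe the same bytes being stored (compare test_block_info in the tests).
fn store_and_fetch(info: &BlockInfo, bytes: &[u8]) -> Result<()> {
    // Same temp-dir plus "database-path" setup as the fjall.rs unit tests.
    let dir = tempfile::tempdir()?;
    let config = Config::builder()
        .set_default("database-path", dir.path().to_str().unwrap())?
        .build()?;
    let store = FjallStore::new(Arc::new(config))?;

    store.insert_block(info, bytes)?;

    // Each secondary index (number, slot, epoch/slot, hash) resolves to the same stored Block.
    let by_number = store.get_block_by_number(info.number)?;
    let by_slot = store.get_block_by_slot(info.slot)?;
    assert_eq!(by_number, by_slot);
    assert_eq!(by_number.map(|b| b.bytes), Some(bytes.to_vec()));
    Ok(())
}

The new /blocks REST handlers do not call this API directly; they publish BlocksStateQuery messages on the configurable blocks query topic and rely on whichever module owns the chain store to answer them.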