Skip to content

Commit

Permalink
feat(): add store histogram metrics to storage and sync (#1200)
Browse files Browse the repository at this point in the history
  • Loading branch information
DvirYo-starkware authored Sep 21, 2023
1 parent ee2e54b commit cd525a5
Show file tree
Hide file tree
Showing 12 changed files with 101 additions and 3 deletions.
7 changes: 7 additions & 0 deletions Cargo.lock

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

1 change: 1 addition & 0 deletions Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -62,6 +62,7 @@ jsonschema = "0.17.0"
lazy_static = "1.4.0"
libmdbx = "0.3.5"
metrics = "0.21.0"
metrics-exporter-prometheus = "0.12.1"
mockall = "0.11.2"
mockito = "0.31.0"
num-bigint = "0.4"
Expand Down
1 change: 1 addition & 0 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -195,6 +195,7 @@ Assuming the node monitoring gateway is exposed at `localhost:8081` one might se
* `metrics`
  get metrics of the node activity. See details below.

<!--- TODO(dvir): add store metrics (storage and sync) --->
### Metrics
Papyrus collects three kinds of metrics: rpc metrics, sync metrics, and process metrics.
By default, the node will not collect metrics and the metric path will return an error code `405 - Method Not Allowed`.
Expand Down
6 changes: 6 additions & 0 deletions crates/papyrus_proc_macros/Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -9,5 +9,11 @@ license-file.workspace = true
syn = "2.0.15"
quote = "1.0.26"

[dev-dependencies]
metrics.workspace = true
metrics-exporter-prometheus.workspace = true
prometheus-parse.workspace = true
test_utils = { path = "../test_utils" }

[lib]
proc-macro = true
48 changes: 45 additions & 3 deletions crates/papyrus_proc_macros/src/lib.rs
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
use proc_macro::TokenStream;
use quote::ToTokens;
use syn::{parse_macro_input, ItemTrait, LitStr, Meta, TraitItem};
use quote::{quote, ToTokens};
use syn::{parse_macro_input, ExprLit, ItemFn, ItemTrait, LitStr, Meta, TraitItem};

/// This macro is a wrapper around the "rpc" macro supplied by the jsonrpsee library that generates
/// a server and client traits from a given trait definition. The wrapper gets a version id and
Expand All @@ -9,21 +9,25 @@ use syn::{parse_macro_input, ItemTrait, LitStr, Meta, TraitItem};
/// able to merge multiple versions of jsonrpc APIs into one server and not have a clash in method
/// resolution.
///
/// Example:
/// # Example:
///
/// Given this code:
/// ```rust,ignore
/// #[versioned_rpc("V0_3_0")]
/// pub trait JsonRpc {
/// #[method(name = "blockNumber")]
/// fn block_number(&self) -> Result<BlockNumber, Error>;
/// }
/// ```
///
/// The macro will generate this code:
/// ```rust,ignore
/// #[rpc(server, client, namespace = "starknet")]
/// pub trait JsonRpcV0_3_0 {
/// #[method(name = "V0_3_0_blockNumber")]
/// fn block_number(&self) -> Result<BlockNumber, Error>;
/// }
/// ```
#[proc_macro_attribute]
pub fn versioned_rpc(attr: TokenStream, input: TokenStream) -> TokenStream {
let version = parse_macro_input!(attr as syn::LitStr);
Expand Down Expand Up @@ -92,3 +96,41 @@ pub fn versioned_rpc(attr: TokenStream, input: TokenStream) -> TokenStream {

versioned_trait.to_token_stream().into()
}

/// This macro will emit a histogram metric with the given name and the latency of the function.
///
/// # Example
/// Given this code:
///
/// ```rust,ignore
/// #[latency_histogram("metric_name")]
/// fn foo() {
/// // Some code ...
/// }
/// ```
/// Every call to foo will update the histogram metric with the name “metric_name” with the time it
/// took to execute foo.
#[proc_macro_attribute]
pub fn latency_histogram(attr: TokenStream, input: TokenStream) -> TokenStream {
let mut input_fn = parse_macro_input!(input as ItemFn);
let metric_name = parse_macro_input!(attr as ExprLit);
let origin_block = &mut input_fn.block;

// Create a new block with the metric update.
let expanded_block = quote! {
{
let start_function_time=std::time::Instant::now();
let return_value=#origin_block;
metrics::histogram!(#metric_name, start_function_time.elapsed().as_secs_f64());
return_value
}
};

// Create a new function with the modified block.
let modified_function = ItemFn {
block: syn::parse2(expanded_block).expect("Parse tokens in latency_histogram attribute."),
..input_fn
};

modified_function.to_token_stream().into()
}
28 changes: 28 additions & 0 deletions crates/papyrus_proc_macros/tests/latency_histogram.rs
Original file line number Diff line number Diff line change
@@ -0,0 +1,28 @@
use metrics_exporter_prometheus::PrometheusBuilder;
use papyrus_proc_macros::latency_histogram;
use prometheus_parse::Value::Untyped;
use test_utils::prometheus_is_contained;

#[test]
fn latency_histogram_test() {
    // Function under test: instrumented so that every call records a sample into the
    // "foo_histogram" metric. The local named `start_function_time` is a deliberate probe —
    // it must not collide with the timer variable the macro injects around the body.
    #[latency_histogram("foo_histogram")]
    fn foo() -> usize {
        #[allow(clippy::let_and_return)]
        let start_function_time = 1000;
        start_function_time
    }

    let recorder = PrometheusBuilder::new().install_recorder().unwrap();

    // Nothing is recorded before the first call.
    assert!(recorder.render().is_empty());

    // The instrumentation does not alter the function's return value.
    assert_eq!(foo(), 1000);

    // Exactly one sample was recorded into the histogram.
    let sample_count = prometheus_is_contained(recorder.render(), "foo_histogram_count", &[]);
    assert_eq!(sample_count, Some(Untyped(1f64)));

    // The recorded sum must be the elapsed time, not the probe's value — i.e. the macro's
    // "start_function_time" timer was not shadowed by the user-level variable of the same name.
    let sample_sum = prometheus_is_contained(recorder.render(), "foo_histogram_sum", &[]);
    assert_ne!(sample_sum, Some(Untyped(1000f64)));
}
2 changes: 2 additions & 0 deletions crates/papyrus_storage/Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -20,9 +20,11 @@ flate2.workspace = true
indexmap = { workspace = true, features = ["serde"] }
integer-encoding.workspace = true
libmdbx = { workspace = true, features = ["lifetimed-bytes"] }
metrics.workspace = true
num-bigint.workspace = true
primitive-types.workspace = true
papyrus_config = { path = "../papyrus_config", version = "0.0.5" }
papyrus_proc_macros = { path = "../papyrus_proc_macros" }
parity-scale-codec.workspace = true
serde = { workspace = true, features = ["derive"] }
serde_json = { workspace = true, features = ["arbitrary_precision"] }
Expand Down
2 changes: 2 additions & 0 deletions crates/papyrus_storage/src/body/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -40,6 +40,7 @@
mod body_test;
pub mod events;

use papyrus_proc_macros::latency_histogram;
use serde::{Deserialize, Serialize};
use starknet_api::block::{BlockBody, BlockNumber};
use starknet_api::core::ContractAddress;
Expand Down Expand Up @@ -268,6 +269,7 @@ impl<'env, Mode: TransactionKind> StorageTxn<'env, Mode> {
}

impl<'env> BodyStorageWriter for StorageTxn<'env, RW> {
#[latency_histogram("storage_append_body_latency_seconds")]
fn append_body(self, block_number: BlockNumber, block_body: BlockBody) -> StorageResult<Self> {
let markers_table = self.txn.open_table(&self.tables.markers)?;
let transactions_table = self.txn.open_table(&self.tables.transactions)?;
Expand Down
2 changes: 2 additions & 0 deletions crates/papyrus_storage/src/compiled_class.rs
Original file line number Diff line number Diff line change
Expand Up @@ -36,6 +36,7 @@
mod casm_test;

use cairo_lang_starknet::casm_contract_class::CasmContractClass;
use papyrus_proc_macros::latency_histogram;
use starknet_api::block::BlockNumber;
use starknet_api::core::ClassHash;
use starknet_api::state::ThinStateDiff;
Expand Down Expand Up @@ -77,6 +78,7 @@ impl<'env, Mode: TransactionKind> CasmStorageReader for StorageTxn<'env, Mode> {
}

impl<'env> CasmStorageWriter for StorageTxn<'env, RW> {
#[latency_histogram("storage_append_casm_latency_seconds")]
fn append_casm(self, class_hash: &ClassHash, casm: &CasmContractClass) -> StorageResult<Self> {
let casm_table = self.txn.open_table(&self.tables.casms)?;
let markers_table = self.txn.open_table(&self.tables.markers)?;
Expand Down
2 changes: 2 additions & 0 deletions crates/papyrus_storage/src/state/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -54,6 +54,7 @@ use std::collections::HashSet;

use cairo_lang_starknet::casm_contract_class::CasmContractClass;
use indexmap::IndexMap;
use papyrus_proc_macros::latency_histogram;
use starknet_api::block::BlockNumber;
use starknet_api::core::{ClassHash, ContractAddress, Nonce};
use starknet_api::deprecated_contract_class::ContractClass as DeprecatedContractClass;
Expand Down Expand Up @@ -377,6 +378,7 @@ impl<'env, Mode: TransactionKind> StateReader<'env, Mode> {
}

impl<'env> StateStorageWriter for StorageTxn<'env, RW> {
#[latency_histogram("storage_append_state_diff_latency_seconds")]
fn append_state_diff(
self,
block_number: BlockNumber,
Expand Down
1 change: 1 addition & 0 deletions crates/papyrus_sync/Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -22,6 +22,7 @@ papyrus_storage = { path = "../papyrus_storage", version = "0.0.5" }
papyrus_base_layer = { path = "../papyrus_base_layer" }
papyrus_common = { path = "../papyrus_common" }
papyrus_config = { path = "../papyrus_config", version = "0.0.5" }
papyrus_proc_macros = { path = "../papyrus_proc_macros" }
reqwest = { workspace = true, features = ["json", "blocking"] }
serde = { workspace = true, features = ["derive"] }
starknet_api.workspace = true
Expand Down
4 changes: 4 additions & 0 deletions crates/papyrus_sync/src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -21,6 +21,7 @@ use papyrus_common::{metrics as papyrus_metrics, BlockHashAndNumber};
use papyrus_config::converters::deserialize_seconds_to_duration;
use papyrus_config::dumping::{ser_param, SerializeConfig};
use papyrus_config::{ParamPath, ParamPrivacyInput, SerializedParam};
use papyrus_proc_macros::latency_histogram;
use papyrus_storage::base_layer::BaseLayerStorageWriter;
use papyrus_storage::body::BodyStorageWriter;
use papyrus_storage::compiled_class::{CasmStorageReader, CasmStorageWriter};
Expand Down Expand Up @@ -341,6 +342,7 @@ impl<
}
}

#[latency_histogram("sync_store_block_latency_seconds")]
#[instrument(skip(self, block), level = "debug", fields(block_hash = %block.header.block_hash), err)]
fn store_block(
&mut self,
Expand Down Expand Up @@ -375,6 +377,7 @@ impl<
Ok(())
}

#[latency_histogram("sync_store_state_diff_latency_seconds")]
#[instrument(skip(self, state_diff, deployed_contract_class_definitions), level = "debug", err)]
fn store_state_diff(
&mut self,
Expand Down Expand Up @@ -405,6 +408,7 @@ impl<
Ok(())
}

#[latency_histogram("sync_store_compiled_class_latency_seconds")]
#[instrument(skip(self, compiled_class), level = "debug", err)]
fn store_compiled_class(
&mut self,
Expand Down

0 comments on commit cd525a5

Please sign in to comment.