Add index logic to record new accounts
DOBEN committed Jul 18, 2024
1 parent 8877fef commit 43a97ad
Showing 6 changed files with 84 additions and 243 deletions.
55 changes: 0 additions & 55 deletions compliant-reward-distribution/README.md

This file was deleted.

70 changes: 0 additions & 70 deletions compliant-reward-distribution/docker-compose.yml

This file was deleted.

2 changes: 0 additions & 2 deletions compliant-reward-distribution/indexer/Cargo.toml
@@ -24,8 +24,6 @@ tonic = { version = "0.10", features = ["tls-roots", "tls"] }
thiserror = "1.0"
chrono = "0.4"
concordium-rust-sdk = { version = "4.2"}
# It is essential that the bump_alloc feature is **not** enabled since it will
# globally set an inefficient allocator.
tokio = { version = "1.35", features = ["rt-multi-thread", "macros", "sync", "signal"] }
tokio-postgres = { version = "0.7", features = [
"with-serde_json-1",
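As a side note on the dependency list above, the `tokio` features that remain (`rt-multi-thread`, `macros`, `sync`, `signal`) are what an indexer binary typically needs for a multi-threaded runtime and graceful shutdown. The sketch below only illustrates that combination; the `run_indexer` placeholder and the shutdown message are assumptions for this example, not code from the repository.

use tokio::signal;

// Placeholder for the indexing loop that lives in `src/bin/indexer.rs`.
async fn run_indexer() {}

#[tokio::main] // requires the `rt-multi-thread` and `macros` features
async fn main() {
    // Run the indexer until Ctrl-C arrives (enabled by the `signal` feature).
    tokio::select! {
        _ = run_indexer() => {}
        _ = signal::ctrl_c() => println!("Received Ctrl-C, shutting down."),
    }
}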
27 changes: 26 additions & 1 deletion compliant-reward-distribution/indexer/resources/schema.sql
@@ -12,7 +12,32 @@ CREATE TABLE IF NOT EXISTS settings (
    latest_processed_block_height INT8
);

-- Table containing item_status_changed_events successfully submitted to the database from the contract monitored.
-- Table containing new accounts created on the blockchain that are eligible
-- for a reward after completing some tasks/checks.
CREATE TABLE IF NOT EXISTS accounts (
    -- Primary key.
    id INT8 PRIMARY KEY,
    -- The account address created on chain.
    account_address BYTEA NOT NULL,
    -- The timestamp of the block in which the account was created on chain.
    block_time TIMESTAMP WITH TIME ZONE NOT NULL,
    -- The hash of the transaction that created the account on chain.
    transaction_hash BYTEA NOT NULL,
    -- A boolean specifying whether the account has already claimed its reward.
    claimed BOOL NOT NULL,
    -- A link to a Twitter post submitted by the above account address (task 1).
    twitter_post_link BYTEA,
    -- A boolean specifying whether the identity associated with the account is eligible for the reward (task 2).
    -- An associated ZK proof was verified by this backend before this flag is set.
    zk_proof_valid BOOL,
    -- A version that specifies the settings used during the ZK proof verification. This enables us
    -- to update the ZK proof verification logic in the future and invalidate older proofs.
    zk_proof_version INT8,
    -- A hash of the revealed `firstName|lastName|passportNumber` to prevent
    -- claiming with different accounts for the same identity.
    uniqueness_hash BYTEA
);

CREATE TABLE IF NOT EXISTS item_status_changed_events (
    -- Primary key.
    id INT8 PRIMARY KEY,
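The `uniqueness_hash` column in the new `accounts` table above ties a claim to a single identity, but this diff does not show how the backend derives it. A minimal sketch follows, assuming a SHA-256 digest over the pipe-separated string `firstName|lastName|passportNumber`; the `sha2` crate and the function name are assumptions for illustration, not part of this commit.

use sha2::{Digest, Sha256};

/// Illustrative only: derive a stable 32-byte hash from the revealed identity
/// attributes so the same passport cannot claim with several accounts.
fn uniqueness_hash(first_name: &str, last_name: &str, passport_number: &str) -> [u8; 32] {
    let mut hasher = Sha256::new();
    // Same `firstName|lastName|passportNumber` layout as described in `schema.sql`.
    hasher.update(format!("{first_name}|{last_name}|{passport_number}").as_bytes());
    hasher.finalize().into()
}

The resulting bytes would be stored in the `uniqueness_hash BYTEA` column and presumably checked against existing rows before a claim is accepted.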
135 changes: 41 additions & 94 deletions compliant-reward-distribution/indexer/src/bin/indexer.rs
@@ -2,19 +2,18 @@
//! postgres database. The database is configured with the tables from the file
//! `../resources/schema.sql`. A table
//! `settings` exists to store global configurations.
use ::indexer::db::{DatabaseError, DatabasePool};
use ::indexer::db::DatabasePool;
use anyhow::Context;
use clap::Parser;
use concordium_rust_sdk::{
indexer::{self, ContractUpdateInfo, ProcessorConfig, TransactionIndexer},
smart_contracts::common::to_bytes,
indexer::{self, ProcessorConfig, TransactionIndexer},
types::{
queries::BlockInfo, smart_contracts::OwnedReceiveName, AbsoluteBlockHeight,
BlockItemSummary, ContractAddress,
queries::BlockInfo, AbsoluteBlockHeight, BlockItemSummary,
BlockItemSummaryDetails::AccountCreation,
},
v2::{self as sdk, Client},
};
use tokio_postgres::types::{Json, ToSql};
use tokio_postgres::types::ToSql;

/// Command line configuration of the application.
#[derive(Debug, clap::Parser)]
@@ -91,94 +90,42 @@ impl indexer::ProcessEvent for StoreEvents {
            .await
            .context("Failed to execute latest_processed_block_height transaction")?;

        // for single_contract_update_info in contract_update_info {
        //     for (_contract_invoked, _entry_point_name, events) in
        //         single_contract_update_info.0.execution_tree.events()
        //     {
        //         for (event_index, event) in events.iter().enumerate() {
        //             // let parsed_event: contract::Event<AdditionalData> = event.parse()?;

        //             // if let contract::Event::<AdditionalData>::ItemStatusChanged(
        //             //     item_status_change_event,
        //             // ) = parsed_event
        //             // {
        //             //     let params: [&(dyn ToSql + Sync); 6] = [
        //             //         &(block_info.block_slot_time),
        //             //         &single_contract_update_info.0.transaction_hash.as_ref(),
        //             //         &(event_index as i64),
        //             //         &(item_status_change_event.item_id.0 as i64),
        //             //         &Json(&item_status_change_event.new_status),
        //             //         &item_status_change_event.additional_data.bytes,
        //             //     ];

        //             //     let statement = db_transaction
        //             //         .prepare_cached(
        //             //             "INSERT INTO item_status_changed_events (id, block_time, \
        //             //              transaction_hash, event_index, item_id, new_status, \
        //             //              additional_data) SELECT COALESCE(MAX(id) + 1, 0), $1, $2, $3, \
        //             //              $4, $5, $6 FROM item_status_changed_events;",
        //             //         )
        //             //         .await
        //             //         .context("Failed to prepare item_status_change_event transaction")?;

        //             //     db_transaction
        //             //         .execute(&statement, &params)
        //             //         .await
        //             //         .context("Failed to execute item_status_change_event transaction")?;

        //             //     tracing::debug!(
        //             //         "Preparing item_status_change_event from block {}, transaction hash \
        //             //          {}, and event index {}.",
        //             //         block_info.block_height,
        //             //         single_contract_update_info.0.transaction_hash,
        //             //         event_index
        //             //     );
        //             // } else if let contract::Event::<AdditionalData>::ItemCreated(
        //             //     item_created_event,
        //             // ) = parsed_event
        //             // {
        //             //     let params: [&(dyn ToSql + Sync); 6] = [
        //             //         &(block_info.block_slot_time),
        //             //         &single_contract_update_info.0.transaction_hash.as_ref(),
        //             //         &(event_index as i64),
        //             //         &(item_created_event.item_id.0 as i64),
        //             //         &to_bytes(&item_created_event.metadata_url),
        //             //         &Json(&item_created_event.initial_status),
        //             //     ];

        //             //     let statement = db_transaction
        //             //         .prepare_cached(
        //             //             "INSERT INTO item_created_events (id, block_time, \
        //             //              transaction_hash, event_index, item_id, metadata_url, \
        //             //              initial_status) SELECT COALESCE(MAX(id) + 1, 0), $1, $2, $3, $4, \
        //             //              $5, $6 FROM item_created_events;",
        //             //         )
        //             //         .await
        //             //         .context("Failed to prepare item_created_event transaction")?;

        //             //     db_transaction
        //             //         .execute(&statement, &params)
        //             //         .await
        //             //         .context("Failed to execute item_created_event transaction")?;

        //             //     tracing::debug!(
        //             //         "Preparing event from block {}, transaction hash {}, and event index \
        //             //          {}.",
        //             //         block_info.block_height,
        //             //         single_contract_update_info.0.transaction_hash,
        //             //         event_index
        //             //     );
        //             // }
        //         }
        //     }
        // }
        for tx in block_items {
            match &tx.details {
                AccountCreation(account_creation_details) => {
                    let params: [&(dyn ToSql + Sync); 4] = [
                        &account_creation_details.address.0.as_ref(),
                        &block_info.block_slot_time,
                        &tx.hash.as_ref(),
                        &false,
                    ];
                    let statement = db_transaction
                        .prepare_cached(
                            "INSERT INTO accounts (id, account_address, \
                             block_time, transaction_hash, claimed) SELECT COALESCE(MAX(id) + 1, \
                             0), $1, $2, $3, $4 FROM accounts;",
                        )
                        .await
                        .context(
                            "Failed to prepare transaction to add a new account to the database",
                        )?;

                    db_transaction.execute(&statement, &params).await.context(
                        "Failed to execute transaction to add a new account to the database",
                    )?;

                    tracing::debug!(
                        "Preparing database transaction for account {} from transaction hash {} \
                         in block {}.",
                        account_creation_details.address,
                        tx.hash,
                        block_info.block_height,
                    );
                }
                _ => continue,
            }
        }

        // Commit the transaction
        db_transaction
            .commit()
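The new loop above records every `AccountCreation` block item with `claimed = false`, assigning row ids via `SELECT COALESCE(MAX(id) + 1, 0)` so the first inserted account gets id 0. How these rows are consumed is outside this commit; the sketch below only illustrates how a claim service might look up the `claimed` flag with `tokio-postgres`. The connection string, function name, and error handling are assumptions for this example, not code from the repository.

use anyhow::Context;
use tokio_postgres::NoTls;

/// Illustrative lookup: has the indexed account already claimed its reward?
async fn has_claimed(account_address: &[u8]) -> anyhow::Result<bool> {
    let (client, connection) =
        tokio_postgres::connect("host=localhost user=postgres dbname=indexer", NoTls)
            .await
            .context("Failed to connect to the database")?;
    // The connection future drives the socket I/O and must run on its own task.
    tokio::spawn(async move {
        if let Err(e) = connection.await {
            eprintln!("Database connection error: {e}");
        }
    });

    let statement = client
        .prepare("SELECT claimed FROM accounts WHERE account_address = $1")
        .await
        .context("Failed to prepare the claimed lookup")?;
    let row = client
        .query_opt(&statement, &[&account_address])
        .await
        .context("Failed to query the accounts table")?;

    // An address the indexer has not recorded yet counts as not having claimed.
    Ok(row.map(|r| r.get::<_, bool>("claimed")).unwrap_or(false))
}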
