diff --git a/compliant-reward-distribution/README.md b/compliant-reward-distribution/README.md deleted file mode 100644 index 9462453d..00000000 --- a/compliant-reward-distribution/README.md +++ /dev/null @@ -1,55 +0,0 @@ -# Track and Trace Project - -## Hosted front end - -[Hosted front end link](https://trackntrace.testnet.concordium.com/) - -## Overview - -This project contains a complete implementation of a track and trace solution with [CIS-3](https://proposals.concordium.software/CIS/cis-3.html) compliant sponsored transactions. - -It has five primary components. - -- A [smart contract](./smart-contract/README.md), located in `./smart-contract` -- A [frontend](./frontend/README.md), located in `./frontend` -- An [indexer](./indexer/README.md) service, located in `./indexer` -- A [sponsored transaction service](./sponsored-transaction-service/README.md), located in `./sponsored-transaction-service` - - This service is generic and compatible with any CIS-3 contracts. -- A [server](./indexer/README.md) that hosts the frontend, located in `./indexer` - -Explanations for each component reside in README.md files inside their respective folder. - -You can run the services and servers manually as explained in the READMEs or use the docker files in `./dockerfiles`. - -However, the easiest option is to use [docker-compose](https://docs.docker.com/compose/) with the configuration file `./docker-compose.yml`. - -For this to work, you should do the following: - -1. Deploy and initialize your version of the Track and Trace smart contract. -2. [Export your account keys from the Browser Wallet](https://developer.concordium.software/en/mainnet/net/guides/export-key.html) and generate a `./private-keys` folder to save the key file into it. -3. Set the following environment variables: - - Set the `TRACK_AND_TRACE_CONTRACT_ADDRESS` variable to the contract address of your contract instance. 
- - Set the `TRACK_AND_TRACE_PRIVATE_KEY_FILE` variable to the path of your keys from step 2. - - (Optional) Set the `TRACK_AND_TRACE_NETWORK` variable to the correct net (testnet/mainnet). Defaults to testnet. - - (Optional) Set the `TRACK_AND_TRACE_NODE` to the gRPC endpoint of the node you want to use. Make sure it runs on the right net, i.e., testnet or mainnet. Defaults to `https://grpc.testnet.concordium.com:20000`. -4. Run `docker-compose up` to build and start all the services. - -e.g. - -```bash -TRACK_AND_TRACE_CONTRACT_ADDRESS="<8901,0>" TRACK_AND_TRACE_PRIVATE_KEY_FILE="./private-keys/4SizPU2ipqQQza9Xa6fUkQBCDjyd1vTNUNDGbBeiRGpaJQc6qX.export" docker-compose up -``` - -5. Access the frontend at `http://localhost:8080` - - The sponsored transaction service runs on port `8000` by default, and the postgres database runs on `5432`. Both are configurable in the `./docker-compose.yml` file. - -## Switching to a different contract address - -The indexer service saves the contract address used into the underlying PostgreSQL database. -If you want to use a different contract address than initially set up, you therefore need to delete the PostgreSQL database before running the `docker-compose up` command again. - -To do so, run the following command: - -``` shell - docker volume rm trackandtrace_postgres_data -``` diff --git a/compliant-reward-distribution/docker-compose.yml b/compliant-reward-distribution/docker-compose.yml deleted file mode 100644 index 078ed192..00000000 --- a/compliant-reward-distribution/docker-compose.yml +++ /dev/null @@ -1,70 +0,0 @@ -version: '3.8' - -# This docker compose setup expects the following environment variables to be present: -# - `TRACK_AND_TRACE_CONTRACT_ADDRESS`: The address of the track and trace contract instance. (Format: '<1234,0>') -# - `TRACK_AND_TRACE_PRIVATE_KEY_FILE`: A path to the account keys used to sponsor the transactions. 
(For example './private-keys/my-account.export') -# -# The following optional environment variables can be set: -# - `TRACK_AND_TRACE_NETWORK`: The network to run the services on. Either 'mainnet' or 'testnet'. (Defaults to 'testnet') -# - `TRACK_AND_TRACE_NODE`: The gRPC interface of a node on the correct network. (Defaults to 'https://grpc.testnet.concordium.com') - -services: - sponsored-transaction-service: - build: - context: ../ - dockerfile: ./trackAndTrace/dockerfiles/sponsored-transaction-service.Dockerfile - restart: always - ports: - - 8000:8000 - volumes: - - ${TRACK_AND_TRACE_PRIVATE_KEY_FILE:?Please specify the private key file of the sponsor account.}:/private-keys/sponsor-account.export - environment: - CCD_SPONSORED_TRANSACTION_SERVICE_ALLOWED_ACCOUNTS: Any # This should ideally be limited to avoid draining the sponsor account funds. - CCD_SPONSORED_TRANSACTION_SERVICE_ALLOWED_CONTRACTS: ${TRACK_AND_TRACE_CONTRACT_ADDRESS:?Please specify the Track and Trace contract instance address (format <1234,0>)} - CCD_SPONSORED_TRANSACTION_SERVICE_PRIVATE_KEY_FILE: /private-keys/sponsor-account.export - CCD_SPONSORED_TRANSACTION_SERVICE_LISTEN_ADDRESS: 0.0.0.0:8000 - CCD_SPONSORED_TRANSACTION_SERVICE_NODE: ${TRACK_AND_TRACE_NODE:-https://grpc.testnet.concordium.com:20000} - - server: - build: - context: ../ - dockerfile: ./trackAndTrace/dockerfiles/server.Dockerfile - restart: always - environment: - CCD_SERVER_DB_CONNECTION: "host=postgres dbname=indexer user=postgres password=password port=5432" - CCD_SERVER_CONTRACT_ADDRESS: ${TRACK_AND_TRACE_CONTRACT_ADDRESS:?Please specify the Track and Trace contract instance address (format <1234,0>)} - CCD_SERVER_SPONSORED_TRANSACTION_BACKEND: "http://localhost:8000" - CCD_SERVER_NETWORK: ${TRACK_AND_TRACE_NETWORK:-testnet} - CCD_SERVER_NODE: ${TRACK_AND_TRACE_NODE:-https://grpc.testnet.concordium.com:20000} - ports: - - 8080:8080 - depends_on: - - postgres - - indexer - - indexer: - build: - context: ../ - 
dockerfile: ./trackAndTrace/dockerfiles/indexer.Dockerfile - restart: always - environment: - CCD_INDEXER_CONTRACT: ${TRACK_AND_TRACE_CONTRACT_ADDRESS:?Please specify the Track and Trace contract instance address (format <1234,0>)} - CCD_INDEXER_NODE: ${TRACK_AND_TRACE_NODE:-https://grpc.testnet.concordium.com:20000} - CCD_INDEXER_DB_CONNECTION: "host=postgres dbname=indexer user=postgres password=password port=5432" - depends_on: - - postgres - - postgres: - image: postgres:latest - ports: - - 5432:5432 - restart: always - volumes: - - postgres_data:/var/lib/postgresql/data - environment: - POSTGRES_DB: indexer - POSTGRES_USER: postgres - POSTGRES_PASSWORD: password - -volumes: - postgres_data: diff --git a/compliant-reward-distribution/indexer/Cargo.toml b/compliant-reward-distribution/indexer/Cargo.toml index 31910876..57fa0f51 100644 --- a/compliant-reward-distribution/indexer/Cargo.toml +++ b/compliant-reward-distribution/indexer/Cargo.toml @@ -24,8 +24,6 @@ tonic = { version = "0.10", features = ["tls-roots", "tls"] } thiserror = "1.0" chrono = "0.4" concordium-rust-sdk = { version = "4.2"} -# It is essential that the bump_alloc feature is **not** enabled since it will -# globally set an inefficient allocator. tokio = { version = "1.35", features = ["rt-multi-thread", "macros", "sync", "signal"] } tokio-postgres = { version = "0.7", features = [ "with-serde_json-1", diff --git a/compliant-reward-distribution/indexer/resources/schema.sql b/compliant-reward-distribution/indexer/resources/schema.sql index fa82f302..14dfda6a 100644 --- a/compliant-reward-distribution/indexer/resources/schema.sql +++ b/compliant-reward-distribution/indexer/resources/schema.sql @@ -12,7 +12,32 @@ CREATE TABLE IF NOT EXISTS settings ( latest_processed_block_height INT8 ); --- Table containing item_status_changed_events successfully submitted to the database from the contract monitored. 
+-- Table containing new accounts created on the blockchain which are eligible +-- for a reward after completing some tasks/checks. +CREATE TABLE IF NOT EXISTS accounts ( + -- Primary key. + id INT8 PRIMARY KEY, + -- The account address created on chain. + account_address BYTEA NOT NULL, + -- The timestamp of the block when the account was created on chain. + block_time TIMESTAMP WITH TIME ZONE NOT NULL, + -- The transaction hash of the transaction that created the account on chain. + transaction_hash BYTEA NOT NULL, + -- A boolean specifying if the account has already claimed. + claimed BOOL NOT NULL, + -- A link to a twitter post submitted by the above account address (task 1). + twitter_post_link BYTEA, + -- A boolean specifying if the identity associated with the account is eligible for the reward (task 2). + -- An associated ZK proof was verified by this backend before this flag is set. + zk_proof_valid BOOL, + -- A version that specifies the setting of the ZK proof during the verification. This enables us + -- to update the ZK proof verification logic in the future and invalidate older proofs. + zk_proof_version INT8, + -- A hash of the revealed `firstName|lastName|passportNumber` to prevent + -- claiming with different accounts for the same identity. + uniqueness_hash BYTEA +); + CREATE TABLE IF NOT EXISTS item_status_changed_events ( -- Primary key. id INT8 PRIMARY KEY, diff --git a/compliant-reward-distribution/indexer/src/bin/indexer.rs b/compliant-reward-distribution/indexer/src/bin/indexer.rs index fcad4bea..95e98bec 100644 --- a/compliant-reward-distribution/indexer/src/bin/indexer.rs +++ b/compliant-reward-distribution/indexer/src/bin/indexer.rs @@ -2,19 +2,18 @@ //! postgres database. The database is configured with the tables from the file //! `../resources/schema.sql`. A table //! `settings` exists to store global configurations. 
-use ::indexer::db::{DatabaseError, DatabasePool}; +use ::indexer::db::DatabasePool; use anyhow::Context; use clap::Parser; use concordium_rust_sdk::{ - indexer::{self, ContractUpdateInfo, ProcessorConfig, TransactionIndexer}, - smart_contracts::common::to_bytes, + indexer::{self, ProcessorConfig, TransactionIndexer}, types::{ - queries::BlockInfo, smart_contracts::OwnedReceiveName, AbsoluteBlockHeight, - BlockItemSummary, ContractAddress, + queries::BlockInfo, AbsoluteBlockHeight, BlockItemSummary, + BlockItemSummaryDetails::AccountCreation, }, v2::{self as sdk, Client}, }; -use tokio_postgres::types::{Json, ToSql}; +use tokio_postgres::types::ToSql; /// Command line configuration of the application. #[derive(Debug, clap::Parser)] @@ -91,94 +90,42 @@ impl indexer::ProcessEvent for StoreEvents { .await .context("Failed to execute latest_processed_block_height transaction")?; - // for single_contract_update_info in contract_update_info { - // for (_contract_invoked, _entry_point_name, events) in - // single_contract_update_info.0.execution_tree.events() - // { - // for (event_index, event) in events.iter().enumerate() { - // // let parsed_event: contract::Event = - // event.parse()?; - - // // if let contract::Event::::ItemStatusChanged( - // // item_status_change_event, - // // ) = parsed_event - // // { - // // let params: [&(dyn ToSql + Sync); 6] = [ - // // &(block_info.block_slot_time), - // // - // &single_contract_update_info.0.transaction_hash.as_ref(), - // // &(event_index as i64), // - // &(item_status_change_event.item_id.0 as i64), // - // &Json(&item_status_change_event.new_status), // - // &item_status_change_event.additional_data.bytes, // - // ]; - - // // let statement = db_transaction - // // .prepare_cached( - // // "INSERT INTO item_status_changed_events (id, - // block_time, \ // transaction_hash, - // event_index, item_id, new_status, \ // - // additional_data) SELECT COALESCE(MAX(id) + 1, 0), $1, $2, $3, \ - // // $4, $5, $6 FROM 
item_status_changed_events;", - // // ) - // // .await - // // .context("Failed to prepare item_status_change_event - // transaction")?; - - // // db_transaction - // // .execute(&statement, ¶ms) - // // .await - // // .context("Failed to execute item_status_change_event - // transaction")?; - - // // tracing::debug!( - // // "Preparing item_status_change_event from block {}, - // transaction hash \ // {}, and event index {}.", - // // block_info.block_height, - // // single_contract_update_info.0.transaction_hash, - // // event_index - // // ); - // // } else if let contract::Event::::ItemCreated( - // // item_created_event, - // // ) = parsed_event - // // { - // // let params: [&(dyn ToSql + Sync); 6] = [ - // // &(block_info.block_slot_time), - // // - // &single_contract_update_info.0.transaction_hash.as_ref(), - // // &(event_index as i64), // - // &(item_created_event.item_id.0 as i64), // - // &to_bytes(&item_created_event.metadata_url), // - // &Json(&item_created_event.initial_status), // ]; - - // // let statement = db_transaction - // // .prepare_cached( - // // "INSERT INTO item_created_events (id, block_time, - // \ // transaction_hash, event_index, item_id, - // metadata_url, \ // initial_status) SELECT - // COALESCE(MAX(id) + 1, 0), $1, $2, $3, $4, \ // - // $5, $6 FROM item_created_events;", // ) - // // .await - // // .context("Failed to prepare item_created_event - // transaction")?; - - // // db_transaction - // // .execute(&statement, ¶ms) - // // .await - // // .context("Failed to execute item_created_event - // transaction")?; - - // // tracing::debug!( - // // "Preparing event from block {}, transaction hash {}, - // and event index \ // {}.", - // // block_info.block_height, - // // single_contract_update_info.0.transaction_hash, - // // event_index - // // ); - // // } - // } - // } - // } + for tx in block_items { + match &tx.details { + AccountCreation(account_creation_details) => { + let params: [&(dyn ToSql + Sync); 4] = [ + 
&account_creation_details.address.0.as_ref(), + &block_info.block_slot_time, + &tx.hash.as_ref(), + &false, + ]; + let statement = db_transaction + .prepare_cached( + "INSERT INTO accounts (id,account_address, \ + block_time,transaction_hash, claimed) SELECT COALESCE(MAX(id) + 1, \ + 0), $1,$2, $3, $4 FROM accounts;", + ) + .await + .context( + "Failed to prepare transaction to add a new account to the database", + )?; + + db_transaction.execute(&statement, ¶ms).await.context( + "Failed to execute transaction to add a new account to the database", + )?; + + tracing::debug!( + "Preparing database transaction for account {} from transaction hash {} \ + in block {}.", + account_creation_details.address, + tx.hash, + block_info.block_height, + ); + } + _ => continue, + } + } + // Commit the transaction db_transaction .commit() diff --git a/compliant-reward-distribution/indexer/src/db.rs b/compliant-reward-distribution/indexer/src/db.rs index ba80d63f..d049313b 100644 --- a/compliant-reward-distribution/indexer/src/db.rs +++ b/compliant-reward-distribution/indexer/src/db.rs @@ -34,7 +34,7 @@ type DatabaseResult = Result; #[derive(Debug, Serialize)] pub struct StoredConfiguration { /// The genesis block hash of the network monitored. - pub genesis_block_hash: BlockHash, + pub genesis_block_hash: BlockHash, /// The last block height that was processed. pub latest_processed_block_height: Option, } @@ -66,13 +66,13 @@ impl TryFrom for StoredConfiguration { #[derive(Debug, serde::Serialize, serde::Deserialize)] pub struct StoredItemStatusChangedEvent { /// The timestamp of the block the event was included in. - pub block_time: DateTime, + pub block_time: DateTime, /// The transaction hash that the event was recorded in. pub transaction_hash: TransactionHash, /// The index from the array of logged events in a transaction. - pub event_index: u64, + pub event_index: u64, /// The item's id as logged in the event. 
- pub item_id: u64, + pub item_id: u64, } impl TryFrom for StoredItemStatusChangedEvent { @@ -85,12 +85,12 @@ impl TryFrom for StoredItemStatusChangedEvent { let raw_event_index: i64 = value.try_get("event_index")?; let events = Self { - block_time: value.try_get("block_time")?, + block_time: value.try_get("block_time")?, transaction_hash: raw_transaction_hash .try_into() .map_err(|_| DatabaseError::TypeConversion("transaction_hash".to_string()))?, - event_index: raw_event_index as u64, - item_id: raw_item_id as u64, + event_index: raw_event_index as u64, + item_id: raw_item_id as u64, }; Ok(events) } @@ -100,15 +100,15 @@ impl TryFrom for StoredItemStatusChangedEvent { #[derive(Debug, serde::Serialize, serde::Deserialize)] pub struct StoredItemCreatedEvent { /// The timestamp of the block the event was included in. - pub block_time: DateTime, + pub block_time: DateTime, /// The transaction hash that the event was recorded in. pub transaction_hash: TransactionHash, /// The index from the array of logged events in a transaction. - pub event_index: u64, + pub event_index: u64, /// The item's id as logged in the event. - pub item_id: u64, + pub item_id: u64, /// The item's metadata_url as logged in the event. - pub metadata_url: Option, + pub metadata_url: Option, } impl TryFrom for StoredItemCreatedEvent { @@ -121,13 +121,13 @@ impl TryFrom for StoredItemCreatedEvent { let raw_event_index: i64 = value.try_get("event_index")?; let events = Self { - block_time: value.try_get("block_time")?, + block_time: value.try_get("block_time")?, transaction_hash: raw_transaction_hash .try_into() .map_err(|_| DatabaseError::TypeConversion("transaction_hash".to_string()))?, - event_index: raw_event_index as u64, - item_id: raw_item_id as u64, - metadata_url: from_bytes(value.try_get("metadata_url")?) + event_index: raw_event_index as u64, + item_id: raw_item_id as u64, + metadata_url: from_bytes(value.try_get("metadata_url")?) 
.map_err(|_| DatabaseError::TypeConversion("metadata_url".to_string()))?, }; Ok(events) @@ -141,15 +141,11 @@ pub struct Database { } impl From for Database { - fn from(client: Object) -> Self { - Self { client } - } + fn from(client: Object) -> Self { Self { client } } } impl AsRef for Database { - fn as_ref(&self) -> &Object { - &self.client - } + fn as_ref(&self) -> &Object { &self.client } } impl Database {