
feat: Added deploy script for checking deployments #738

Open · wants to merge 9 commits into base: develop
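Note: the deploy-checker itself lives in `infra/scripts/deploy`, but its source is not included in this excerpt; the workflows below only invoke it via `cargo test -- run_dev` and `cargo run --release -- check --network testnet --mpc-contract-id "v5.multichain-mpc-dev.testnet"`. Purely to illustrate that command-line interface, a hypothetical clap-based CLI accepting the same arguments could look like the sketch below; the real script may be structured differently.

```rust
use clap::{Parser, Subcommand};

/// Hypothetical shape of the deploy-checker CLI, inferred only from how the
/// workflows below invoke it; not the actual code in infra/scripts/deploy.
#[derive(Parser)]
struct Cli {
    #[command(subcommand)]
    cmd: Cmd,
}

#[derive(Subcommand)]
enum Cmd {
    /// Check a deployed MPC contract on the given network.
    Check {
        /// NEAR network to run against, e.g. "testnet".
        #[arg(long)]
        network: String,
        /// Account id of the MPC contract, e.g. "v5.multichain-mpc-dev.testnet".
        #[arg(long)]
        mpc_contract_id: String,
    },
}

fn main() {
    match Cli::parse().cmd {
        Cmd::Check { network, mpc_contract_id } => {
            // The actual checks performed by the script are not shown in this diff.
            println!("checking {mpc_contract_id} on {network}");
        }
    }
}
```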
7 changes: 7 additions & 0 deletions .github/workflows/multichain-contract.yml
@@ -40,3 +40,10 @@ jobs:
env:
RUST_LOG: INFO
RUST_BACKTRACE: 1

- name: Compile Contract (dev)
run: cd ./chain-signatures && cargo build -p mpc-contract --target wasm32-unknown-unknown --release --features dev

- name: Run Deploy Checker
working-directory: infra/scripts/deploy
run: cargo test -- run_dev
45 changes: 34 additions & 11 deletions .github/workflows/multichain-dev.yml
@@ -12,12 +12,12 @@ env:
TAG: ${{ github.sha }}

jobs:
build-mpc-recovery:
build-chain-signatures:
if: github.event.pull_request.merged == true
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
name: "Checkout mpc-recovery"
name: "Checkout chain-signatures"

- name: Login to GCP Artifact Registry
run: echo "$GOOGLE_CREDENTIALS" | docker login -u _json_key --password-stdin https://us-east1-docker.pkg.dev
@@ -32,23 +32,46 @@ jobs:
file: ./Dockerfile.multichain
tags: "${{ env.IMAGE }}:${{ env.TAG }}"

check-deploy:
if: github.event.pull_request.merged == true
runs-on: ubuntu-latest
needs: build-chain-signatures
steps:
- uses: actions/checkout@v3
- uses: actions-rs/toolchain@v1
with:
profile: minimal
toolchain: stable

- uses: Swatinem/rust-cache@v1
- name: Compile Contract
working-directory: chain-signatures
run: cargo build -p mpc-contract --target wasm32-unknown-unknown --release --features dev

- name: Build Deploy Script
working-directory: infra/scripts/deploy
run: cargo build --release
- name: Run Deploy Check
working-directory: infra/scripts/deploy
run: cargo run --release -- check --network testnet --mpc-contract-id "v5.multichain-mpc-dev.testnet"

deploy:
if: github.event.pull_request.merged == true
runs-on: ubuntu-latest
needs: build-mpc-recovery
needs: check-deploy
steps:
- id: 'auth'
uses: 'google-github-actions/auth@v2'
- id: "auth"
uses: "google-github-actions/auth@v2"
with:
credentials_json: '${{ secrets.GCP_CREDENTIALS_DEV }}'
credentials_json: "${{ secrets.GCP_CREDENTIALS_DEV }}"

- name: 'Set up Cloud SDK'
uses: 'google-github-actions/setup-gcloud@v2'
- name: "Set up Cloud SDK"
uses: "google-github-actions/setup-gcloud@v2"

- name: 'Set project'
run: 'gcloud config set project pagoda-discovery-platform-dev'
- name: "Set project"
run: "gcloud config set project pagoda-discovery-platform-dev"

- name: 'Update Nodes'
- name: "Update Nodes"
run: |
gcloud compute instances update-container multichain-dev-0 --zone us-central1-a --container-image=${{ env.IMAGE }}:${{ env.TAG }} & \
gcloud compute instances update-container multichain-dev-1 --zone us-central1-a --container-image=${{ env.IMAGE }}:${{ env.TAG }} & \
7 changes: 7 additions & 0 deletions chain-signatures/contract/Cargo.toml
@@ -30,3 +30,10 @@ digest = "0.10.7"
# near dependencies
near-crypto = "0.23.0"
near-workspaces = { git = "https://github.com/near/near-workspaces-rs", branch = "node/1.40" }

[features]
default = []

migrate = []
dev = ["migrate"]
testnet = ["migrate"]
2 changes: 2 additions & 0 deletions chain-signatures/contract/src/errors.rs
@@ -52,6 +52,8 @@ pub enum InitError {
ThresholdTooHigh,
#[error("Cannot load in contract due to missing state")]
ContractStateIsMissing,
#[error("Cannot load in contract due to breakings changes with state")]
ContractStateIsBroken,
}

#[derive(Debug, thiserror::Error)]
18 changes: 16 additions & 2 deletions chain-signatures/contract/src/lib.rs
@@ -4,6 +4,9 @@ pub mod primitives;
pub mod state;
pub mod update;

#[cfg(feature = "migrate")]
pub mod migrate;

use crypto_shared::{
derive_epsilon, derive_key, kdf::check_ec_signature, near_public_key_to_affine_point,
types::SignatureResponse, ScalarExt as _,
@@ -660,10 +663,21 @@ impl VersionedMpcContract {
#[init(ignore_state)]
#[handle_result]
pub fn migrate() -> Result<Self, MpcContractError> {
let old: MpcContract = env::state_read().ok_or(MpcContractError::InitError(
#[cfg(feature = "dev")]
{
return migrate::migrate_testnet_dev();
}

#[cfg(feature = "testnet")]
{
return migrate::migrate_testnet_dev();
}

// Future state-breaking migrations for testnet and mainnet should be handled here.
let loaded: MpcContract = env::state_read().ok_or(MpcContractError::InitError(
InitError::ContractStateIsMissing,
))?;
Ok(VersionedMpcContract::V0(old))
Ok(VersionedMpcContract::V0(loaded))
}

pub fn state(&self) -> &ProtocolContractState {
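For orientation (not part of the diff): the gated `migrate` entrypoint above is meant to be called after re-deploying new contract code over an account that still holds the old state. A minimal near-workspaces sketch of that flow is shown below; it assumes tokio/anyhow dev-dependencies and placeholder wasm paths, and omits the initialization calls a real test would need.

```rust
#[tokio::test]
async fn migrate_after_redeploy() -> anyhow::Result<()> {
    let worker = near_workspaces::sandbox().await?;

    // Deploy and initialize the previously released contract
    // (the init call is omitted in this sketch; without it,
    // `migrate` fails with InitError::ContractStateIsMissing).
    let old_wasm = std::fs::read("old_mpc_contract.wasm")?;
    let contract = worker.dev_deploy(&old_wasm).await?;

    // Re-deploy the freshly built contract over the same account...
    let new_wasm =
        std::fs::read("target/wasm32-unknown-unknown/release/mpc_contract.wasm")?;
    let contract = contract
        .as_account()
        .deploy(&new_wasm)
        .await?
        .into_result()?;

    // ...then run the state migration on the upgraded code.
    let outcome = contract.call("migrate").max_gas().transact().await?;
    assert!(outcome.is_success());
    Ok(())
}
```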
150 changes: 150 additions & 0 deletions chain-signatures/contract/src/migrate.rs
@@ -0,0 +1,150 @@
#![allow(dead_code)]

use std::collections::{HashMap, HashSet};

use borsh::BorshDeserialize;
use near_sdk::collections::LookupMap;
use near_sdk::{env, AccountId};

use crate::config::Config;
use crate::errors::{InitError, MpcContractError};
use crate::primitives::{SignatureRequest, YieldIndex};
use crate::{update, MpcContract, ProtocolContractState, VersionedMpcContract};

// NOTE: All the custom `BorshDeserialize` implementations are necessary for debugging purposes
// in case the migration fails. This way we can log the error and get details about what went wrong
// during the deserialization step.

#[derive(BorshDeserialize)]
pub struct OldConfig {
pub triple_timeout: u64,
pub presignature_timeout: u64,
pub signature_timeout: u64,
}

#[derive(BorshDeserialize)]
enum OldUpdate {
Config(OldConfig),
Contract(Vec<u8>),
}

#[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct OldUpdateId(pub(crate) u64);

impl BorshDeserialize for OldUpdateId {
fn deserialize_reader<R: borsh::io::Read>(reader: &mut R) -> borsh::io::Result<Self> {
let id = deserialize_or_log(reader, "OldUpdateId.u64")?;
Ok(OldUpdateId(id))
}
}

pub struct OldProposedUpdates {
updates: HashMap<OldUpdateId, Vec<OldUpdate>>,
votes: HashMap<OldUpdateId, HashSet<AccountId>>,
next_id: u64,
}

impl BorshDeserialize for OldProposedUpdates {
fn deserialize_reader<R: borsh::io::Read>(reader: &mut R) -> borsh::io::Result<Self> {
let updates = deserialize_or_log(reader, "OldProposedUpdates.updates")?;
let votes = deserialize_or_log(reader, "OldProposedUpdates.votes")?;
let next_id = deserialize_or_log(reader, "OldProposedUpdates.next_id")?;
Ok(OldProposedUpdates {
updates,
votes,
next_id,
})
}
}

pub struct OldContract {
protocol_state: ProtocolContractState,
pending_requests: LookupMap<SignatureRequest, YieldIndex>,
request_counter: u32,
proposed_updates: OldProposedUpdates,
config: OldConfig,
}

impl BorshDeserialize for OldContract {
fn deserialize_reader<R: borsh::io::Read>(reader: &mut R) -> borsh::io::Result<Self> {
let protocol_state = deserialize_or_log(reader, "OldContract.protocol_state")?;
let pending_requests = deserialize_or_log(reader, "OldContract.pending_requests")?;
let request_counter = deserialize_or_log(reader, "OldContract.request_counter")?;
let proposed_updates = deserialize_or_log(reader, "OldContract.proposed_updates")?;
let config = deserialize_or_log(reader, "OldContract.config")?;
Ok(OldContract {
protocol_state,
pending_requests,
request_counter,
proposed_updates,
config,
})
}
}

#[derive(BorshDeserialize)]
enum OldVersionedMpcContract {
V0(OldContract),
}

pub fn migrate_testnet_dev() -> Result<VersionedMpcContract, MpcContractError> {
// Try to load the state; if that fails, we need to run the dev migration.
// NOTE: since we're in dev, there will be many state-breaking changes. If the state
// loaded successfully, then a migration was not necessary and the developer did not
// change the contract state.
let data = env::storage_read(b"STATE").ok_or(MpcContractError::InitError(
InitError::ContractStateIsMissing,
))?;

if let Ok(loaded) = MpcContract::try_from_slice(&data) {
return Ok(VersionedMpcContract::V0(loaded));
};

// NOTE: for any PR that hits this error, change the code in this block so that our
// dev environment does not end up broken.

let old = OldVersionedMpcContract::try_from_slice(&data).unwrap();
let OldVersionedMpcContract::V0(mut old) = old;

// Migrate old proposed updates to new proposed updates.
let mut new_updates = update::ProposedUpdates::default();
for (id, updates) in old.proposed_updates.updates {
let updates: Vec<_> = updates
.into_iter()
.map(|update| match update {
OldUpdate::Config(_) => update::Update::Config(Config::default()),
OldUpdate::Contract(contract) => update::Update::Contract(contract),
})
.collect();

let entry = update::UpdateEntry {
bytes_used: update::bytes_used_updates(&updates),
updates,
votes: old.proposed_updates.votes.remove(&id).unwrap(),
};
new_updates.entries.insert(update::UpdateId(id.0), entry);
}
new_updates.id = update::UpdateId(old.proposed_updates.next_id);

let migrated = VersionedMpcContract::V0(MpcContract {
protocol_state: old.protocol_state,
pending_requests: old.pending_requests,
request_counter: old.request_counter,
proposed_updates: new_updates,
config: Config::default(),
});
Ok(migrated)
}

fn deserialize_or_log<T: BorshDeserialize, R: borsh::io::Read>(
reader: &mut R,
which_state: &str,
) -> borsh::io::Result<T> {
match T::deserialize_reader(reader) {
Ok(state) => Ok(state),
Err(err) => {
env::log_str(&format!("Error deserializing {which_state} state: {err:?}"));
Err(err)
}
}
}
32 changes: 26 additions & 6 deletions chain-signatures/contract/src/update.rs
@@ -54,16 +54,16 @@ pub struct ProposeUpdateArgs {
}

#[derive(Debug, BorshSerialize, BorshDeserialize)]
struct UpdateEntry {
updates: Vec<Update>,
votes: HashSet<AccountId>,
bytes_used: u128,
pub struct UpdateEntry {
pub(crate) updates: Vec<Update>,
pub(crate) votes: HashSet<AccountId>,
pub(crate) bytes_used: u128,
}

#[derive(Debug, BorshSerialize, BorshDeserialize)]
pub struct ProposedUpdates {
entries: IterableMap<UpdateId, UpdateEntry>,
id: UpdateId,
pub(crate) entries: IterableMap<UpdateId, UpdateEntry>,
pub(crate) id: UpdateId,
}

impl Default for ProposedUpdates {
@@ -165,6 +165,26 @@ fn bytes_used(code: &Option<Vec<u8>>, config: &Option<Config>) -> u128 {
bytes_used
}

pub fn bytes_used_updates(updates: &[Update]) -> u128 {
let mut bytes_used = std::mem::size_of::<UpdateEntry>() as u128;

// Assume a high max of 128 participant votes per update entry.
bytes_used += 128 * std::mem::size_of::<AccountId>() as u128;

for update in updates {
match update {
Update::Config(config) => {
let bytes = serde_json::to_vec(&config).unwrap();
bytes_used += bytes.len() as u128;
}
Update::Contract(code) => {
bytes_used += code.len() as u128;
}
}
}
bytes_used
}

fn required_deposit(bytes_used: u128) -> NearToken {
env::storage_byte_cost().saturating_mul(bytes_used)
}
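As a back-of-the-envelope illustration of what `required_deposit` ends up charging (assuming NEAR's standard storage price of 1e19 yoctoNEAR, i.e. 0.00001 NEAR, per byte; the figures below are only an example, not taken from the PR):

```rust
fn main() {
    // NEAR's standard storage cost: 1e19 yoctoNEAR per byte (1 NEAR = 1e24 yoctoNEAR).
    let storage_byte_cost: u128 = 10u128.pow(19);

    // Example: a proposed contract update of ~300 KiB plus the fixed UpdateEntry overhead.
    let bytes_used: u128 = 300 * 1024;

    // 307_200 bytes * 1e19 = 3.072e24 yoctoNEAR, roughly a 3.07 NEAR attached deposit.
    let deposit_yocto = storage_byte_cost * bytes_used;
    println!("required deposit: {deposit_yocto} yoctoNEAR");
}
```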