From d00274de630132c5952f613b52b61223dd53d0ba Mon Sep 17 00:00:00 2001 From: himanshu Date: Fri, 23 Aug 2024 13:52:21 +0530 Subject: [PATCH 1/9] remove commitment request --- src/constants.ts | 1 + src/helpers/common.ts | 71 +-- src/helpers/nodeUtils.ts | 1041 ++++++++++++++++------------------ src/interfaces.ts | 27 +- test/sapphire_devnet.test.ts | 4 +- 5 files changed, 532 insertions(+), 612 deletions(-) diff --git a/src/constants.ts b/src/constants.ts index c1019a4..1f449aa 100644 --- a/src/constants.ts +++ b/src/constants.ts @@ -1,5 +1,6 @@ export const JRPC_METHODS = { GET_OR_SET_KEY: "GetPubKeyOrKeyAssign", + VERIFIER_LOOKUP: "VerifierLookupRequest", COMMITMENT_REQUEST: "CommitmentRequest", IMPORT_SHARES: "ImportShares", GET_SHARE_OR_KEY_ASSIGN: "GetShareOrKeyAssign", diff --git a/src/helpers/common.ts b/src/helpers/common.ts index 8d37f0a..f2cd460 100644 --- a/src/helpers/common.ts +++ b/src/helpers/common.ts @@ -1,11 +1,11 @@ -import { JRPCResponse, KEY_TYPE } from "@toruslabs/constants"; +import { KEY_TYPE } from "@toruslabs/constants"; import { Ecies } from "@toruslabs/eccrypto"; import { BN } from "bn.js"; import { ec as EC } from "elliptic"; import { keccak256 as keccakHash } from "ethereum-cryptography/keccak"; import JsonStringify from "json-stable-stringify"; -import { CommitmentRequestResult, EciesHex, KeyType, VerifierLookupResponse } from "../interfaces"; +import { EciesHex, GetORSetKeyResponse, KeyType, VerifierLookupResponse } from "../interfaces"; export function keccak256(a: Buffer): string { const hash = Buffer.from(keccakHash(a)).toString("hex"); @@ -28,8 +28,8 @@ export const getKeyCurve = (keyType: KeyType) => { // For ex: some fields returns by nodes might be different from each other // like created_at field might vary and nonce_data might not be returned by all nodes because // of the metadata implementation in sapphire. -export const normalizeKeysResult = (result: VerifierLookupResponse) => { - const finalResult: Pick = { +export const normalizeKeysResult = (result: GetORSetKeyResponse) => { + const finalResult: Pick = { keys: [], is_new_key: result.is_new_key, }; @@ -46,6 +46,23 @@ export const normalizeKeysResult = (result: VerifierLookupResponse) => { return finalResult; }; +export const normalizeLookUpResult = (result: VerifierLookupResponse) => { + const finalResult: Pick = { + keys: [], + }; + if (result && result.keys && result.keys.length > 0) { + const finalKey = result.keys[0]; + finalResult.keys = [ + { + pub_key_X: finalKey.pub_key_X, + pub_key_Y: finalKey.pub_key_Y, + address: finalKey.address, + }, + ]; + } + return finalResult; +}; + export const kCombinations = (s: number | number[], k: number): number[][] => { let set = s; if (typeof set === "number") { @@ -143,49 +160,3 @@ export function waitFor(milliseconds: number) { } }); } - -export function retryCommitment(executionPromise: () => Promise>, maxRetries: number) { - // Notice that we declare an inner function here - // so we can encapsulate the retries and don't expose - // it to the caller. This is also a recursive function - async function retryWithBackoff(retries: number) { - try { - // we don't wait on the first attempt - if (retries > 0) { - // on every retry, we exponentially increase the time to wait. 
- // Here is how it looks for a `maxRetries` = 4 - // (2 ** 1) * 100 = 200 ms - // (2 ** 2) * 100 = 400 ms - // (2 ** 3) * 100 = 800 ms - const timeToWait = 2 ** retries * 100; - await waitFor(timeToWait); - } - const a = await executionPromise(); - return a; - } catch (e: unknown) { - const errorMsg = (e as Error).message; - const acceptedErrorMsgs = [ - // Slow node - "Timed out", - "Failed to fetch", - "fetch failed", - "Load failed", - "cancelled", - "NetworkError when attempting to fetch resource.", - // Happens when the node is not reachable (dns issue etc) - "TypeError: Failed to fetch", // All except iOS and Firefox - "TypeError: cancelled", // iOS - "TypeError: NetworkError when attempting to fetch resource.", // Firefox - ]; - - if (retries < maxRetries && (acceptedErrorMsgs.includes(errorMsg) || (errorMsg && errorMsg.includes("reason: getaddrinfo EAI_AGAIN")))) { - // only retry if we didn't reach the limit - // otherwise, let the caller handle the error - return retryWithBackoff(retries + 1); - } - throw e; - } - } - - return retryWithBackoff(0); -} diff --git a/src/helpers/nodeUtils.ts b/src/helpers/nodeUtils.ts index f890d33..5a1e85f 100644 --- a/src/helpers/nodeUtils.ts +++ b/src/helpers/nodeUtils.ts @@ -8,8 +8,8 @@ import { getRandomBytes } from "ethereum-cryptography/random"; import { config } from "../config"; import { JRPC_METHODS } from "../constants"; import { - CommitmentRequestResult, ExtendedPublicKey, + GetORSetKeyResponse, GetOrSetNonceResult, ImportedShare, ImportShareRequestResult, @@ -22,6 +22,7 @@ import { UserType, v2NonceResultType, VerifierLookupResponse, + VerifierLookupResult, VerifierParams, } from "../interfaces"; import log from "../loglevel"; @@ -32,9 +33,8 @@ import { generatePrivateKey, getProxyCoordinatorEndpointIndex, kCombinations, - keccak256, normalizeKeysResult, - retryCommitment, + normalizeLookUpResult, thresholdSame, } from "./common"; import { derivePubKey, generateAddressFromPrivKey, generateAddressFromPubKey, generateShares } from "./keyUtils"; @@ -60,7 +60,7 @@ export const GetPubKeyOrKeyAssign = async (params: { const { endpoints, network, verifier, verifierId, extendedVerifierId, keyType } = params; const minThreshold = ~~(endpoints.length / 2) + 1; const lookupPromises = endpoints.map((x) => - post>( + post>( x, generateJsonRPCObject(JRPC_METHODS.GET_OR_SET_KEY, { distributed_metadata: true, @@ -79,7 +79,7 @@ export const GetPubKeyOrKeyAssign = async (params: { let nonceResult: GetOrSetNonceResult | undefined; const nodeIndexes: number[] = []; - const result = await Some, KeyLookupResult>(lookupPromises, async (lookupResults) => { + const result = await Some, KeyLookupResult>(lookupPromises, async (lookupResults) => { const lookupPubKeys = lookupResults.filter((x1) => { if (x1 && !x1.error) { return x1; @@ -159,6 +159,62 @@ export const GetPubKeyOrKeyAssign = async (params: { return result; }; + +export const VerifierLookupRequest = async (params: { + endpoints: string[]; + verifier: string; + verifierId: string; + keyType: KeyType; +}): Promise => { + const { endpoints, verifier, verifierId, keyType } = params; + const minThreshold = ~~(endpoints.length / 2) + 1; + const lookupPromises = endpoints.map((x) => + post>( + x, + generateJsonRPCObject(JRPC_METHODS.VERIFIER_LOOKUP, { + verifier, + verifier_id: verifierId.toString(), + key_type: keyType, + client_time: Math.floor(Date.now() / 1000).toString(), + }), + {}, + { logTracingHeader: config.logRequestTracing } + ).catch((err) => log.error(`${JRPC_METHODS.GET_OR_SET_KEY} request 
failed`, err)) + ); + + const result = await Some, VerifierLookupResult>(lookupPromises, async (lookupResults) => { + const lookupPubKeys = lookupResults.filter((x1) => { + if (x1 && !x1.error) { + return x1; + } + return false; + }); + + const errorResult = thresholdSame( + lookupPubKeys.map((x2) => x2 && x2.error), + minThreshold + ); + + const keyResult = thresholdSame( + lookupPubKeys.map((x3) => x3 && normalizeLookUpResult(x3.result)), + minThreshold + ); + + const serverTimeOffsets: number[] = []; + if (keyResult || errorResult) { + const serverTimeOffset = keyResult ? calculateMedian(serverTimeOffsets) : 0; + return Promise.resolve({ keyResult, serverTimeOffset, errorResult }); + } + return Promise.reject( + new Error( + `invalid lookup result: ${JSON.stringify(lookupResults)} + )} for verifier: ${verifier}, verifierId: ${verifierId}` + ) + ); + }); + + return result; +}; export async function retrieveOrImportShare(params: { legacyMetadataHost: string; serverTimeOffset: number; @@ -212,15 +268,13 @@ export async function retrieveOrImportShare(params: { }, { useAPIKey: true } ); - const promiseArr = []; // generate temporary private and public key that is used to secure receive shares const sessionAuthKey = generatePrivate(); const pubKey = getPublic(sessionAuthKey).toString("hex"); - const pubKeyX = pubKey.slice(2, 66); - const pubKeyY = pubKey.slice(66); + const sessionPubX = pubKey.slice(2, 66); + const sessionPubY = pubKey.slice(66); let finalImportedShares: ImportedShare[] = []; - const threeFourthsThreshold = ~~((endpoints.length * 3) / 4) + 1; const halfThreshold = ~~(endpoints.length / 2) + 1; if (newImportedShares?.length > 0) { @@ -234,43 +288,128 @@ export async function retrieveOrImportShare(params: { const generatedShares = await generateShares(ecCurve, keyType, serverTimeOffset, indexes, nodePubkeys, Buffer.from(bufferKey)); finalImportedShares = [...finalImportedShares, ...generatedShares]; } - - const tokenCommitment = keccak256(Buffer.from(idToken, "utf8")); - - // make commitment requests to endpoints - for (let i = 0; i < endpoints.length; i += 1) { - /* - CommitmentRequestParams struct { - MessagePrefix string `json:"messageprefix"` - TokenCommitment string `json:"tokencommitment"` - TempPubX string `json:"temppubx"` - TempPubY string `json:"temppuby"` - VerifierIdentifier string `json:"verifieridentifier"` - } - */ - const p = () => - post>( + let existingPubKey; + // can only import shares if override existing key is allowed or for new non dkg registration + if (finalImportedShares.length > 0) { + // in case not allowed to overide existing key for import request + // check if key exists + if (!overrideExistingKey) { + const keyLookupResult = await VerifierLookupRequest({ endpoints, verifier, verifierId: verifierParams.verifier_id, keyType }); + if (keyLookupResult.errorResult) { + throw new Error( + `node results do not match at first lookup ${JSON.stringify(keyLookupResult.keyResult || {})}, ${JSON.stringify(keyLookupResult.errorResult || {})}` + ); + } + if (keyLookupResult.keyResult?.keys?.length > 0) { + existingPubKey = keyLookupResult.keyResult.keys[0]; + } + } + // check if key exists + } + const promiseArrRequest = []; + + const canImportedShares = overrideExistingKey || (!useDkg && !existingPubKey); + if (canImportedShares) { + const proxyEndpointNum = getProxyCoordinatorEndpointIndex(endpoints, verifier, verifierParams.verifier_id); + const items: Record[] = []; + for (let i = 0; i < endpoints.length; i += 1) { + const importedShare = 
finalImportedShares[i]; + items.push({ + ...verifierParams, + idtoken: idToken, + verifieridentifier: verifier, + pub_key_x: importedShare.oauth_pub_key_x, + pub_key_y: importedShare.oauth_pub_key_y, + signing_pub_key_x: importedShare.signing_pub_key_x, + signing_pub_key_y: importedShare.signing_pub_key_y, + encrypted_share: importedShare.encrypted_share, + encrypted_share_metadata: importedShare.encrypted_share_metadata, + node_index: importedShare.node_index, + key_type: importedShare.key_type, + nonce_data: importedShare.nonce_data, + nonce_signature: importedShare.nonce_signature, + sss_endpoint: endpoints[i], + ...extraParams, + }); + } + const p = post>( + endpoints[proxyEndpointNum], + generateJsonRPCObject(JRPC_METHODS.IMPORT_SHARES, { + encrypted: "yes", + use_temp: true, + verifieridentifier: verifier, + temppubx: sessionPubX, + temppuby: sessionPubY, + item: items, + key_type: keyType, + one_key_flow: true, + }), + {}, + { logTracingHeader: config.logRequestTracing } + ).catch((err) => log.error("share req", err)); + promiseArrRequest.push(p); + } else { + for (let i = 0; i < endpoints.length; i += 1) { + const p = post>( endpoints[i], - generateJsonRPCObject(JRPC_METHODS.COMMITMENT_REQUEST, { - messageprefix: "mug00", - keytype: keyType, - tokencommitment: tokenCommitment.slice(2), - temppubx: pubKeyX, - temppuby: pubKeyY, + generateJsonRPCObject(JRPC_METHODS.GET_SHARE_OR_KEY_ASSIGN, { + encrypted: "yes", + use_temp: true, + key_type: keyType, + distributed_metadata: true, verifieridentifier: verifier, - verifier_id: verifierParams.verifier_id, - extended_verifier_id: verifierParams.extended_verifier_id, - is_import_key_flow: true, + temppubx: sessionPubX, + temppuby: sessionPubY, + item: [ + { + ...verifierParams, + idtoken: idToken, + key_type: keyType, + verifieridentifier: verifier, + ...extraParams, + }, + ], + client_time: Math.floor(Date.now() / 1000).toString(), + one_key_flow: true, }), {}, { logTracingHeader: config.logRequestTracing } ); - const r = retryCommitment(p, 4); - promiseArr.push(r); + promiseArrRequest.push(p); + } } - // send share request once k + t number of commitment requests have completed - return Some, (void | JRPCResponse)[]>(promiseArr, (resultArr) => { - const completedRequests = resultArr.filter((x) => { + return Some< + void | JRPCResponse | JRPCResponse, + | { + privateKey: BN; + sessionTokenData: SessionToken[]; + thresholdNonceData: GetOrSetNonceResult; + thresholdPubKey: ExtendedPublicKey; + nodeIndexes: BN[]; + isNewKey: boolean; + serverTimeOffsetResponse?: number; + } + | undefined + >(promiseArrRequest, async (shareResponseResult, sharedState) => { + let thresholdNonceData: GetOrSetNonceResult; + let shareResponses: (void | JRPCResponse)[] = []; + // for import shares case, where result is an array + if (shareResponseResult.length === 1 && shareResponseResult[0] && Array.isArray(shareResponseResult[0].result)) { + // this is for import shares + const importedSharesResult = shareResponseResult[0]; + shareResponseResult[0].result.forEach((res) => { + shareResponses.push({ + id: importedSharesResult.id, + jsonrpc: "2.0", + result: res, + error: importedSharesResult.error, + }); + }); + } else { + shareResponses = shareResponseResult as (void | JRPCResponse)[]; + } + // check if threshold number of nodes have returned the same user public key + const completedRequests = shareResponses.filter((x) => { if (!x || typeof x !== "object") { return false; } @@ -279,461 +418,256 @@ export async function retrieveOrImportShare(params: { } return 
true; }); - - if (finalImportedShares.length > 0) { - // this is a optimization is for imported keys - // for new imported keys registration we need to wait for all nodes to agree on commitment - // for fetching existing imported keys we can rely on threshold nodes commitment - if (overrideExistingKey && completedRequests.length === endpoints.length) { - const requiredNodeResult = completedRequests.find((resp: void | JRPCResponse) => { - if (resp) { - return true; - } - return false; - }); - if (requiredNodeResult) { - return Promise.resolve(resultArr); - } - } else if (!overrideExistingKey && completedRequests.length >= threeFourthsThreshold) { - const nodeSigs: CommitmentRequestResult[] = []; - for (let i = 0; i < completedRequests.length; i += 1) { - const x = completedRequests[i]; - if (!x || typeof x !== "object" || x.error) { - continue; - } - if (x) nodeSigs.push((x as JRPCResponse).result); - } - const existingPubKey = thresholdSame( - nodeSigs.map((x) => x && x.pub_key_x), - halfThreshold - ); - const proxyEndpointNum = getProxyCoordinatorEndpointIndex(endpoints, verifier, verifierParams.verifier_id); - // for import shares, proxy node response is required. - // proxy node returns metadata. - // if user's account already - const requiredNodeIndex = indexes[proxyEndpointNum].toString(10); - - // if not a existing key we need to wait for nodes to agree on commitment - if (existingPubKey || (!existingPubKey && completedRequests.length === endpoints.length)) { - const requiredNodeResult = completedRequests.find((resp: void | JRPCResponse) => { - if (resp && resp.result?.nodeindex === requiredNodeIndex) { - return true; - } - return false; - }); - if (requiredNodeResult) { - return Promise.resolve(resultArr); - } - } - } - } else if (completedRequests.length >= threeFourthsThreshold) { - // this case is for dkg keys - const requiredNodeResult = completedRequests.find((resp: void | JRPCResponse) => { - if (resp) { - return true; - } - return false; - }); - if (requiredNodeResult) { - return Promise.resolve(resultArr); + const pubkeys = shareResponses.map((x) => { + if (x && x.result && x.result.keys[0].public_key) { + return x.result.keys[0].public_key; } + return undefined; + }); + + const thresholdPublicKey = thresholdSame(pubkeys, halfThreshold); + + if (!thresholdPublicKey) { + throw new Error("invalid result from nodes, threshold number of public key results are not matching"); } - return Promise.reject(new Error(`invalid commitment results ${JSON.stringify(resultArr)}`)); - }) - .then((responses) => { - const promiseArrRequest: Promise | JRPCResponse>[] = []; - const nodeSigs: CommitmentRequestResult[] = []; - for (let i = 0; i < responses.length; i += 1) { - const x = responses[i]; - if (!x || typeof x !== "object" || x.error) { - continue; + shareResponses.forEach((x) => { + const requiredShareResponse = x && x.result && x.result.keys[0].public_key && x.result.keys[0]; + if (requiredShareResponse && !thresholdNonceData && !verifierParams.extended_verifier_id) { + const currentPubKey = requiredShareResponse.public_key; + const pubNonce = (requiredShareResponse.nonce_data as v2NonceResultType)?.pubNonce?.x; + if (pubNonce && currentPubKey.X === thresholdPublicKey.X) { + thresholdNonceData = requiredShareResponse.nonce_data; } - if (x) nodeSigs.push((x as JRPCResponse).result); } + }); - // if user's account already - const existingPubKey = thresholdSame( - nodeSigs.map((x) => x && x.pub_key_x), - halfThreshold - ); - - // can only import shares if override existing key is allowed 
or for new non dkg registration - const canImportedShares = overrideExistingKey || (!useDkg && !existingPubKey); - if (canImportedShares) { - const proxyEndpointNum = getProxyCoordinatorEndpointIndex(endpoints, verifier, verifierParams.verifier_id); - const items: Record[] = []; - for (let i = 0; i < endpoints.length; i += 1) { - const x = responses[i]; - if (!x || typeof x !== "object" || x.error) { - continue; + const thresholdReqCount = canImportedShares ? endpoints.length : halfThreshold; + // optimistically run lagrange interpolation once threshold number of shares have been received + // this is matched against the user public key to ensure that shares are consistent + // Note: no need of thresholdMetadataNonce for extended_verifier_id key + if (completedRequests.length >= thresholdReqCount && thresholdPublicKey) { + const sharePromises: Promise[] = []; + const sessionTokenSigPromises: Promise[] = []; + const sessionTokenPromises: Promise[] = []; + const nodeIndexes: BN[] = []; + const sessionTokenData: SessionToken[] = []; + const isNewKeyResponses: { + isNewKey: string; + publicKey: string; + }[] = []; + const serverTimeOffsetResponses: string[] = []; + + for (let i = 0; i < completedRequests.length; i += 1) { + const currentShareResponse = completedRequests[i] as JRPCResponse; + const { + session_tokens: sessionTokens, + session_token_metadata: sessionTokenMetadata, + session_token_sigs: sessionTokenSigs, + session_token_sig_metadata: sessionTokenSigMetadata, + keys, + is_new_key: isNewKey, + server_time_offset: serverTimeOffsetResponse, + } = currentShareResponse.result; + + isNewKeyResponses.push({ + isNewKey, + publicKey: currentShareResponse.result?.keys[0]?.public_key?.X || "", + }); + serverTimeOffsetResponses.push(serverTimeOffsetResponse || "0"); + + if (sessionTokenSigs?.length > 0) { + // decrypt sessionSig if enc metadata is sent + if (sessionTokenSigMetadata && sessionTokenSigMetadata[0]?.ephemPublicKey) { + sessionTokenSigPromises.push( + decryptNodeData(sessionTokenSigMetadata[0], sessionTokenSigs[0], sessionAuthKey).catch((err) => + log.error("session sig decryption", err) + ) + ); + } else { + sessionTokenSigPromises.push(Promise.resolve(Buffer.from(sessionTokenSigs[0], "hex"))); } - const importedShare = finalImportedShares[i]; - items.push({ - ...verifierParams, - idtoken: idToken, - nodesignatures: nodeSigs, - verifieridentifier: verifier, - pub_key_x: importedShare.oauth_pub_key_x, - pub_key_y: importedShare.oauth_pub_key_y, - signing_pub_key_x: importedShare.signing_pub_key_x, - signing_pub_key_y: importedShare.signing_pub_key_y, - encrypted_share: importedShare.encrypted_share, - encrypted_share_metadata: importedShare.encrypted_share_metadata, - node_index: importedShare.node_index, - key_type: importedShare.key_type, - nonce_data: importedShare.nonce_data, - nonce_signature: importedShare.nonce_signature, - sss_endpoint: endpoints[i], - ...extraParams, - }); + } else { + sessionTokenSigPromises.push(Promise.resolve(undefined)); } - const p = post>( - endpoints[proxyEndpointNum], - generateJsonRPCObject(JRPC_METHODS.IMPORT_SHARES, { - encrypted: "yes", - use_temp: true, - item: items, - key_type: keyType, - one_key_flow: true, - }), - {}, - { logTracingHeader: config.logRequestTracing } - ).catch((err) => log.error("share req", err)); - promiseArrRequest.push(p); - } else { - for (let i = 0; i < endpoints.length; i += 1) { - const x = responses[i]; - if (!x || typeof x !== "object" || x.error) { - continue; + + if (sessionTokens?.length > 0) { + // decrypt 
session token if enc metadata is sent + if (sessionTokenMetadata && sessionTokenMetadata[0]?.ephemPublicKey) { + sessionTokenPromises.push( + decryptNodeData(sessionTokenMetadata[0], sessionTokens[0], sessionAuthKey).catch((err) => + log.error("session token sig decryption", err) + ) + ); + } else { + sessionTokenPromises.push(Promise.resolve(Buffer.from(sessionTokens[0], "base64"))); } - const p = post>( - endpoints[i], - generateJsonRPCObject(JRPC_METHODS.GET_SHARE_OR_KEY_ASSIGN, { - encrypted: "yes", - use_temp: true, - key_type: keyType, - distributed_metadata: true, - item: [ - { - ...verifierParams, - idtoken: idToken, - key_type: keyType, - nodesignatures: nodeSigs, - verifieridentifier: verifier, - ...extraParams, - }, - ], - client_time: Math.floor(Date.now() / 1000).toString(), - one_key_flow: true, - }), - {}, - { logTracingHeader: config.logRequestTracing } - ); - promiseArrRequest.push(p); + } else { + sessionTokenPromises.push(Promise.resolve(undefined)); } - } - let thresholdNonceData: GetOrSetNonceResult; - return Some< - void | JRPCResponse | JRPCResponse, - | { - privateKey: BN; - sessionTokenData: SessionToken[]; - thresholdNonceData: GetOrSetNonceResult; - thresholdPubKey: ExtendedPublicKey; - nodeIndexes: BN[]; - isNewKey: boolean; - serverTimeOffsetResponse?: number; + + if (keys?.length > 0) { + const latestKey = currentShareResponse.result.keys[0]; + nodeIndexes.push(new BN(latestKey.node_index)); + if (latestKey.share_metadata) { + sharePromises.push( + decryptNodeDataWithPadding(latestKey.share_metadata, Buffer.from(latestKey.share, "base64").toString("binary"), sessionAuthKey).catch( + (err) => log.error("share decryption", err) + ) + ); } - | undefined - >(promiseArrRequest, async (shareResponseResult, sharedState) => { - let shareResponses: (void | JRPCResponse)[] = []; - // for import shares case, where result is an array - if (shareResponseResult.length === 1 && shareResponseResult[0] && Array.isArray(shareResponseResult[0].result)) { - // this is for import shares - const importedSharesResult = shareResponseResult[0]; - shareResponseResult[0].result.forEach((res) => { - shareResponses.push({ - id: importedSharesResult.id, - jsonrpc: "2.0", - result: res, - error: importedSharesResult.error, - }); - }); } else { - shareResponses = shareResponseResult as (void | JRPCResponse)[]; + nodeIndexes.push(undefined); + sharePromises.push(Promise.resolve(undefined)); } - // check if threshold number of nodes have returned the same user public key - const completedRequests = shareResponses.filter((x) => { - if (!x || typeof x !== "object") { - return false; - } - if (x.error) { - return false; - } + } + const allPromises = await Promise.all(sharePromises.concat(sessionTokenSigPromises).concat(sessionTokenPromises)); + const sharesResolved = allPromises.slice(0, sharePromises.length); + const sessionSigsResolved = allPromises.slice(sharePromises.length, sharePromises.length + sessionTokenSigPromises.length); + const sessionTokensResolved = allPromises.slice(sharePromises.length + sessionTokenSigPromises.length, allPromises.length); + const validSigs = sessionSigsResolved.filter((sig) => { + if (sig) { return true; - }); - const pubkeys = shareResponses.map((x) => { - if (x && x.result && x.result.keys[0].public_key) { - return x.result.keys[0].public_key; - } - return undefined; - }); - - const thresholdPublicKey = thresholdSame(pubkeys, halfThreshold); - - if (!thresholdPublicKey) { - throw new Error("invalid result from nodes, threshold number of public key results are 
not matching"); } + return false; + }); - shareResponses.forEach((x) => { - const requiredShareResponse = x && x.result && x.result.keys[0].public_key && x.result.keys[0]; - if (requiredShareResponse && !thresholdNonceData && !verifierParams.extended_verifier_id) { - const currentPubKey = requiredShareResponse.public_key; - const pubNonce = (requiredShareResponse.nonce_data as v2NonceResultType)?.pubNonce?.x; - if (pubNonce && currentPubKey.X === thresholdPublicKey.X) { - thresholdNonceData = requiredShareResponse.nonce_data; - } - } - }); - - const thresholdReqCount = canImportedShares ? endpoints.length : halfThreshold; - // optimistically run lagrange interpolation once threshold number of shares have been received - // this is matched against the user public key to ensure that shares are consistent - // Note: no need of thresholdMetadataNonce for extended_verifier_id key - if (completedRequests.length >= thresholdReqCount && thresholdPublicKey) { - const sharePromises: Promise[] = []; - const sessionTokenSigPromises: Promise[] = []; - const sessionTokenPromises: Promise[] = []; - const nodeIndexes: BN[] = []; - const sessionTokenData: SessionToken[] = []; - const isNewKeyResponses: { - isNewKey: string; - publicKey: string; - }[] = []; - const serverTimeOffsetResponses: string[] = []; - - for (let i = 0; i < completedRequests.length; i += 1) { - const currentShareResponse = completedRequests[i] as JRPCResponse; - const { - session_tokens: sessionTokens, - session_token_metadata: sessionTokenMetadata, - session_token_sigs: sessionTokenSigs, - session_token_sig_metadata: sessionTokenSigMetadata, - keys, - is_new_key: isNewKey, - server_time_offset: serverTimeOffsetResponse, - } = currentShareResponse.result; - - isNewKeyResponses.push({ - isNewKey, - publicKey: currentShareResponse.result?.keys[0]?.public_key?.X || "", - }); - serverTimeOffsetResponses.push(serverTimeOffsetResponse || "0"); - - if (sessionTokenSigs?.length > 0) { - // decrypt sessionSig if enc metadata is sent - if (sessionTokenSigMetadata && sessionTokenSigMetadata[0]?.ephemPublicKey) { - sessionTokenSigPromises.push( - decryptNodeData(sessionTokenSigMetadata[0], sessionTokenSigs[0], sessionAuthKey).catch((err) => - log.error("session sig decryption", err) - ) - ); - } else { - sessionTokenSigPromises.push(Promise.resolve(Buffer.from(sessionTokenSigs[0], "hex"))); - } - } else { - sessionTokenSigPromises.push(Promise.resolve(undefined)); - } + if (!verifierParams.extended_verifier_id && validSigs.length < halfThreshold) { + throw new Error(`Insufficient number of signatures from nodes, required: ${halfThreshold}, found: ${validSigs.length}`); + } - if (sessionTokens?.length > 0) { - // decrypt session token if enc metadata is sent - if (sessionTokenMetadata && sessionTokenMetadata[0]?.ephemPublicKey) { - sessionTokenPromises.push( - decryptNodeData(sessionTokenMetadata[0], sessionTokens[0], sessionAuthKey).catch((err) => - log.error("session token sig decryption", err) - ) - ); - } else { - sessionTokenPromises.push(Promise.resolve(Buffer.from(sessionTokens[0], "base64"))); - } - } else { - sessionTokenPromises.push(Promise.resolve(undefined)); - } + const validTokens = sessionTokensResolved.filter((token) => { + if (token) { + return true; + } + return false; + }); - if (keys?.length > 0) { - const latestKey = currentShareResponse.result.keys[0]; - nodeIndexes.push(new BN(latestKey.node_index)); - if (latestKey.share_metadata) { - sharePromises.push( - decryptNodeDataWithPadding( - latestKey.share_metadata, - 
Buffer.from(latestKey.share, "base64").toString("binary"), - sessionAuthKey - ).catch((err) => log.error("share decryption", err)) - ); - } - } else { - nodeIndexes.push(undefined); - sharePromises.push(Promise.resolve(undefined)); - } - } - const allPromises = await Promise.all(sharePromises.concat(sessionTokenSigPromises).concat(sessionTokenPromises)); - const sharesResolved = allPromises.slice(0, sharePromises.length); - const sessionSigsResolved = allPromises.slice(sharePromises.length, sharePromises.length + sessionTokenSigPromises.length); - const sessionTokensResolved = allPromises.slice(sharePromises.length + sessionTokenSigPromises.length, allPromises.length); - const validSigs = sessionSigsResolved.filter((sig) => { - if (sig) { - return true; - } - return false; + if (!verifierParams.extended_verifier_id && validTokens.length < halfThreshold) { + throw new Error(`Insufficient number of session tokens from nodes, required: ${halfThreshold}, found: ${validTokens.length}`); + } + sessionTokensResolved.forEach((x, index) => { + if (!x || !sessionSigsResolved[index]) sessionTokenData.push(undefined); + else + sessionTokenData.push({ + token: x.toString("base64"), + signature: (sessionSigsResolved[index] as Buffer).toString("hex"), + node_pubx: (completedRequests[index] as JRPCResponse).result.node_pubx, + node_puby: (completedRequests[index] as JRPCResponse).result.node_puby, }); + }); - if (!verifierParams.extended_verifier_id && validSigs.length < halfThreshold) { - throw new Error(`Insufficient number of signatures from nodes, required: ${halfThreshold}, found: ${validSigs.length}`); - } - - const validTokens = sessionTokensResolved.filter((token) => { - if (token) { - return true; - } - return false; - }); + if (sharedState.resolved) return undefined; - if (!verifierParams.extended_verifier_id && validTokens.length < halfThreshold) { - throw new Error(`Insufficient number of session tokens from nodes, required: ${halfThreshold}, found: ${validTokens.length}`); + const decryptedShares = sharesResolved.reduce( + (acc, curr, index) => { + if (curr) { + acc.push({ index: nodeIndexes[index], value: new BN(curr) }); } - sessionTokensResolved.forEach((x, index) => { - if (!x || !sessionSigsResolved[index]) sessionTokenData.push(undefined); - else - sessionTokenData.push({ - token: x.toString("base64"), - signature: (sessionSigsResolved[index] as Buffer).toString("hex"), - node_pubx: (completedRequests[index] as JRPCResponse).result.node_pubx, - node_puby: (completedRequests[index] as JRPCResponse).result.node_puby, - }); - }); - - if (sharedState.resolved) return undefined; + return acc; + }, + [] as { index: BN; value: BN }[] + ); + // run lagrange interpolation on all subsets, faster in the optimistic scenario than berlekamp-welch due to early exit + const allCombis = kCombinations(decryptedShares.length, halfThreshold); + + let privateKey: BN | null = null; + for (let j = 0; j < allCombis.length; j += 1) { + const currentCombi = allCombis[j]; + const currentCombiShares = decryptedShares.filter((_, index) => currentCombi.includes(index)); + const shares = currentCombiShares.map((x) => x.value); + const indices = currentCombiShares.map((x) => x.index); + const derivedPrivateKey = lagrangeInterpolation(ecCurve, shares, indices); + if (!derivedPrivateKey) continue; + const decryptedPubKey = derivePubKey(ecCurve, derivedPrivateKey); + const decryptedPubKeyX = decryptedPubKey.getX(); + const decryptedPubKeyY = decryptedPubKey.getY(); + + if (decryptedPubKeyX.cmp(new 
BN(thresholdPublicKey.X, 16)) === 0 && decryptedPubKeyY.cmp(new BN(thresholdPublicKey.Y, 16)) === 0) { + privateKey = derivedPrivateKey; + break; + } + } - const decryptedShares = sharesResolved.reduce( - (acc, curr, index) => { - if (curr) { - acc.push({ index: nodeIndexes[index], value: new BN(curr) }); - } - return acc; - }, - [] as { index: BN; value: BN }[] - ); - // run lagrange interpolation on all subsets, faster in the optimistic scenario than berlekamp-welch due to early exit - const allCombis = kCombinations(decryptedShares.length, halfThreshold); - - let privateKey: BN | null = null; - for (let j = 0; j < allCombis.length; j += 1) { - const currentCombi = allCombis[j]; - const currentCombiShares = decryptedShares.filter((_, index) => currentCombi.includes(index)); - const shares = currentCombiShares.map((x) => x.value); - const indices = currentCombiShares.map((x) => x.index); - const derivedPrivateKey = lagrangeInterpolation(ecCurve, shares, indices); - if (!derivedPrivateKey) continue; - const decryptedPubKey = derivePubKey(ecCurve, derivedPrivateKey); - const decryptedPubKeyX = decryptedPubKey.getX(); - const decryptedPubKeyY = decryptedPubKey.getY(); - - if (decryptedPubKeyX.cmp(new BN(thresholdPublicKey.X, 16)) === 0 && decryptedPubKeyY.cmp(new BN(thresholdPublicKey.Y, 16)) === 0) { - privateKey = derivedPrivateKey; - break; - } - } + if (privateKey === undefined || privateKey === null) { + throw new Error("could not derive private key"); + } - if (privateKey === undefined || privateKey === null) { - throw new Error("could not derive private key"); - } + let isNewKey = false; + isNewKeyResponses.forEach((x) => { + if (x.isNewKey === "true" && x.publicKey.toLowerCase() === thresholdPublicKey.X.toLowerCase()) { + isNewKey = true; + } + }); - let isNewKey = false; - isNewKeyResponses.forEach((x) => { - if (x.isNewKey === "true" && x.publicKey.toLowerCase() === thresholdPublicKey.X.toLowerCase()) { - isNewKey = true; - } - }); + // Convert each string timestamp to a number + const serverOffsetTimes = serverTimeOffsetResponses.map((timestamp) => Number.parseInt(timestamp, 10)); - // Convert each string timestamp to a number - const serverOffsetTimes = serverTimeOffsetResponses.map((timestamp) => Number.parseInt(timestamp, 10)); - - return { - privateKey, - sessionTokenData, - thresholdNonceData, - nodeIndexes, - thresholdPubKey: thresholdPublicKey, - isNewKey, - serverTimeOffsetResponse: serverTimeOffset || calculateMedian(serverOffsetTimes), - }; - } - if (completedRequests.length < thresholdReqCount) { - throw new Error(`Waiting for results from more nodes, pending: ${thresholdReqCount - completedRequests.length}`); - } + return { + privateKey, + sessionTokenData, + thresholdNonceData, + nodeIndexes, + thresholdPubKey: thresholdPublicKey, + isNewKey, + serverTimeOffsetResponse: serverTimeOffset || calculateMedian(serverOffsetTimes), + }; + } + if (completedRequests.length < thresholdReqCount) { + throw new Error(`Waiting for results from more nodes, pending: ${thresholdReqCount - completedRequests.length}`); + } + throw new Error( + `Invalid results, threshold pub key: ${thresholdPublicKey}, nonce data found: ${!!thresholdNonceData}, extended verifierId: ${verifierParams.extended_verifier_id}` + ); + }).then(async (res) => { + const { privateKey, thresholdPubKey, sessionTokenData, nodeIndexes, thresholdNonceData, isNewKey, serverTimeOffsetResponse } = res; + let nonceResult = thresholdNonceData; + if (!privateKey) throw new Error("Invalid private key returned"); + + const 
oAuthKey = privateKey; + const oAuthPubKey = derivePubKey(ecCurve, oAuthKey); + const oAuthPubkeyX = oAuthPubKey.getX().toString("hex", 64); + const oAuthPubkeyY = oAuthPubKey.getY().toString("hex", 64); + + // if both thresholdNonceData and extended_verifier_id are not available + // then we need to throw other wise address would be incorrect. + if (!nonceResult && !verifierParams.extended_verifier_id && !LEGACY_NETWORKS_ROUTE_MAP[network as TORUS_LEGACY_NETWORK_TYPE]) { + // NOTE: dont use padded pub key anywhere in metadata apis, send pub keys as is received from nodes. + const metadataNonceResult = await getOrSetSapphireMetadataNonce(network, thresholdPubKey.X, thresholdPubKey.Y, serverTimeOffset, oAuthKey); + // rechecking nonceResult to avoid promise race condition. + if (metadataNonceResult && !thresholdNonceData) { + nonceResult = metadataNonceResult; + } else { throw new Error( - `Invalid results, threshold pub key: ${thresholdPublicKey}, nonce data found: ${!!thresholdNonceData}, extended verifierId: ${verifierParams.extended_verifier_id}` + `invalid metadata result from nodes, nonce metadata is empty for verifier: ${verifier} and verifierId: ${verifierParams.verifier_id}` ); - }); - }) - .then(async (res) => { - const { privateKey, thresholdPubKey, sessionTokenData, nodeIndexes, thresholdNonceData, isNewKey, serverTimeOffsetResponse } = res; - let nonceResult = thresholdNonceData; - if (!privateKey) throw new Error("Invalid private key returned"); - - const oAuthKey = privateKey; - const oAuthPubKey = derivePubKey(ecCurve, oAuthKey); - const oAuthPubkeyX = oAuthPubKey.getX().toString("hex", 64); - const oAuthPubkeyY = oAuthPubKey.getY().toString("hex", 64); - - // if both thresholdNonceData and extended_verifier_id are not available - // then we need to throw other wise address would be incorrect. - if (!nonceResult && !verifierParams.extended_verifier_id && !LEGACY_NETWORKS_ROUTE_MAP[network as TORUS_LEGACY_NETWORK_TYPE]) { - // NOTE: dont use padded pub key anywhere in metadata apis, send pub keys as is received from nodes. - const metadataNonceResult = await getOrSetSapphireMetadataNonce(network, thresholdPubKey.X, thresholdPubKey.Y, serverTimeOffset, oAuthKey); - // rechecking nonceResult to avoid promise race condition. - if (metadataNonceResult && !thresholdNonceData) { - nonceResult = metadataNonceResult; - } else { - throw new Error( - `invalid metadata result from nodes, nonce metadata is empty for verifier: ${verifier} and verifierId: ${verifierParams.verifier_id}` - ); - } } - let metadataNonce = new BN(nonceResult?.nonce ? nonceResult.nonce.padStart(64, "0") : "0", "hex"); - let finalPubKey: curve.base.BasePoint; - let pubNonce: { X: string; Y: string } | undefined; - let typeOfUser: UserType = "v1"; - // extended_verifier_id is only exception for torus-test-health verifier - // otherwise extended verifier id should not even return shares. 
- if (verifierParams.extended_verifier_id) { - typeOfUser = "v2"; - // for tss key no need to add pub nonce - finalPubKey = ecCurve.keyFromPublic({ x: oAuthPubkeyX, y: oAuthPubkeyY }).getPublic(); - } else if (LEGACY_NETWORKS_ROUTE_MAP[network as TORUS_LEGACY_NETWORK_TYPE]) { - if (enableOneKey) { - nonceResult = await getOrSetNonce(legacyMetadataHost, ecCurve, serverTimeOffsetResponse, oAuthPubkeyX, oAuthPubkeyY, oAuthKey, !isNewKey); - metadataNonce = new BN(nonceResult.nonce || "0", 16); - typeOfUser = nonceResult.typeOfUser; - if (typeOfUser === "v2") { - pubNonce = { X: (nonceResult as v2NonceResultType).pubNonce.x, Y: (nonceResult as v2NonceResultType).pubNonce.y }; - finalPubKey = ecCurve - .keyFromPublic({ x: oAuthPubkeyX, y: oAuthPubkeyY }) - .getPublic() - .add( - ecCurve - .keyFromPublic({ x: (nonceResult as v2NonceResultType).pubNonce.x, y: (nonceResult as v2NonceResultType).pubNonce.y }) - .getPublic() - ); - } else { - typeOfUser = "v1"; - // for imported keys in legacy networks - metadataNonce = await getMetadata(legacyMetadataHost, { pub_key_X: oAuthPubkeyX, pub_key_Y: oAuthPubkeyY }); - const privateKeyWithNonce = oAuthKey.add(metadataNonce).umod(ecCurve.n); - finalPubKey = ecCurve.keyFromPrivate(privateKeyWithNonce.toString(16, 64), "hex").getPublic(); - } + } + let metadataNonce = new BN(nonceResult?.nonce ? nonceResult.nonce.padStart(64, "0") : "0", "hex"); + let finalPubKey: curve.base.BasePoint; + let pubNonce: { X: string; Y: string } | undefined; + let typeOfUser: UserType = "v1"; + // extended_verifier_id is only exception for torus-test-health verifier + // otherwise extended verifier id should not even return shares. + if (verifierParams.extended_verifier_id) { + typeOfUser = "v2"; + // for tss key no need to add pub nonce + finalPubKey = ecCurve.keyFromPublic({ x: oAuthPubkeyX, y: oAuthPubkeyY }).getPublic(); + } else if (LEGACY_NETWORKS_ROUTE_MAP[network as TORUS_LEGACY_NETWORK_TYPE]) { + if (enableOneKey) { + nonceResult = await getOrSetNonce(legacyMetadataHost, ecCurve, serverTimeOffsetResponse, oAuthPubkeyX, oAuthPubkeyY, oAuthKey, !isNewKey); + metadataNonce = new BN(nonceResult.nonce || "0", 16); + typeOfUser = nonceResult.typeOfUser; + if (typeOfUser === "v2") { + pubNonce = { X: (nonceResult as v2NonceResultType).pubNonce.x, Y: (nonceResult as v2NonceResultType).pubNonce.y }; + finalPubKey = ecCurve + .keyFromPublic({ x: oAuthPubkeyX, y: oAuthPubkeyY }) + .getPublic() + .add( + ecCurve + .keyFromPublic({ x: (nonceResult as v2NonceResultType).pubNonce.x, y: (nonceResult as v2NonceResultType).pubNonce.y }) + .getPublic() + ); } else { typeOfUser = "v1"; // for imported keys in legacy networks @@ -742,96 +676,103 @@ export async function retrieveOrImportShare(params: { finalPubKey = ecCurve.keyFromPrivate(privateKeyWithNonce.toString(16, 64), "hex").getPublic(); } } else { - typeOfUser = "v2"; - finalPubKey = ecCurve - .keyFromPublic({ x: oAuthPubkeyX, y: oAuthPubkeyY }) - .getPublic() - .add( - ecCurve.keyFromPublic({ x: (nonceResult as v2NonceResultType).pubNonce.x, y: (nonceResult as v2NonceResultType).pubNonce.y }).getPublic() - ); - pubNonce = { X: (nonceResult as v2NonceResultType).pubNonce.x, Y: (nonceResult as v2NonceResultType).pubNonce.y }; + typeOfUser = "v1"; + // for imported keys in legacy networks + metadataNonce = await getMetadata(legacyMetadataHost, { pub_key_X: oAuthPubkeyX, pub_key_Y: oAuthPubkeyY }); + const privateKeyWithNonce = oAuthKey.add(metadataNonce).umod(ecCurve.n); + finalPubKey = 
ecCurve.keyFromPrivate(privateKeyWithNonce.toString(16, 64), "hex").getPublic(); } + } else { + typeOfUser = "v2"; + finalPubKey = ecCurve + .keyFromPublic({ x: oAuthPubkeyX, y: oAuthPubkeyY }) + .getPublic() + .add( + ecCurve.keyFromPublic({ x: (nonceResult as v2NonceResultType).pubNonce.x, y: (nonceResult as v2NonceResultType).pubNonce.y }).getPublic() + ); + pubNonce = { X: (nonceResult as v2NonceResultType).pubNonce.x, Y: (nonceResult as v2NonceResultType).pubNonce.y }; + } - if (!finalPubKey) { - throw new Error("Invalid public key, this might be a bug, please report this to web3auth team"); - } + if (!finalPubKey) { + throw new Error("Invalid public key, this might be a bug, please report this to web3auth team"); + } - let finalPrivKey = ""; // it is empty for v2 user upgraded to 2/n - let isUpgraded: boolean | null = false; - const oAuthKeyAddress = generateAddressFromPrivKey(keyType, oAuthKey); - // deriving address from pub key coz pubkey is always available - // but finalPrivKey won't be available for v2 user upgraded to 2/n - const finalWalletAddress = generateAddressFromPubKey(keyType, finalPubKey.getX(), finalPubKey.getY()); - let keyWithNonce = ""; - if (typeOfUser === "v1") { - isUpgraded = null; - } else if (typeOfUser === "v2") { - isUpgraded = metadataNonce.eq(new BN("0")); - } + let finalPrivKey = ""; // it is empty for v2 user upgraded to 2/n + let isUpgraded: boolean | null = false; + const oAuthKeyAddress = generateAddressFromPrivKey(keyType, oAuthKey); + // deriving address from pub key coz pubkey is always available + // but finalPrivKey won't be available for v2 user upgraded to 2/n + const finalWalletAddress = generateAddressFromPubKey(keyType, finalPubKey.getX(), finalPubKey.getY()); + let keyWithNonce = ""; + if (typeOfUser === "v1") { + isUpgraded = null; + } else if (typeOfUser === "v2") { + isUpgraded = metadataNonce.eq(new BN("0")); + } - if (typeOfUser === "v1" || (typeOfUser === "v2" && metadataNonce.gt(new BN(0)))) { - const privateKeyWithNonce = oAuthKey.add(metadataNonce).umod(ecCurve.n); - keyWithNonce = privateKeyWithNonce.toString("hex", 64); - } - if (keyType === KEY_TYPE.SECP256K1) { - finalPrivKey = keyWithNonce; - } else if (keyType === KEY_TYPE.ED25519) { - if (keyWithNonce && !nonceResult.seed) { - throw new Error("Invalid data, seed data is missing for ed25519 key, Please report this bug"); - } else if (keyWithNonce && nonceResult.seed) { - // console.log("nonceResult.seed", nonceResult.seed, keyWithNonce); - const decryptedSeed = await decryptSeedData(nonceResult.seed, new BN(keyWithNonce, "hex")); - finalPrivKey = decryptedSeed.toString("hex"); - } - } else { - throw new Error(`Invalid keyType: ${keyType}`); + if (typeOfUser === "v1" || (typeOfUser === "v2" && metadataNonce.gt(new BN(0)))) { + const privateKeyWithNonce = oAuthKey.add(metadataNonce).umod(ecCurve.n); + keyWithNonce = privateKeyWithNonce.toString("hex", 64); + } + if (keyType === KEY_TYPE.SECP256K1) { + finalPrivKey = keyWithNonce; + } else if (keyType === KEY_TYPE.ED25519) { + if (keyWithNonce && !nonceResult.seed) { + throw new Error("Invalid data, seed data is missing for ed25519 key, Please report this bug"); + } else if (keyWithNonce && nonceResult.seed) { + // console.log("nonceResult.seed", nonceResult.seed, keyWithNonce); + const decryptedSeed = await decryptSeedData(nonceResult.seed, new BN(keyWithNonce, "hex")); + finalPrivKey = decryptedSeed.toString("hex"); } + } else { + throw new Error(`Invalid keyType: ${keyType}`); + } - let postboxKey = oAuthKey; - let 
postboxPubX = oAuthPubkeyX; - let postboxPubY = oAuthPubkeyY; - if (keyType === KEY_TYPE.ED25519) { - const { scalar, point } = getSecpKeyFromEd25519(privateKey); - postboxKey = scalar; - postboxPubX = point.getX().toString(16, 64); - postboxPubY = point.getY().toString(16, 64); - if (thresholdPubKey.SignerX.padStart(64, "0") !== postboxPubX || thresholdPubKey.SignerY.padStart(64, "0") !== postboxPubY) { - throw new Error("Invalid postbox key"); - } + let postboxKey = oAuthKey; + let postboxPubX = oAuthPubkeyX; + let postboxPubY = oAuthPubkeyY; + if (keyType === KEY_TYPE.ED25519) { + const { scalar, point } = getSecpKeyFromEd25519(privateKey); + postboxKey = scalar; + postboxPubX = point.getX().toString(16, 64); + postboxPubY = point.getY().toString(16, 64); + if (thresholdPubKey.SignerX.padStart(64, "0") !== postboxPubX || thresholdPubKey.SignerY.padStart(64, "0") !== postboxPubY) { + throw new Error("Invalid postbox key"); } - // return reconstructed private key and ethereum address - return { - finalKeyData: { - walletAddress: finalWalletAddress, - X: finalPubKey.getX().toString(16, 64), // this is final pub x user before and after updating to 2/n - Y: finalPubKey.getY().toString(16, 64), // this is final pub y user before and after updating to 2/n - privKey: finalPrivKey, - }, - oAuthKeyData: { - walletAddress: oAuthKeyAddress, - X: oAuthPubkeyX, - Y: oAuthPubkeyY, - privKey: oAuthKey.toString("hex", 64), - }, - postboxKeyData: { - privKey: postboxKey.toString("hex", 64), - X: postboxPubX, - Y: postboxPubY, - }, - sessionData: { - sessionTokenData, - sessionAuthKey: sessionAuthKey.toString("hex").padStart(64, "0"), - }, - metadata: { - pubNonce, - nonce: metadataNonce, - typeOfUser, - upgraded: isUpgraded, - serverTimeOffset: serverTimeOffsetResponse, - }, - nodesData: { - nodeIndexes: nodeIndexes.map((x) => x.toNumber()), - }, - } as TorusKey; - }); + } + // return reconstructed private key and ethereum address + return { + finalKeyData: { + walletAddress: finalWalletAddress, + X: finalPubKey.getX().toString(16, 64), // this is final pub x user before and after updating to 2/n + Y: finalPubKey.getY().toString(16, 64), // this is final pub y user before and after updating to 2/n + privKey: finalPrivKey, + }, + oAuthKeyData: { + walletAddress: oAuthKeyAddress, + X: oAuthPubkeyX, + Y: oAuthPubkeyY, + privKey: oAuthKey.toString("hex", 64), + }, + postboxKeyData: { + privKey: postboxKey.toString("hex", 64), + X: postboxPubX, + Y: postboxPubY, + }, + sessionData: { + sessionTokenData, + sessionAuthKey: sessionAuthKey.toString("hex").padStart(64, "0"), + }, + metadata: { + pubNonce, + nonce: metadataNonce, + typeOfUser, + upgraded: isUpgraded, + serverTimeOffset: serverTimeOffsetResponse, + }, + nodesData: { + nodeIndexes: nodeIndexes.map((x) => x.toNumber()), + }, + } as TorusKey; + }); } diff --git a/src/interfaces.ts b/src/interfaces.ts index b55a5e8..bcf573c 100644 --- a/src/interfaces.ts +++ b/src/interfaces.ts @@ -57,7 +57,7 @@ export interface LegacyVerifierLookupResponse { server_time_offset?: string; } -export interface VerifierLookupResponse { +export interface GetORSetKeyResponse { keys: { pub_key_X: string; pub_key_Y: string; @@ -70,13 +70,15 @@ export interface VerifierLookupResponse { server_time_offset?: string; } -export interface CommitmentRequestResult { - signature: string; - data: string; - nodepubx: string; - nodepuby: string; - nodeindex: string; - pub_key_x: string; +export interface VerifierLookupResponse { + keys: { + pub_key_X: string; + pub_key_Y: string; + 
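+    // likely the secp256k1 signer key, returned when the signing curve differs
+    // from the key's own curve (e.g. ed25519 accounts, cf. getSecpKeyFromEd25519
+    // in nodeUtils); mirrors the signing_pub_key_x/y sent in the import-shares flow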
signing_pub_key_X?: string; + signing_pub_key_Y?: string; + address: string; + }[]; + server_time_offset?: string; } export interface JRPCResponse { @@ -91,12 +93,17 @@ export interface JRPCResponse { } export interface KeyLookupResult { - keyResult: Pick; + keyResult: Pick; nodeIndexes: number[]; serverTimeOffset: number; - errorResult: JRPCResponse["error"]; + errorResult: JRPCResponse["error"]; nonceResult?: GetOrSetNonceResult; } +export interface VerifierLookupResult { + keyResult: Pick; + serverTimeOffset: number; + errorResult: JRPCResponse["error"]; +} export type EciesHex = { [key in keyof Ecies]: string; diff --git a/test/sapphire_devnet.test.ts b/test/sapphire_devnet.test.ts index ccb7ab6..c39bea3 100644 --- a/test/sapphire_devnet.test.ts +++ b/test/sapphire_devnet.test.ts @@ -21,7 +21,7 @@ const TORUS_TEST_VERIFIER = "torus-test-health"; const TORUS_TEST_AGGREGATE_VERIFIER = "torus-test-health-aggregate"; const HashEnabledVerifier = "torus-test-verifierid-hash"; -describe("torus utils sapphire devnet", function () { +describe.only("torus utils sapphire devnet", function () { let torus: TorusUtils; let TORUS_NODE_MANAGER: NodeDetailManager; @@ -74,7 +74,7 @@ describe("torus utils sapphire devnet", function () { }); }); - it("should be able to login a v1 user", async function () { + it.only("should be able to login a v1 user", async function () { const email = "himanshu@tor.us"; const verifier = "google-lrc"; const token = generateIdToken(email, "ES256"); From 9ad5596da610ba00a3e64aa6a44b396683b39387 Mon Sep 17 00:00:00 2001 From: himanshu Date: Mon, 26 Aug 2024 15:12:54 +0530 Subject: [PATCH 2/9] fix test --- test/sapphire_devnet.test.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/test/sapphire_devnet.test.ts b/test/sapphire_devnet.test.ts index c39bea3..3f324a2 100644 --- a/test/sapphire_devnet.test.ts +++ b/test/sapphire_devnet.test.ts @@ -74,7 +74,7 @@ describe.only("torus utils sapphire devnet", function () { }); }); - it.only("should be able to login a v1 user", async function () { + it("should be able to login a v1 user", async function () { const email = "himanshu@tor.us"; const verifier = "google-lrc"; const token = generateIdToken(email, "ES256"); From 8d999e59aeddf9e7ae12206e3860805b35929506 Mon Sep 17 00:00:00 2001 From: himanshu Date: Thu, 29 Aug 2024 09:52:21 +0530 Subject: [PATCH 3/9] temp test --- test/sapphire_devnet.test.ts | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/test/sapphire_devnet.test.ts b/test/sapphire_devnet.test.ts index 3f324a2..7a96e50 100644 --- a/test/sapphire_devnet.test.ts +++ b/test/sapphire_devnet.test.ts @@ -74,7 +74,7 @@ describe.only("torus utils sapphire devnet", function () { }); }); - it("should be able to login a v1 user", async function () { + it.only("should be able to login a v1 user", async function () { const email = "himanshu@tor.us"; const verifier = "google-lrc"; const token = generateIdToken(email, "ES256"); @@ -122,6 +122,11 @@ describe.only("torus utils sapphire devnet", function () { }, nodesData: retrieveSharesResponse.nodesData, }); + const retrieveSharesResponse1 = await legacyTorus.retrieveShares( + getRetrieveSharesParams(torusNodeEndpoints, torusIndexes, TORUS_TEST_VERIFIER, { verifier_id: email }, token, torusNodePub) + ); + // eslint-disable-next-line no-console + console.log("retrieveSharesResponse1", retrieveSharesResponse1); }); it("should fetch user type and public address of legacy v2 user", async function () { From f83417e0b41acdc682eab2a9e5cbf8bfaae8e76b Mon 
Sep 17 00:00:00 2001 From: himanshu Date: Thu, 5 Sep 2024 18:38:48 +0530 Subject: [PATCH 4/9] error check fixed for non commitment lookups --- src/helpers/nodeUtils.ts | 14 ++++++++------ test/sapphire_devnet.test.ts | 7 +------ test/testnet.test.ts | 2 +- 3 files changed, 10 insertions(+), 13 deletions(-) diff --git a/src/helpers/nodeUtils.ts b/src/helpers/nodeUtils.ts index 5a1e85f..cba9486 100644 --- a/src/helpers/nodeUtils.ts +++ b/src/helpers/nodeUtils.ts @@ -88,7 +88,7 @@ export const GetPubKeyOrKeyAssign = async (params: { }); const errorResult = thresholdSame( - lookupPubKeys.map((x2) => x2 && x2.error), + lookupResults.map((x2) => x2 && x2.error), minThreshold ); @@ -191,7 +191,7 @@ export const VerifierLookupRequest = async (params: { }); const errorResult = thresholdSame( - lookupPubKeys.map((x2) => x2 && x2.error), + lookupResults.map((x2) => x2 && x2.error), minThreshold ); @@ -283,19 +283,21 @@ export async function retrieveOrImportShare(params: { } finalImportedShares = newImportedShares; } else if (!useDkg) { - // TODO: why use getrandombytes here? const bufferKey = keyType === KEY_TYPE.SECP256K1 ? generatePrivateKey(ecCurve, Buffer) : await getRandomBytes(32); const generatedShares = await generateShares(ecCurve, keyType, serverTimeOffset, indexes, nodePubkeys, Buffer.from(bufferKey)); finalImportedShares = [...finalImportedShares, ...generatedShares]; } let existingPubKey; - // can only import shares if override existing key is allowed or for new non dkg registration + // can only import new shares if override existing key is allowed or when doing a new non dkg registration if (finalImportedShares.length > 0) { - // in case not allowed to overide existing key for import request + // in case not allowed to override existing key for import request // check if key exists if (!overrideExistingKey) { const keyLookupResult = await VerifierLookupRequest({ endpoints, verifier, verifierId: verifierParams.verifier_id, keyType }); - if (keyLookupResult.errorResult) { + if ( + keyLookupResult.errorResult && + !(keyLookupResult.errorResult?.data as string)?.includes("Verifier + VerifierID has not yet been assigned") + ) { throw new Error( `node results do not match at first lookup ${JSON.stringify(keyLookupResult.keyResult || {})}, ${JSON.stringify(keyLookupResult.errorResult || {})}` ); diff --git a/test/sapphire_devnet.test.ts b/test/sapphire_devnet.test.ts index 7a96e50..3f324a2 100644 --- a/test/sapphire_devnet.test.ts +++ b/test/sapphire_devnet.test.ts @@ -74,7 +74,7 @@ describe.only("torus utils sapphire devnet", function () { }); }); - it.only("should be able to login a v1 user", async function () { + it("should be able to login a v1 user", async function () { const email = "himanshu@tor.us"; const verifier = "google-lrc"; const token = generateIdToken(email, "ES256"); @@ -122,11 +122,6 @@ describe.only("torus utils sapphire devnet", function () { }, nodesData: retrieveSharesResponse.nodesData, }); - const retrieveSharesResponse1 = await legacyTorus.retrieveShares( - getRetrieveSharesParams(torusNodeEndpoints, torusIndexes, TORUS_TEST_VERIFIER, { verifier_id: email }, token, torusNodePub) - ); - // eslint-disable-next-line no-console - console.log("retrieveSharesResponse1", retrieveSharesResponse1); }); it("should fetch user type and public address of legacy v2 user", async function () { diff --git a/test/testnet.test.ts b/test/testnet.test.ts index e495699..258a89e 100644 --- a/test/testnet.test.ts +++ b/test/testnet.test.ts @@ -14,7 +14,7 @@ const TORUS_TEST_EMAIL = 
"archit1@tor.us"; const TORUS_TEST_VERIFIER = "torus-test-health"; const TORUS_TEST_AGGREGATE_VERIFIER = "torus-test-health-aggregate"; -describe("torus utils migrated testnet on sapphire", function () { +describe.only("torus utils migrated testnet on sapphire", function () { let torus: TorusUtils; let TORUS_NODE_MANAGER: NodeDetailManager; From 3a842818b2b99db27930c47ad5a4e6de953ea9b2 Mon Sep 17 00:00:00 2001 From: himanshu Date: Tue, 24 Sep 2024 17:29:38 +0530 Subject: [PATCH 5/9] made commitment flow optional --- src/helpers/common.ts | 50 +++++++++- src/helpers/nodeUtils.ts | 188 +++++++++++++++++++++++++++++++++-- src/interfaces.ts | 10 ++ src/torus.ts | 16 ++- test/helpers.ts | 4 +- test/sapphire_devnet.test.ts | 55 ++++++++++ 6 files changed, 307 insertions(+), 16 deletions(-) diff --git a/src/helpers/common.ts b/src/helpers/common.ts index f2cd460..ae2b36e 100644 --- a/src/helpers/common.ts +++ b/src/helpers/common.ts @@ -1,11 +1,11 @@ -import { KEY_TYPE } from "@toruslabs/constants"; +import { JRPCResponse, KEY_TYPE } from "@toruslabs/constants"; import { Ecies } from "@toruslabs/eccrypto"; import { BN } from "bn.js"; import { ec as EC } from "elliptic"; import { keccak256 as keccakHash } from "ethereum-cryptography/keccak"; import JsonStringify from "json-stable-stringify"; -import { EciesHex, GetORSetKeyResponse, KeyType, VerifierLookupResponse } from "../interfaces"; +import { CommitmentRequestResult, EciesHex, GetORSetKeyResponse, KeyType, VerifierLookupResponse } from "../interfaces"; export function keccak256(a: Buffer): string { const hash = Buffer.from(keccakHash(a)).toString("hex"); @@ -160,3 +160,49 @@ export function waitFor(milliseconds: number) { } }); } + +export function retryCommitment(executionPromise: () => Promise>, maxRetries: number) { + // Notice that we declare an inner function here + // so we can encapsulate the retries and don't expose + // it to the caller. This is also a recursive function + async function retryWithBackoff(retries: number) { + try { + // we don't wait on the first attempt + if (retries > 0) { + // on every retry, we exponentially increase the time to wait. 
+ // Here is how it looks for a `maxRetries` = 4 + // (2 ** 1) * 100 = 200 ms + // (2 ** 2) * 100 = 400 ms + // (2 ** 3) * 100 = 800 ms + const timeToWait = 2 ** retries * 100; + await waitFor(timeToWait); + } + const a = await executionPromise(); + return a; + } catch (e: unknown) { + const errorMsg = (e as Error).message; + const acceptedErrorMsgs = [ + // Slow node + "Timed out", + "Failed to fetch", + "fetch failed", + "Load failed", + "cancelled", + "NetworkError when attempting to fetch resource.", + // Happens when the node is not reachable (dns issue etc) + "TypeError: Failed to fetch", // All except iOS and Firefox + "TypeError: cancelled", // iOS + "TypeError: NetworkError when attempting to fetch resource.", // Firefox + ]; + + if (retries < maxRetries && (acceptedErrorMsgs.includes(errorMsg) || (errorMsg && errorMsg.includes("reason: getaddrinfo EAI_AGAIN")))) { + // only retry if we didn't reach the limit + // otherwise, let the caller handle the error + return retryWithBackoff(retries + 1); + } + throw e; + } + } + + return retryWithBackoff(0); +} diff --git a/src/helpers/nodeUtils.ts b/src/helpers/nodeUtils.ts index cba9486..feb291b 100644 --- a/src/helpers/nodeUtils.ts +++ b/src/helpers/nodeUtils.ts @@ -8,6 +8,7 @@ import { getRandomBytes } from "ethereum-cryptography/random"; import { config } from "../config"; import { JRPC_METHODS } from "../constants"; import { + CommitmentRequestResult, ExtendedPublicKey, GetORSetKeyResponse, GetOrSetNonceResult, @@ -33,8 +34,10 @@ import { generatePrivateKey, getProxyCoordinatorEndpointIndex, kCombinations, + keccak256, normalizeKeysResult, normalizeLookUpResult, + retryCommitment, thresholdSame, } from "./common"; import { derivePubKey, generateAddressFromPrivKey, generateAddressFromPubKey, generateShares } from "./keyUtils"; @@ -215,6 +218,136 @@ export const VerifierLookupRequest = async (params: { return result; }; + +const commitmentRequest = async (params: { + idToken: string; + endpoints: string[]; + indexes: number[]; + keyType: KeyType; + verifier: string; + verifierParams: VerifierParams; + pubKeyX: string; + pubKeyY: string; + finalImportedShares: ImportedShare[]; + overrideExistingKey: boolean; +}): Promise<(void | JRPCResponse<CommitmentRequestResult>)[]> => { + const { idToken, endpoints, indexes, keyType, verifier, verifierParams, pubKeyX, pubKeyY, finalImportedShares, overrideExistingKey } = params; + const tokenCommitment = keccak256(Buffer.from(idToken, "utf8")); + const threeFourthsThreshold = ~~((endpoints.length * 3) / 4) + 1; + const halfThreshold = ~~(endpoints.length / 2) + 1; + + const promiseArr: Promise<JRPCResponse<CommitmentRequestResult>>[] = []; + // make commitment requests to endpoints + for (let i = 0; i < endpoints.length; i += 1) { + /* + CommitmentRequestParams struct { + MessagePrefix string `json:"messageprefix"` + TokenCommitment string `json:"tokencommitment"` + TempPubX string `json:"temppubx"` + TempPubY string `json:"temppuby"` + VerifierIdentifier string `json:"verifieridentifier"` + } + */ + const p = () => + post<JRPCResponse<CommitmentRequestResult>>( + endpoints[i], + generateJsonRPCObject(JRPC_METHODS.COMMITMENT_REQUEST, { + messageprefix: "mug00", + keytype: keyType, + tokencommitment: tokenCommitment.slice(2), + temppubx: pubKeyX, + temppuby: pubKeyY, + verifieridentifier: verifier, + verifier_id: verifierParams.verifier_id, + extended_verifier_id: verifierParams.extended_verifier_id, + is_import_key_flow: true, + }), + {}, + { logTracingHeader: config.logRequestTracing } + ); + const r = retryCommitment(p, 4); + promiseArr.push(r); + } + return new Promise<(void | 
JRPCResponse<CommitmentRequestResult>)[]>((resolve, reject) => { + // send share request once k + t number of commitment requests have completed + Some<void | JRPCResponse<CommitmentRequestResult>, (void | JRPCResponse<CommitmentRequestResult>)[]>(promiseArr, (resultArr) => { + const completedRequests = resultArr.filter((x) => { + if (!x || typeof x !== "object") { + return false; + } + if (x.error) { + return false; + } + return true; + }); + + if (finalImportedShares.length > 0) { + // this optimization is for imported keys + // for new imported keys registration we need to wait for all nodes to agree on commitment + // for fetching existing imported keys we can rely on a threshold of node commitments + if (overrideExistingKey && completedRequests.length === endpoints.length) { + const requiredNodeResult = completedRequests.find((resp: void | JRPCResponse<CommitmentRequestResult>) => { + if (resp) { + return true; + } + return false; + }); + if (requiredNodeResult) { + return Promise.resolve(resultArr); + } + } else if (!overrideExistingKey && completedRequests.length >= threeFourthsThreshold) { + const nodeSigs: CommitmentRequestResult[] = []; + for (let i = 0; i < completedRequests.length; i += 1) { + const x = completedRequests[i]; + if (!x || typeof x !== "object" || x.error) { + continue; + } + if (x) nodeSigs.push((x as JRPCResponse<CommitmentRequestResult>).result); + } + const existingPubKey = thresholdSame( + nodeSigs.map((x) => x && x.pub_key_x), + halfThreshold + ); + const proxyEndpointNum = getProxyCoordinatorEndpointIndex(endpoints, verifier, verifierParams.verifier_id); + // for import shares, proxy node response is required. + // proxy node returns metadata. + // if user's account already exists, a threshold of commitments that includes the proxy node's is enough + const requiredNodeIndex = indexes[proxyEndpointNum].toString(10); + + // if not an existing key, we need to wait for all nodes to agree on the commitment + if (existingPubKey || (!existingPubKey && completedRequests.length === endpoints.length)) { + const requiredNodeResult = completedRequests.find((resp: void | JRPCResponse<CommitmentRequestResult>) => { + if (resp && resp.result?.nodeindex === requiredNodeIndex) { + return true; + } + return false; + }); + if (requiredNodeResult) { + return Promise.resolve(resultArr); + } + } + } + } else if (completedRequests.length >= threeFourthsThreshold) { + // this case is for dkg keys + const requiredNodeResult = completedRequests.find((resp: void | JRPCResponse<CommitmentRequestResult>) => { + if (resp) { + return true; + } + return false; + }); + if (requiredNodeResult) { + return Promise.resolve(resultArr); + } + } + + return Promise.reject(new Error(`invalid commitment results ${JSON.stringify(resultArr)}`)); + }) + .then((resultArr: (void | JRPCResponse<CommitmentRequestResult>)[]) => { + return resolve(resultArr); + }) + .catch(reject); + }); +}; export async function retrieveOrImportShare(params: { legacyMetadataHost: string; serverTimeOffset: number; @@ -232,8 +365,9 @@ export async function retrieveOrImportShare(params: { useDkg: boolean; overrideExistingKey: boolean; nodePubkeys: INodePub[]; - newImportedShares?: ImportedShare[]; extraParams: TorusUtilsExtraParams; + newImportedShares?: ImportedShare[]; + checkCommitment?: boolean; }): Promise<TorusKey> { const { legacyMetadataHost, @@ -254,6 +388,7 @@ export async function retrieveOrImportShare(params: { extraParams, useDkg = true, serverTimeOffset, + checkCommitment = true, } = params; await get( allowHost, @@ -287,9 +422,36 @@ export async function retrieveOrImportShare(params: { const generatedShares = await generateShares(ecCurve, keyType, serverTimeOffset, indexes, nodePubkeys, Buffer.from(bufferKey)); finalImportedShares = [...finalImportedShares, ...generatedShares]; } - let existingPubKey; - // can only import 
new shares if override existing key is allowed or when doing a new non dkg registration - if (finalImportedShares.length > 0) { + + let commitmentRequestResult: (void | JRPCResponse<CommitmentRequestResult>)[] = []; + let isExistingKey: boolean; + const nodeSigs: CommitmentRequestResult[] = []; + if (checkCommitment) { + commitmentRequestResult = await commitmentRequest({ + idToken, + endpoints, + indexes, + keyType, + verifier, + verifierParams, + pubKeyX: sessionPubX, + pubKeyY: sessionPubY, + finalImportedShares, + overrideExistingKey, + }); + for (let i = 0; i < commitmentRequestResult.length; i += 1) { + const x = commitmentRequestResult[i]; + if (!x || typeof x !== "object" || x.error) { + continue; + } + if (x) nodeSigs.push((x as JRPCResponse<CommitmentRequestResult>).result); + } + // if user's account already exists, a threshold of nodes will return its pub_key_x in their commitment results + isExistingKey = !!thresholdSame( + nodeSigs.map((x) => x && x.pub_key_x), + halfThreshold + ); + } else if (!checkCommitment && finalImportedShares.length > 0) { // in case not allowed to override existing key for import request // check if key exists if (!overrideExistingKey) { @@ -303,22 +465,25 @@ export async function retrieveOrImportShare(params: { ); } if (keyLookupResult.keyResult?.keys?.length > 0) { - existingPubKey = keyLookupResult.keyResult.keys[0]; + isExistingKey = !!keyLookupResult.keyResult.keys[0]; } } - // check if key exists } const promiseArrRequest = []; - const canImportedShares = overrideExistingKey || (!useDkg && !existingPubKey); + const canImportedShares = overrideExistingKey || (!useDkg && !isExistingKey); if (canImportedShares) { const proxyEndpointNum = getProxyCoordinatorEndpointIndex(endpoints, verifier, verifierParams.verifier_id); const items: Record<string, unknown>[] = []; for (let i = 0; i < endpoints.length; i += 1) { const importedShare = finalImportedShares[i]; + if (!importedShare) { + throw new Error(`invalid imported share at index ${i}`); + } items.push({ ...verifierParams, idtoken: idToken, + nodesignatures: nodeSigs, verifieridentifier: verifier, pub_key_x: importedShare.oauth_pub_key_x, pub_key_y: importedShare.oauth_pub_key_y, @@ -340,8 +505,8 @@ encrypted: "yes", use_temp: true, verifieridentifier: verifier, - temppubx: sessionPubX, - temppuby: sessionPubY, + temppubx: nodeSigs.length === 0 && !checkCommitment ? sessionPubX : "", // send session pub key x only if node signatures are not available (i.e. in non commitment flow) + temppuby: nodeSigs.length === 0 && !checkCommitment ? sessionPubY : "", // send session pub key y only if node signatures are not available (i.e. in non commitment flow) item: items, key_type: keyType, one_key_flow: true, @@ -360,13 +525,14 @@ key_type: keyType, distributed_metadata: true, verifieridentifier: verifier, - temppubx: sessionPubX, - temppuby: sessionPubY, + temppubx: nodeSigs.length === 0 && !checkCommitment ? sessionPubX : "", // send session pub key x only if node signatures are not available (i.e. in non commitment flow) + temppuby: nodeSigs.length === 0 && !checkCommitment ? sessionPubY : "", // send session pub key y only if node signatures are not available (i.e. 
in non commitment flow) item: [ { ...verifierParams, idtoken: idToken, key_type: keyType, + nodesignatures: nodeSigs, verifieridentifier: verifier, ...extraParams, }, diff --git a/src/interfaces.ts b/src/interfaces.ts index bcf573c..9e9e2d2 100644 --- a/src/interfaces.ts +++ b/src/interfaces.ts @@ -81,6 +81,14 @@ export interface VerifierLookupResponse { server_time_offset?: string; } +export interface CommitmentRequestResult { + signature: string; + data: string; + nodepubx: string; + nodepuby: string; + nodeindex: string; + pub_key_x: string; +} export interface JRPCResponse<T> { id: number; jsonrpc: "2.0"; @@ -272,6 +280,7 @@ export interface ImportKeyParams { idToken: string; newPrivateKey: string; extraParams?: TorusUtilsExtraParams; + checkCommitment?: boolean; } export interface RetrieveSharesParams { @@ -283,4 +292,5 @@ nodePubkeys: INodePub[]; extraParams?: TorusUtilsExtraParams; useDkg?: boolean; + checkCommitment?: boolean; } diff --git a/src/torus.ts b/src/torus.ts index 08fe50c..e56bae1 100644 --- a/src/torus.ts +++ b/src/torus.ts @@ -112,7 +112,7 @@ class Torus { } async retrieveShares(params: RetrieveSharesParams): Promise<TorusKey> { - const { verifier, verifierParams, idToken, nodePubkeys, indexes, endpoints, useDkg, extraParams = {} } = params; + const { verifier, verifierParams, idToken, nodePubkeys, indexes, endpoints, useDkg, extraParams = {}, checkCommitment = true } = params; if (nodePubkeys.length === 0) { throw new Error("nodePubkeys param is required"); } @@ -164,6 +164,7 @@ overrideExistingKey: false, nodePubkeys, extraParams, + checkCommitment, }); } @@ -177,7 +178,17 @@ } async importPrivateKey(params: ImportKeyParams): Promise<TorusKey> { - const { nodeIndexes, newPrivateKey, verifier, verifierParams, idToken, nodePubkeys, endpoints, extraParams = {} } = params; + const { + nodeIndexes, + newPrivateKey, + verifier, + verifierParams, + idToken, + nodePubkeys, + endpoints, + extraParams = {}, + checkCommitment = true, + } = params; if (LEGACY_NETWORKS_ROUTE_MAP[this.network as TORUS_LEGACY_NETWORK_TYPE]) { throw new Error(`importPrivateKey is not supported by legacy network; ${this.network}`); } @@ -236,6 +247,7 @@ newImportedShares: sharesData, nodePubkeys, extraParams, + checkCommitment, }); } diff --git a/test/helpers.ts b/test/helpers.ts index a196b8a..9acd687 100644 --- a/test/helpers.ts +++ b/test/helpers.ts @@ -53,7 +53,8 @@ export const getRetrieveSharesParams = ( idToken: string, nodePubkeys: INodePub[], extraParams: TorusUtilsExtraParams = {}, - useDkg?: boolean + useDkg?: boolean, + checkCommitment = true ): RetrieveSharesParams => { return { endpoints, @@ -64,6 +65,7 @@ nodePubkeys, extraParams, useDkg, + checkCommitment, }; }; diff --git a/test/sapphire_devnet.test.ts b/test/sapphire_devnet.test.ts index 3f324a2..8e0ee6e 100644 --- a/test/sapphire_devnet.test.ts +++ b/test/sapphire_devnet.test.ts @@ -367,6 +367,61 @@ }); }); + it("should be able to login without commitments", async function () { + const token = generateIdToken(TORUS_TEST_EMAIL, "ES256"); + const nodeDetails = await TORUS_NODE_MANAGER.getNodeDetails({ verifier: TORUS_TEST_VERIFIER, verifierId: TORUS_TEST_EMAIL }); + const torusNodeEndpoints = nodeDetails.torusNodeSSSEndpoints; + const result = await torus.retrieveShares( + getRetrieveSharesParams( + torusNodeEndpoints, + nodeDetails.torusIndexes, + TORUS_TEST_VERIFIER, + { 
verifier_id: TORUS_TEST_EMAIL }, + token, + nodeDetails.torusNodePub, + {}, + true, + false + ) + ); + expect(result.metadata.serverTimeOffset).lessThan(20); + delete result.metadata.serverTimeOffset; + + expect(result).eql({ + finalKeyData: { + walletAddress: "0x462A8BF111A55C9354425F875F89B22678c0Bc44", + X: "36e257717f746cdd52ba85f24f7c9040db8977d3b0354de70ed43689d24fa1b1", + Y: "58ec9768c2fe871b3e2a83cdbcf37ba6a88ad19ec2f6e16a66231732713fd507", + privKey: "230dad9f42039569e891e6b066ff5258b14e9764ef5176d74aeb594d1a744203", + }, + oAuthKeyData: { + walletAddress: "0x137B3607958562D03Eb3C6086392D1eFa01aA6aa", + X: "118a674da0c68f16a1123de9611ba655f4db1e336fe1b2d746028d65d22a3c6b", + Y: "8325432b3a3418d632b4fe93db094d6d83250eea60fe512897c0ad548737f8a5", + privKey: "6b3c872a269aa8994a5acc8cdd70ea3d8d182d42f8af421c0c39ea124e9b66fa", + }, + postboxKeyData: { + X: "118a674da0c68f16a1123de9611ba655f4db1e336fe1b2d746028d65d22a3c6b", + Y: "8325432b3a3418d632b4fe93db094d6d83250eea60fe512897c0ad548737f8a5", + privKey: "6b3c872a269aa8994a5acc8cdd70ea3d8d182d42f8af421c0c39ea124e9b66fa", + }, + sessionData: { + sessionTokenData: result.sessionData.sessionTokenData, + sessionAuthKey: result.sessionData.sessionAuthKey, + }, + metadata: { + pubNonce: { + X: "5d03a0df9b3db067d3363733df134598d42873bb4730298a53ee100975d703cc", + Y: "279434dcf0ff22f077877a70bcad1732412f853c96f02505547f7ca002b133ed", + }, + nonce: new BN("b7d126751b68ecd09e371a23898e6819dee54708a5ead4f6fe83cdc79c0f1c4a", "hex"), + typeOfUser: "v2", + upgraded: false, + }, + nodesData: result.nodesData, + }); + }); + it("should be able to login with non dkg keys", async function () { const email = `atomicimporttest2`; const token = generateIdToken(email, "ES256"); From 09a5c8dbd09b2d4224c9abdc3f0a82d37f99070e Mon Sep 17 00:00:00 2001 From: himanshu Date: Fri, 4 Oct 2024 09:32:22 +0530 Subject: [PATCH 6/9] add more tests --- test/cyan.test.ts | 46 ++++++++++++++++++ test/sapphire_devnet.test.ts | 2 +- test/sapphire_mainnet.test.ts | 87 +++++++++++++++++++++++++++++++++++ test/testnet.test.ts | 2 +- 4 files changed, 135 insertions(+), 2 deletions(-) diff --git a/test/cyan.test.ts b/test/cyan.test.ts index 10aa432..3114a34 100644 --- a/test/cyan.test.ts +++ b/test/cyan.test.ts @@ -207,6 +207,52 @@ describe("torus utils cyan", function () { }); }); + it("should be able to login without commitments", async function () { + const token = generateIdToken(TORUS_TEST_EMAIL, "ES256"); + const verifierDetails = { verifier: TORUS_TEST_VERIFIER, verifierId: TORUS_TEST_EMAIL }; + const { torusNodeEndpoints, torusIndexes, torusNodePub } = await TORUS_NODE_MANAGER.getNodeDetails(verifierDetails); + const result = await torus.retrieveShares( + getRetrieveSharesParams( + torusNodeEndpoints, + torusIndexes, + TORUS_TEST_VERIFIER, + { verifier_id: TORUS_TEST_EMAIL }, + token, + torusNodePub, + {}, + true, + false + ) + ); + delete result.sessionData; + expect(result.metadata.serverTimeOffset).lessThan(20); + + delete result.metadata.serverTimeOffset; + + expect(result.finalKeyData.privKey).to.be.equal("5db51619684b32a2ff2375b4c03459d936179dfba401cb1c176b621e8a2e4ac8"); + expect(result).eql({ + finalKeyData: { + X: "e2ed6033951af2851d1bea98799e62fb1ff24b952c1faea17922684678ba42d1", + Y: "beef0efad88e81385952c0068ca48e8b9c2121be87cb0ddf18a68806db202359", + walletAddress: "0xC615aA03Dd8C9b2dc6F7c43cBDfF2c34bBa47Ec9", + privKey: "5db51619684b32a2ff2375b4c03459d936179dfba401cb1c176b621e8a2e4ac8", + }, + oAuthKeyData: { + X: 
"e2ed6033951af2851d1bea98799e62fb1ff24b952c1faea17922684678ba42d1", + Y: "beef0efad88e81385952c0068ca48e8b9c2121be87cb0ddf18a68806db202359", + walletAddress: "0xC615aA03Dd8C9b2dc6F7c43cBDfF2c34bBa47Ec9", + privKey: "5db51619684b32a2ff2375b4c03459d936179dfba401cb1c176b621e8a2e4ac8", + }, + postboxKeyData: { + X: "e2ed6033951af2851d1bea98799e62fb1ff24b952c1faea17922684678ba42d1", + Y: "beef0efad88e81385952c0068ca48e8b9c2121be87cb0ddf18a68806db202359", + privKey: "5db51619684b32a2ff2375b4c03459d936179dfba401cb1c176b621e8a2e4ac8", + }, + metadata: { pubNonce: undefined, nonce: new BN(0), typeOfUser: "v1", upgraded: null }, + nodesData: result.nodesData, + }); + }); + it("should be able to aggregate login", async function () { const idToken = generateIdToken(TORUS_TEST_EMAIL, "ES256"); const hashedIdToken = keccak256(Buffer.from(idToken, "utf8")); diff --git a/test/sapphire_devnet.test.ts b/test/sapphire_devnet.test.ts index 8e0ee6e..8aa8764 100644 --- a/test/sapphire_devnet.test.ts +++ b/test/sapphire_devnet.test.ts @@ -21,7 +21,7 @@ const TORUS_TEST_VERIFIER = "torus-test-health"; const TORUS_TEST_AGGREGATE_VERIFIER = "torus-test-health-aggregate"; const HashEnabledVerifier = "torus-test-verifierid-hash"; -describe.only("torus utils sapphire devnet", function () { +describe("torus utils sapphire devnet", function () { let torus: TorusUtils; let TORUS_NODE_MANAGER: NodeDetailManager; diff --git a/test/sapphire_mainnet.test.ts b/test/sapphire_mainnet.test.ts index 0fdade6..5c2e834 100644 --- a/test/sapphire_mainnet.test.ts +++ b/test/sapphire_mainnet.test.ts @@ -337,6 +337,60 @@ describe("torus utils sapphire mainnet", function () { }); }); + it("should be able to login without commitments", async function () { + const token = generateIdToken(TORUS_TEST_EMAIL, "ES256"); + const verifierDetails = { verifier: TORUS_TEST_VERIFIER, verifierId: TORUS_TEST_EMAIL }; + const { torusNodeEndpoints, torusIndexes, torusNodePub } = await TORUS_NODE_MANAGER.getNodeDetails(verifierDetails); + const result = await torus.retrieveShares( + getRetrieveSharesParams( + torusNodeEndpoints, + torusIndexes, + TORUS_TEST_VERIFIER, + { verifier_id: TORUS_TEST_EMAIL }, + token, + torusNodePub, + {}, + true, + false + ) + ); + expect(result.finalKeyData.privKey).to.be.equal("dfb39b84e0c64b8c44605151bf8670ae6eda232056265434729b6a8a50fa3419"); + expect(result.metadata.serverTimeOffset).lessThan(20); + + delete result.metadata.serverTimeOffset; + + expect(result).eql({ + finalKeyData: { + walletAddress: "0x70520A7F04868ACad901683699Fa32765C9F6871", + X: "adff099b5d3b1e238b43fba1643cfa486e8d9e8de22c1e6731d06a5303f9025b", + Y: "21060328e7889afd303acb63201b6493e3061057d1d81279931ab4a6cabf94d4", + privKey: "dfb39b84e0c64b8c44605151bf8670ae6eda232056265434729b6a8a50fa3419", + }, + oAuthKeyData: { + walletAddress: "0x925c97404F1aBdf4A8085B93edC7B9F0CEB3C673", + X: "5cd8625fc01c7f7863a58c914a8c43b2833b3d0d5059350bab4acf6f4766a33d", + Y: "198a4989615c5c2c7fa4d49c076ea7765743d09816bb998acb9ff54f5db4a391", + privKey: "90a219ac78273e82e36eaa57c15f9070195e436644319d6b9aea422bb4d31906", + }, + postboxKeyData: { + X: "5cd8625fc01c7f7863a58c914a8c43b2833b3d0d5059350bab4acf6f4766a33d", + Y: "198a4989615c5c2c7fa4d49c076ea7765743d09816bb998acb9ff54f5db4a391", + privKey: "90a219ac78273e82e36eaa57c15f9070195e436644319d6b9aea422bb4d31906", + }, + sessionData: { sessionTokenData: result.sessionData.sessionTokenData, sessionAuthKey: result.sessionData.sessionAuthKey }, + metadata: { + pubNonce: { + X: 
"ab4d287c263ab1bb83c37646d0279764e50fe4b0c34de4da113657866ddcf318", + Y: "ad35db2679dfad4b62d77cf753d7b98f73c902e5d101cc2c3c1209ece6d94382", + }, + nonce: new BN("4f1181d8689f0d0960f1a6f9fe26e03e557bdfba11f4b6c8d7b1285e9c271b13", "hex"), + typeOfUser: "v2", + upgraded: false, + }, + nodesData: result.nodesData, + }); + }); + it("should be able to aggregate login", async function () { const email = faker.internet.email(); const idToken = generateIdToken(email, "ES256"); @@ -367,6 +421,39 @@ describe("torus utils sapphire mainnet", function () { expect(result.metadata.upgraded).to.equal(false); }); + it("should be able to aggregate login without commitment", async function () { + const email = faker.internet.email(); + const idToken = generateIdToken(email, "ES256"); + const hashedIdToken = keccak256(Buffer.from(idToken, "utf8")); + const verifierDetails = { verifier: TORUS_TEST_AGGREGATE_VERIFIER, verifierId: email }; + + const nodeDetails = await TORUS_NODE_MANAGER.getNodeDetails(verifierDetails); + const torusNodeEndpoints = nodeDetails.torusNodeSSSEndpoints; + const result = await torus.retrieveShares( + getRetrieveSharesParams( + torusNodeEndpoints, + nodeDetails.torusIndexes, + TORUS_TEST_AGGREGATE_VERIFIER, + { + verify_params: [{ verifier_id: email, idtoken: idToken }], + sub_verifier_ids: [TORUS_TEST_VERIFIER], + verifier_id: email, + }, + hashedIdToken.substring(2), + nodeDetails.torusNodePub, + {}, + true, + false + ) + ); + expect(result.finalKeyData.walletAddress).to.not.equal(null); + expect(result.finalKeyData.walletAddress).to.not.equal(""); + expect(result.oAuthKeyData.walletAddress).to.not.equal(null); + expect(result.metadata.typeOfUser).to.equal("v2"); + expect(result.metadata.nonce).to.not.equal(null); + expect(result.metadata.upgraded).to.equal(false); + }); + it("should be able to update the `sessionTime` of the token signature data", async function () { const email = faker.internet.email(); const token = generateIdToken(TORUS_TEST_EMAIL, "ES256"); diff --git a/test/testnet.test.ts b/test/testnet.test.ts index 258a89e..e495699 100644 --- a/test/testnet.test.ts +++ b/test/testnet.test.ts @@ -14,7 +14,7 @@ const TORUS_TEST_EMAIL = "archit1@tor.us"; const TORUS_TEST_VERIFIER = "torus-test-health"; const TORUS_TEST_AGGREGATE_VERIFIER = "torus-test-health-aggregate"; -describe.only("torus utils migrated testnet on sapphire", function () { +describe("torus utils migrated testnet on sapphire", function () { let torus: TorusUtils; let TORUS_NODE_MANAGER: NodeDetailManager; From 3d58fd5640d342fc8b2e740641bb34f63827f4ca Mon Sep 17 00:00:00 2001 From: himanshu Date: Fri, 4 Oct 2024 09:36:56 +0530 Subject: [PATCH 7/9] Release 15.1.0-0 --- package-lock.json | 4 ++-- package.json | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/package-lock.json b/package-lock.json index fd00cf9..833f199 100644 --- a/package-lock.json +++ b/package-lock.json @@ -1,12 +1,12 @@ { "name": "@toruslabs/torus.js", - "version": "15.0.5", + "version": "15.1.0-0", "lockfileVersion": 3, "requires": true, "packages": { "": { "name": "@toruslabs/torus.js", - "version": "15.0.5", + "version": "15.1.0-0", "license": "MIT", "dependencies": { "@toruslabs/bs58": "^1.0.0", diff --git a/package.json b/package.json index c5d92a3..4cdf5c9 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "@toruslabs/torus.js", - "version": "15.0.5", + "version": "15.1.0-0", "description": "Handle communication with torus nodes", "main": "dist/lib.cjs/index.js", "module": "dist/lib.esm/index.js", 
From d2a6da45195edea5c5af27e0e64ca1eb20a6f5e7 Mon Sep 17 00:00:00 2001 From: himanshu Date: Fri, 4 Oct 2024 14:03:58 +0530 Subject: [PATCH 8/9] added helper function for get or set tss dkg pub key --- src/helpers/index.ts | 1 + src/helpers/tssPubKeyUtils.ts | 110 ++++++++++++++++++++++++++++++++++ test/tssPubKey.test.ts | 88 +++++++++++++++++++++++++++ 3 files changed, 199 insertions(+) create mode 100644 src/helpers/tssPubKeyUtils.ts create mode 100644 test/tssPubKey.test.ts diff --git a/src/helpers/index.ts b/src/helpers/index.ts index 4296e6a..92a0751 100644 --- a/src/helpers/index.ts +++ b/src/helpers/index.ts @@ -4,3 +4,4 @@ export * from "./keyUtils"; export * from "./langrangeInterpolatePoly"; export * from "./metadataUtils"; export * from "./nodeUtils"; +export * from "./tssPubKeyUtils"; diff --git a/src/helpers/tssPubKeyUtils.ts b/src/helpers/tssPubKeyUtils.ts new file mode 100644 index 0000000..a88920a --- /dev/null +++ b/src/helpers/tssPubKeyUtils.ts @@ -0,0 +1,110 @@ +// Note: Endpoints should be the sss node endpoints along with path +import { JRPCResponse } from "@toruslabs/constants"; +import { generateJsonRPCObject, post } from "@toruslabs/http-helpers"; +import log from "loglevel"; + +import { GetORSetKeyResponse } from "../interfaces"; +import { Some } from "../some"; +import { normalizeKeysResult, thresholdSame } from "./common"; + +// for ex: [https://node-1.node.web3auth.io/sss/jrpc, https://node-2.node.web3auth.io/sss/jrpc ....] +export const GetOrSetTssDKGPubKey = async (params: { + endpoints: string[]; + verifier: string; + verifierId: string; + tssVerifierId: string; +}): Promise<{ + key: { + pubKeyX: string; + pubKeyY: string; + address: string; + createdAt?: number; + }; + isNewKey: boolean; + nodeIndexes: number[]; +}> => { + const { endpoints, verifier, verifierId, tssVerifierId } = params; + const minThreshold = ~~(endpoints.length / 2) + 1; + const lookupPromises = endpoints.map((x) => + post<JRPCResponse<GetORSetKeyResponse>>( + x, + generateJsonRPCObject("GetPubKeyOrKeyAssign", { + distributed_metadata: true, + verifier, + verifier_id: verifierId, + extended_verifier_id: tssVerifierId, + one_key_flow: true, + key_type: "secp256k1", + fetch_node_index: true, + client_time: Math.floor(Date.now() / 1000).toString(), + }), + {}, + { + logTracingHeader: false, + } + ).catch((err) => log.error(`GetPubKeyOrKeyAssign request failed`, err)) + ); + + const nodeIndexes: number[] = []; + const result = await Some< + void | JRPCResponse<GetORSetKeyResponse>, + { + keyResult: Pick<GetORSetKeyResponse, "keys" | "is_new_key">; + nodeIndexes: number[]; + errorResult: JRPCResponse<GetORSetKeyResponse>["error"]; + } + >(lookupPromises, async (lookupResults) => { + const lookupPubKeys = lookupResults.filter((x1) => { + if (x1 && !x1.error) { + return x1; + } + return false; + }); + + const errorResult = thresholdSame( + lookupResults.map((x2) => x2 && x2.error), + minThreshold + ); + + const keyResult = thresholdSame( + lookupPubKeys.map((x3) => x3 && normalizeKeysResult(x3.result)), + minThreshold + ); + + if (keyResult || errorResult) { + if (keyResult) { + lookupResults.forEach((x1) => { + if (x1 && x1.result) { + const currentNodePubKey = x1.result.keys[0].pub_key_X.toLowerCase(); + const thresholdPubKey = keyResult.keys[0].pub_key_X.toLowerCase(); + // push only those indexes for nodes whose returned pub key matches the threshold pub key. + // this check is important when different nodes have different keys assigned to a user.
+ if (currentNodePubKey === thresholdPubKey) { + const nodeIndex = Number.parseInt(x1.result.node_index); + if (nodeIndex) nodeIndexes.push(nodeIndex); + } + } + }); + } + + return Promise.resolve({ keyResult, nodeIndexes, errorResult }); + } + return Promise.reject(new Error(`invalid public key result: ${JSON.stringify(lookupResults)} for tssVerifierId: ${tssVerifierId} `)); + }); + + if (result.errorResult) { + throw new Error(`invalid public key result, errorResult: ${JSON.stringify(result.errorResult)}`); + } + + const key = result.keyResult.keys[0]; + return { + key: { + pubKeyX: key.pub_key_X, + pubKeyY: key.pub_key_Y, + address: key.address, + createdAt: key.created_at, + }, + nodeIndexes: result.nodeIndexes, + isNewKey: result.keyResult.is_new_key, + }; +}; diff --git a/test/tssPubKey.test.ts b/test/tssPubKey.test.ts new file mode 100644 index 0000000..dd70d8b --- /dev/null +++ b/test/tssPubKey.test.ts @@ -0,0 +1,88 @@ +import { TORUS_SAPPHIRE_NETWORK } from "@toruslabs/constants"; +import { NodeDetailManager } from "@toruslabs/fetch-node-details"; +import { expect } from "chai"; +import faker from "faker"; + +import { GetOrSetTssDKGPubKey } from "../src"; + +describe("setTssKey", function () { + const TORUS_EXTENDED_VERIFIER_EMAIL = "testextenderverifierid@example.com"; + const TORUS_TEST_VERIFIER = "torus-test-health"; + + let TORUS_NODE_MANAGER: NodeDetailManager; + + beforeEach("one time execution before all tests", async function () { + TORUS_NODE_MANAGER = new NodeDetailManager({ network: TORUS_SAPPHIRE_NETWORK.SAPPHIRE_DEVNET }); + }); + + it("should assign key to tss verifier id", async function () { + const email = faker.internet.email(); + const nonce = 0; + const tssTag = "default"; + const tssVerifierId = `${email}\u0015${tssTag}\u0016${nonce}`; + const verifierDetails = { verifier: TORUS_TEST_VERIFIER, verifierId: email }; + + const { torusNodeSSSEndpoints: torusNodeEndpoints } = await TORUS_NODE_MANAGER.getNodeDetails(verifierDetails); + + const result = await GetOrSetTssDKGPubKey({ + endpoints: torusNodeEndpoints, + verifier: TORUS_TEST_VERIFIER, + verifierId: email, + tssVerifierId, + }); + expect(result.key.pubKeyX).to.not.equal(null); + }); + + it("should fetch pub address of tss verifier id", async function () { + const email = TORUS_EXTENDED_VERIFIER_EMAIL; + const nonce = 0; + const tssTag = "default"; + const tssVerifierId = `${email}\u0015${tssTag}\u0016${nonce}`; + const verifierDetails = { verifier: TORUS_TEST_VERIFIER, verifierId: email }; + + const { torusNodeSSSEndpoints: torusNodeEndpoints } = await TORUS_NODE_MANAGER.getNodeDetails(verifierDetails); + + const result = await GetOrSetTssDKGPubKey({ + endpoints: torusNodeEndpoints, + verifier: TORUS_TEST_VERIFIER, + verifierId: email, + tssVerifierId, + }); + delete result.key.createdAt; + expect(result).eql({ + key: { + pubKeyX: "d45d4ad45ec643f9eccd9090c0a2c753b1c991e361388e769c0dfa90c210348c", + pubKeyY: "fdc151b136aa7df94e97cc7d7007e2b45873c4b0656147ec70aad46e178bce1e", + address: "0xBd6Bc8aDC5f2A0526078Fd2016C4335f64eD3a30", + }, + isNewKey: false, + nodeIndexes: result.nodeIndexes, + }); + }); + + it("should fail if more than one endpoint is invalid", async function () { + const email = TORUS_EXTENDED_VERIFIER_EMAIL; + const nonce = 0; + const tssTag = "default"; + const tssVerifierId = `${email}\u0015${tssTag}\u0016${nonce}`; + const verifierDetails = { verifier: TORUS_TEST_VERIFIER, verifierId: email }; + + const { torusNodeSSSEndpoints: torusNodeEndpoints } = await 
TORUS_NODE_MANAGER.getNodeDetails(verifierDetails); + torusNodeEndpoints[2] = "https://invalid.torus.com"; + torusNodeEndpoints[3] = "https://invalid.torus.com"; + torusNodeEndpoints[4] = "https://invalid.torus.com"; + try { + await GetOrSetTssDKGPubKey({ + endpoints: torusNodeEndpoints, + verifier: TORUS_TEST_VERIFIER, + verifierId: email, + tssVerifierId, + }); + // If the function doesn't throw an error, fail the test + expect.fail("Expected an error to be thrown"); + } catch (error) { + // Test passes if an error is thrown + expect(error).to.be.instanceOf(Error); + } + }); +}); From c701538250e2abb1d863ea99db55ccdb7f580673 Mon Sep 17 00:00:00 2001 From: himanshu Date: Fri, 4 Oct 2024 14:05:42 +0530 Subject: [PATCH 9/9] cleanup --- src/helpers/tssPubKeyUtils.ts | 14 ++++++++------ 1 file changed, 8 insertions(+), 6 deletions(-) diff --git a/src/helpers/tssPubKeyUtils.ts b/src/helpers/tssPubKeyUtils.ts index a88920a..dca9bb9 100644 --- a/src/helpers/tssPubKeyUtils.ts +++ b/src/helpers/tssPubKeyUtils.ts @@ -1,9 +1,10 @@ // Note: Endpoints should be the sss node endpoints along with path -import { JRPCResponse } from "@toruslabs/constants"; +import { JRPCResponse, KEY_TYPE } from "@toruslabs/constants"; import { generateJsonRPCObject, post } from "@toruslabs/http-helpers"; import log from "loglevel"; -import { GetORSetKeyResponse } from "../interfaces"; +import { JRPC_METHODS } from "../constants"; +import { GetORSetKeyResponse, KeyType } from "../interfaces"; import { Some } from "../some"; import { normalizeKeysResult, thresholdSame } from "./common"; @@ -13,6 +14,7 @@ export const GetOrSetTssDKGPubKey = async (params: { verifier: string; verifierId: string; tssVerifierId: string; + keyType?: KeyType; }): Promise<{ key: { pubKeyX: string; @@ -23,18 +25,18 @@ export const GetOrSetTssDKGPubKey = async (params: { isNewKey: boolean; nodeIndexes: number[]; }> => { - const { endpoints, verifier, verifierId, tssVerifierId } = params; + const { endpoints, verifier, verifierId, tssVerifierId, keyType = KEY_TYPE.SECP256K1 } = params; const minThreshold = ~~(endpoints.length / 2) + 1; const lookupPromises = endpoints.map((x) => post<JRPCResponse<GetORSetKeyResponse>>( x, - generateJsonRPCObject("GetPubKeyOrKeyAssign", { + generateJsonRPCObject(JRPC_METHODS.GET_OR_SET_KEY, { distributed_metadata: true, verifier, verifier_id: verifierId, extended_verifier_id: tssVerifierId, one_key_flow: true, - key_type: "secp256k1", + key_type: keyType, fetch_node_index: true, client_time: Math.floor(Date.now() / 1000).toString(), }), {}, { logTracingHeader: false, } - ).catch((err) => log.error(`GetPubKeyOrKeyAssign request failed`, err)) + ).catch((err) => log.error(`${JRPC_METHODS.GET_OR_SET_KEY} request failed`, err)) ); const nodeIndexes: number[] = [];
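Reviewer context: with patches 8 and 9 applied, GetOrSetTssDKGPubKey is re-exported from the package root (the tests import it from ../src) and accepts an optional keyType. A minimal usage sketch mirroring the test fixtures above follows; the verifier name is illustrative, and the \u0015/\u0016 separators are the tssVerifierId convention the tests use.

import { KEY_TYPE, TORUS_SAPPHIRE_NETWORK } from "@toruslabs/constants";
import { NodeDetailManager } from "@toruslabs/fetch-node-details";
import { GetOrSetTssDKGPubKey } from "@toruslabs/torus.js";

async function fetchTssDkgKey(email: string) {
  const nodeManager = new NodeDetailManager({ network: TORUS_SAPPHIRE_NETWORK.SAPPHIRE_DEVNET });
  // endpoints must be the sss node endpoints, including the /sss/jrpc path
  const { torusNodeSSSEndpoints } = await nodeManager.getNodeDetails({
    verifier: "torus-test-health", // illustrative verifier
    verifierId: email,
  });
  // tssVerifierId is "<verifierId>\u0015<tssTag>\u0016<nonce>", as in the tests above
  const tssVerifierId = `${email}\u0015default\u00160`;
  const { key, isNewKey, nodeIndexes } = await GetOrSetTssDKGPubKey({
    endpoints: torusNodeSSSEndpoints,
    verifier: "torus-test-health",
    verifierId: email,
    tssVerifierId,
    keyType: KEY_TYPE.SECP256K1, // optional since patch 9; defaults to secp256k1
  });
  // key holds pubKeyX / pubKeyY / address; nodeIndexes lists the nodes whose
  // returned key matched the threshold key
  return { key, isNewKey, nodeIndexes };
}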