Feature/fix 73 #380

Merged 4 commits on Apr 18, 2024
2 changes: 2 additions & 0 deletions main/process-change-l2-block-utils.zkasm
@@ -1,4 +1,5 @@
 ; Computes L1InfoTree leaf value computedL1InfoTreeData = keccak(gerL1InfoTree, blockHashL1InfoTree, timestampL1InfoTree)
+; @out label: computedL1InfoTreeData
 computeL1InfoTreeValue:
 %MAX_CNT_KECCAK_F - CNT_KECCAK_F - 1 :JMPN(outOfCountersKeccak)

@@ -30,6 +31,7 @@ computeNewCurrentL1InfoRootFromHistoric:
 
 ; compute "new" currentL1InfoRoot from computed merkle root, currentL1InfoTreeRoot = keccak(computedMerkleRoot, computedL1InfoTreeData)
 ; @in C: computed merkle root
+; @out label: currentL1InfoTreeRoot
 computeNewCurrentL1InfoRoot:
 %MAX_CNT_KECCAK_F - CNT_KECCAK_F - 1 :JMPN(outOfCountersKeccak)
 $ => E :MLOAD(nextHashKId)
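For context, the leaf value produced by computeL1InfoTreeValue is a keccak over the packed (GER, L1 block hash, min timestamp) fields, as the header comment above states. The JavaScript sketch below is illustrative only; it assumes ethers v6 and the (bytes32, bytes32, uint64) packed encoding used by the zkEVM L1InfoTree, neither of which is part of this PR.

    // Hedged illustration of computedL1InfoTreeData = keccak(ger, blockHash, minTimestamp).
    // Assumes ethers v6; the packed (bytes32, bytes32, uint64) layout is taken from the
    // zkEVM L1InfoTree leaf format, not from this diff.
    const { solidityPackedKeccak256 } = require('ethers');

    function computeL1InfoTreeLeaf(gerL1InfoTree, blockHashL1InfoTree, timestampL1InfoTree) {
        return solidityPackedKeccak256(
            ['bytes32', 'bytes32', 'uint64'],
            [gerL1InfoTree, blockHashL1InfoTree, timestampL1InfoTree],
        );
    }

    // Example with dummy values:
    // computeL1InfoTreeLeaf('0x' + '11'.repeat(32), '0x' + '22'.repeat(32), 1713398400n);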
25 changes: 14 additions & 11 deletions main/process-change-l2-block.zkasm
@@ -52,7 +52,7 @@ continueProcessChangeL2Block:
 $ :MLOAD(isForced), JMPZ(verifyTimestampAndL1InfoRoot)
 
 ; forced batch
-; - update timestamp only if currentTimestamp < limitTimestamp
+; - update timestamp only if currentTimestamp < forcedTimestamp
 ; - set blockHash to default
 $ => C :MLOAD(forcedBlockHashL1InfoTree)
 C :MSTORE(blockHashL1InfoTree)
@@ -77,26 +77,29 @@ verifyTimestampAndL1InfoRoot:
 A - $ - 1 :F_MLOAD(currentL1InfoTreeIndex), JMPN(invalidL1InfoTreeIndex)
 ${getL1InfoGER(mem.indexL1InfoTree)} :MSTORE(gerL1InfoTree)
 ${getL1InfoBlockHash(mem.indexL1InfoTree)} :MSTORE(blockHashL1InfoTree)
-${getL1InfoMinTimestamp(mem.indexL1InfoTree)} => B :MSTORE(timestampL1InfoTree)
-; Verify (currentTimestamp + deltaTimestamp) >= l1InfoRoot.minTimestamp
-$ => A :MLOAD(timestamp)
-$ :LT, JMPC(invalidChangeL2BlockMinTimestamp)
+${getL1InfoMinTimestamp(mem.indexL1InfoTree)} :MSTORE(timestampL1InfoTree)
 
 ; Compute infoTreeData
-:CALL(computeL1InfoTreeValue)
+:CALL(computeL1InfoTreeValue) ; out: [@label: computedL1InfoTreeData]
 $ :MLOAD(currentL1InfoTreeIndex), JMPNZ(previousL1InfoTreeIndexIsNotZero)
+; if previous L1 Info TreeIndex is zero, currentL1InfoTreeRoot = keccak(HistoricRoot, blockHashL1InfoTree)
 :CALL(computeNewCurrentL1InfoRootFromHistoric)
-$ => A :MLOAD(indexL1InfoTree)
-; Update currentL1InfoTreeIndex
-A :MSTORE(currentL1InfoTreeIndex), JMP(initSetGERL1InfoTree)
+:JMP(updateCurrentL1InfoTreeIndex)
 
 previousL1InfoTreeIndexIsNotZero:
+; if previous L1 Info TreeIndex is NOT zero, currentL1InfoTreeRoot = keccak(computedMerkleRoot, blockHashL1InfoTree)
 :CALL(computeMerkleProof) ; out: [C: computed merkle root]
 ; Compute newL1InfoTreeRoot
-:CALL(computeNewCurrentL1InfoRoot)
+:CALL(computeNewCurrentL1InfoRoot) ; in: [C: computed merkle root], out: [@label: currentL1InfoTreeRoot]
+updateCurrentL1InfoTreeIndex:
 ; Update currentL1InfoTreeIndex
 $ => A :MLOAD(indexL1InfoTree)
-A :MSTORE(currentL1InfoTreeIndex), JMP(initSetGERL1InfoTree)
+A :MSTORE(currentL1InfoTreeIndex)
+; Verify (currentTimestamp + deltaTimestamp) >= l1InfoRoot.minTimestamp
+; This verification is done after updating the currentL1InfoTreeIndex and the currentL1InfoTreeRoot to correctly set batch outputs at execution finalization
+$ => A :MLOAD(timestamp)
+$ => B :MLOAD(timestampL1InfoTree)
+$ :LT, JMPC(invalidChangeL2BlockMinTimestamp, initSetGERL1InfoTree)
 
 setNewTimestamp:
 ; Set forcedTimestamp (now new Timestamp) from forced batch data
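To make the reordering easier to follow, here is an illustrative JavaScript sketch of the new flow (not the ROM itself): derive the new root from the historic root when the previous index is zero and from the recomputed merkle root otherwise, update the index, and only then run the min-timestamp check. The state object, helper names and the ethers dependency are assumptions for illustration; the second keccak operand follows the computeNewCurrentL1InfoRoot header comment, while the inline branch comments mention blockHashL1InfoTree, so treat that operand as an assumption too.

    const { solidityPackedKeccak256 } = require('ethers');

    // keccak of two 32-byte words, standing in for the ROM's keccak calls
    const keccakPair = (a, b) => solidityPackedKeccak256(['bytes32', 'bytes32'], [a, b]);

    function updateL1InfoTreeState(state, leafValue, indexL1InfoTree, minTimestamp, computedMerkleRoot) {
        if (state.currentL1InfoTreeIndex === 0) {
            // previous index is zero: new root derived from the historic root
            state.currentL1InfoTreeRoot = keccakPair(state.historicL1InfoRoot, leafValue);
        } else {
            // otherwise: new root derived from the recomputed merkle root
            state.currentL1InfoTreeRoot = keccakPair(computedMerkleRoot, leafValue);
        }

        // Index (and root) are updated before the timestamp check, mirroring the diff,
        // so batch outputs are set correctly at execution finalization.
        state.currentL1InfoTreeIndex = indexL1InfoTree;

        // Verify (currentTimestamp + deltaTimestamp) >= l1InfoRoot.minTimestamp;
        // state.timestamp is assumed to already include deltaTimestamp here.
        if (state.timestamp < minTimestamp) {
            throw new Error('invalidChangeL2BlockMinTimestamp');
        }
    }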
2 changes: 1 addition & 1 deletion main/utils.zkasm
@@ -135,7 +135,7 @@ __MSTOREX_afterSave:
 
 ; E === 32 * RR + A (RCX)
 ; E - 32 * RR === A
-; secure: E < MAX_MEM_EXPASION_BYTES < 32 bits, RR < 32 bits, 32*RR < 37 bits, all < 38 bits
+; secure: E < MAX_MEM_EXPANSION_BYTES < 32 bits, RR < 32 bits, 32*RR < 37 bits, all < 38 bits
 E - 32 * RR :ASSERT
 
 RCX + %MEM_ALIGN_LEN * C + %MEM_ALIGN_LEFT_ALIGNMENT => C :JMP_EQ(%MEM_ALIGN_LEN * 32 + %MEM_ALIGN_LEFT_ALIGNMENT, __MSTORE32_offset0_len32)
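The comment fixed above encodes a small range argument: a byte offset E is split as E = 32 * RR + A, and since E stays below MAX_MEM_EXPANSION_BYTES (itself below 2^32) and RR below 2^32, the product 32 * RR stays below 2^37, so every term in the asserted expression fits comfortably under 38 bits. The JavaScript snippet below only illustrates that decomposition; the concrete bound value and the 0..31 range for A are assumptions, not the ROM constants.

    // E === 32 * RR + A: 32-byte word index plus byte offset inside the word.
    const MAX_MEM_EXPANSION_BYTES = 2 ** 32; // illustrative bound only

    function splitOffset(E) {
        if (E < 0 || E >= MAX_MEM_EXPANSION_BYTES) throw new Error('offset out of range');
        const RR = Math.floor(E / 32); // word index
        const A = E % 32;              // byte offset inside the word, 0..31
        console.assert(E === 32 * RR + A); // 32 * RR stays below 2^37, so no overflow
        return { RR, A };
    }

    // splitOffset(1000) -> { RR: 31, A: 8 }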
6 changes: 3 additions & 3 deletions package.json
@@ -43,9 +43,9 @@
     "yargs": "^17.5.1"
   },
   "devDependencies": {
-    "@0xpolygonhermez/zkevm-commonjs": "github:0xPolygonHermez/zkevm-commonjs#v7.0.0-rc.1-fork.10",
-    "@0xpolygonhermez/zkevm-proverjs": "github:0xPolygonHermez/zkevm-proverjs#feature/renaming",
-    "@0xpolygonhermez/zkevm-testvectors": "github:0xPolygonHermez/zkevm-testvectors#v7.0.0-rc.1-fork.10",
+    "@0xpolygonhermez/zkevm-commonjs": "github:0xPolygonHermez/zkevm-commonjs#develop-feijoa",
+    "@0xpolygonhermez/zkevm-proverjs": "github:0xPolygonHermez/zkevm-proverjs#develop-feijoa",
+    "@0xpolygonhermez/zkevm-testvectors": "github:0xPolygonHermez/zkevm-testvectors#develop-feijoa",
     "chai": "^4.3.6",
     "chalk": "^3.0.0",
     "eslint": "^8.25.0",
5 changes: 3 additions & 2 deletions tools/parallel-testing/gen-parallel-tests.js
@@ -25,14 +25,15 @@ async function genTestsFiles() {
         fs.mkdirSync(testsFolder);
     }
     for (const inputPath of inputs) {
-        const name = inputPath.split('/').slice(-1)[0].replace('json', 'test.js');
+        const fileName = path.basename(inputPath, '.json');
+        const folderName = path.basename(path.dirname(inputPath));
         const sample = fs.readFileSync(sampleDir, 'utf-8');
         let test = sample.replace('%%INPUT_PATH%%', `${inputPath}`);
         // Replace skip vcounters flag
         if (argv.skipVCounters) {
             test = test.replace('%%SKIP_VCOUNTERS%%', 'yes');
         }
-        fs.writeFileSync(`${testsFolder}/${name}`, test);
+        fs.writeFileSync(`${testsFolder}/${folderName}__${fileName}.test.js`, test);
     }
     expect(true).to.be.equal(true);
 }
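The renaming is easier to see with a concrete path: the old code derived the test name from the input basename only, so inputs sharing a basename in different folders mapped to the same output file, while the new code prefixes the parent folder and keeps them distinct. A small runnable comparison follows; the input path is made up for illustration.

    const path = require('path');

    const inputPath = 'tools/parallel-testing/inputs/group-a/example.json';

    // Old naming: basename only, so two example.json files in different folders collide.
    const oldName = inputPath.split('/').slice(-1)[0].replace('json', 'test.js');

    // New naming: parent folder as a prefix keeps same-named inputs distinct.
    const fileName = path.basename(inputPath, '.json');
    const folderName = path.basename(path.dirname(inputPath));
    const newName = `${folderName}__${fileName}.test.js`;

    console.log(oldName); // example.test.js
    console.log(newName); // group-a__example.test.js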