From 4e88e9535adaacbd4cbc4f0a214a6594bda59268 Mon Sep 17 00:00:00 2001 From: mike dupont Date: Mon, 21 Oct 2024 04:18:23 -0400 Subject: [PATCH 1/3] perf recording A bunch of patches to record some of the node perf data at runtime. disable some reports, mkdir before writing create the dirs so we can write there adding error correction fixing the collecting of output to after the unit tests finish lets try to capture the results fixing the job update dirs now listing the files and running different profs adding more test adding error checking to the copy looking for isolate removing extra move fixing the directories adding in a matrix of profile tests adding node version to matrix rename the artifact to include the node version moving count to 32 perf splitting the keccak test file into parts by the sizes because so larger fixme: refactor to reduce duplicate code and use parameters reduce matrix size all branchs Refactoring output directory We want the output directories to contain the branch name so we can merge results easily. also using shellcheck https://github.com/mschuett/yaml-shellcheck.git on the yaml. fixing variable names --- .github/workflows/auto-merge-main-to-v2.yml | 7 +- .github/workflows/benchmarks.yml | 8 +- .github/workflows/build-action.yml | 99 +++++-- .github/workflows/changelog-entry.yml | 9 +- .github/workflows/doc.yml | 7 +- .github/workflows/live-tests.yml | 12 +- .github/workflows/pkg-pr-new-publish.yml | 6 +- .github/workflows/release.yml | 4 +- .gitignore | 52 ++++ run | 5 +- src/bindings | 2 +- ...k.unit-test.ts => keccak-256.unit-test.ts} | 4 +- src/lib/provable/test/keccak-384.unit-test.ts | 274 ++++++++++++++++++ src/lib/provable/test/keccak-512.unit-test.ts | 273 +++++++++++++++++ 14 files changed, 713 insertions(+), 49 deletions(-) rename src/lib/provable/test/{keccak.unit-test.ts => keccak-256.unit-test.ts} (99%) create mode 100644 src/lib/provable/test/keccak-384.unit-test.ts create mode 100644 src/lib/provable/test/keccak-512.unit-test.ts diff --git a/.github/workflows/auto-merge-main-to-v2.yml b/.github/workflows/auto-merge-main-to-v2.yml index 0e171bd355..75f03a49e5 100644 --- a/.github/workflows/auto-merge-main-to-v2.yml +++ b/.github/workflows/auto-merge-main-to-v2.yml @@ -13,9 +13,10 @@ name: Auto-merge main to v2 on: - push: - branches: - - main + workflow_dispatch: +# push: +# branches: +# - main jobs: auto-merge: diff --git a/.github/workflows/benchmarks.yml b/.github/workflows/benchmarks.yml index ec1a4d969d..aaaf1720fa 100644 --- a/.github/workflows/benchmarks.yml +++ b/.github/workflows/benchmarks.yml @@ -1,9 +1,9 @@ name: Benchmark o1js on: - push: - branches: - - main - - develop +# push: +# branches: +# - main +# - develop pull_request: workflow_dispatch: {} diff --git a/.github/workflows/build-action.yml b/.github/workflows/build-action.yml index 9d4b1a98d3..62c3e0640d 100644 --- a/.github/workflows/build-action.yml +++ b/.github/workflows/build-action.yml @@ -1,9 +1,12 @@ name: Build o1js on: push: - branches: - - main - - develop + #branches: + # - feature/perf + # - perf-recording + # - main + # - develop + # - * pull_request: workflow_dispatch: {} @@ -12,7 +15,7 @@ jobs: runs-on: ubuntu-latest outputs: test_count: ${{ steps.count_tests.outputs.test_count }} - chunk_count: 8 # This is hardcoded to 8, but it can be changed to any number. + chunk_count: 32 # This is hardcoded to 8, but it can be changed to any number. 
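+      # Keep this output in sync with the `chunk` matrix and the CHUNKS env var in the
+      # Run-Unit-Tests job below; they are presumably meant to carry the same value.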
steps: - name: Checkout repository with submodules uses: actions/checkout@v4 @@ -44,7 +47,7 @@ jobs: id: count_tests run: | TEST_COUNT=$(find ./dist/node -name "*.unit-test.js" | wc -l) - echo "test_count=${TEST_COUNT}" >> $GITHUB_OUTPUT + echo "test_count=${TEST_COUNT}" >> "$GITHUB_OUTPUT" echo "Total test count: ${TEST_COUNT}" - name: Cache repository @@ -103,8 +106,8 @@ jobs: - name: Add to job summary if: always() run: | - echo "### Test Results for ${{ matrix.test_type }}" >> $GITHUB_STEP_SUMMARY - cat profiling.md >> $GITHUB_STEP_SUMMARY + echo "### Test Results for ${{ matrix.test_type }}" >> "$GITHUB_STEP_SUMMARY" + cat profiling.md >> "$GITHUB_STEP_SUMMARY" Run-Unit-Tests: needs: Prepare @@ -114,7 +117,13 @@ jobs: strategy: fail-fast: false matrix: - chunk: [1, 2, 3, 4, 5, 6, 7, 8] + chunk: [1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32] + perf: [ + #prof, heap, cpu, + all] + node_version: [18 + #,20,22 + ] steps: - name: Restore repository uses: actions/cache@v4 @@ -125,7 +134,8 @@ jobs: - name: Setup Node uses: actions/setup-node@v4 with: - node-version: '18' + # FIXME change to use matrix + node-version: ${{ matrix.node_version }} - name: Restore cache uses: actions/cache@v4 @@ -139,16 +149,32 @@ jobs: - name: Prepare for tests run: touch profiling.md + # from https://stackoverflow.com/questions/75985925/how-to-replace-slashes-with-dashes-and-set-it-an-environment-variable-in-github + - name: Sets MODIFIED_BRANCH_NAME + env: + BRANCH_NAME: ${{ github.head_ref || github.ref_name }} + run: | + MODIFIED_BRANCH_NAME=${BRANCH_NAME/\//-} + OUTPUT_DIR="profile/profile-data/${MODIFIED_BRANCH_NAME}/" + OUTPUT_TEST_DIR="${OUTPUT_DIR}/profile-data-${{matrix.chunk}}-${{ matrix.perf }}-${{ matrix.node_version }}" + + echo "MODIFIED_BRANCH_NAME=${MODIFIED_BRANCH_NAME}" >> "$GITHUB_ENV" + echo "OUTPUT_DIR=${OUTPUT_DIR}" >> "$GITHUB_ENV" + echo "OUTPUT_TEST_DIR=${OUTPUT_TEST_DIR}" >> "$GITHUB_ENV" + + - name: create dir + run: mkdir -p ${{env.OUTPUT_TEST_DIR}} + - name: Run unit tests env: TOTAL_TESTS: ${{ needs.Prepare.outputs.test_count }} CHUNK: ${{ matrix.chunk }} - CHUNKS: 8 + CHUNKS: 32 run: | echo "Total tests: $TOTAL_TESTS" echo "Current chunk: $CHUNK" - echo "Total chunks: $CHUNKS" - + echo "Total chunks: $CHUNKS" + if [ -z "$TOTAL_TESTS" ] || [ "$TOTAL_TESTS" -eq 0 ]; then echo "Error: TOTAL_TESTS is not set or is zero. Exiting." 
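+            # TOTAL_TESTS comes from the Prepare job's count_tests output (the number of built
+            # *.unit-test.js files under ./dist/node), so an empty or zero value here most likely
+            # means the build artifacts were not produced or not restored from cache.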
exit 1 @@ -162,26 +188,57 @@ jobs: shopt -s globstar test_files=(./dist/node/**/*.unit-test.js) - set -o pipefail + #set -o pipefail for ((i=start_index; i> $GITHUB_STEP_SUMMARY - cat profiling.md >> $GITHUB_STEP_SUMMARY + echo "### Test Results for Unit Tests Chunk ${{ matrix.chunk }}" >> "$GITHUB_STEP_SUMMARY" + cat profiling.md >> "$GITHUB_STEP_SUMMARY" Build-And-Test-Server-Unit-Tests: name: Build-And-Test-Server (Unit tests) @@ -229,8 +286,7 @@ jobs: npm run e2e:prepare-server - name: Execute E2E tests - run: npm run test:e2e - + run: npm run test:e2e - name: Upload E2E test artifacts uses: actions/upload-artifact@v4 continue-on-error: true @@ -238,6 +294,7 @@ jobs: with: if-no-files-found: ignore name: e2e-tests-report + # playwrite data path: tests/report/ retention-days: 30 diff --git a/.github/workflows/changelog-entry.yml b/.github/workflows/changelog-entry.yml index a53bcc6914..d071c9f69d 100644 --- a/.github/workflows/changelog-entry.yml +++ b/.github/workflows/changelog-entry.yml @@ -1,9 +1,10 @@ name: Check Changelog for changes on: - pull_request: - types: [assigned, opened, synchronize, reopened, labeled, unlabeled] - branches: - - main + workflow_dispatch: +# pull_request: +# types: [assigned, opened, synchronize, reopened, labeled, unlabeled] +# branches: +# - main jobs: Check-Changelog: name: Check Changelog Action diff --git a/.github/workflows/doc.yml b/.github/workflows/doc.yml index 6bd4433615..90b18bf934 100644 --- a/.github/workflows/doc.yml +++ b/.github/workflows/doc.yml @@ -1,8 +1,9 @@ name: o1js typedoc on: - push: - branches: - - main + workflow_dispatch: +# push: +# branches: +# - main jobs: Build-Doc: diff --git a/.github/workflows/live-tests.yml b/.github/workflows/live-tests.yml index e3d40ce2f9..dcee8ac9fb 100644 --- a/.github/workflows/live-tests.yml +++ b/.github/workflows/live-tests.yml @@ -1,11 +1,11 @@ name: Test o1js against lightnet on: - push: - branches: - - main - pull_request: - branches: - - main +# push: +# branches: +# - main +# pull_request: +# branches: +# - main workflow_dispatch: {} jobs: diff --git a/.github/workflows/pkg-pr-new-publish.yml b/.github/workflows/pkg-pr-new-publish.yml index 134650eec4..f1cc9a0918 100644 --- a/.github/workflows/pkg-pr-new-publish.yml +++ b/.github/workflows/pkg-pr-new-publish.yml @@ -4,9 +4,9 @@ name: Continuous releases on pkg-pr-new on: - push: - branches: - - main +# push: +# branches: +# - main workflow_dispatch: {} jobs: diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 67e89f95bb..84898a3043 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -11,8 +11,8 @@ name: Version Bump on: workflow_dispatch: # Allow to manually trigger the workflow - schedule: - - cron: '0 0 * * 2' # At 00:00 UTC every Tuesday +# schedule: +# - cron: '0 0 * * 2' # At 00:00 UTC every Tuesday jobs: version-bump: diff --git a/.gitignore b/.gitignore index 3c60f7a680..05af500fe2 100644 --- a/.gitignore +++ b/.gitignore @@ -13,3 +13,55 @@ src/config.mlh .direnv .rustup result +isolate*-v8.log +CPU*.cpuprofile +Heap*heapprofile +# -*- mode: gitignore; -*- +*~ +\#*\# +/.emacs.desktop +/.emacs.desktop.lock +*.elc +auto-save-list +tramp +.\#* + +# Org-mode +.org-id-locations +*_archive + +# flymake-mode +*_flymake.* + +# eshell files +/eshell/history +/eshell/lastdir + +# elpa packages +/elpa/ + +# reftex files +*.rel + +# AUCTeX auto folder +/auto/ + +# cask packages +.cask/ +dist/ + +# Flycheck +flycheck_*.el + +# server auth directory +/server/ + +# projectiles files 
+.projectile + +# directory configuration +.dir-locals.el + +# network security +/network-security.data + diff --git a/run b/run index b039136688..54b25f4531 100755 --- a/run +++ b/run @@ -1 +1,4 @@ -node --enable-source-maps --stack-trace-limit=1000 src/build/run.js $@ +node --cpu-prof --expose-gc --cpu-prof-dir tests/report/profile-data --enable-source-maps --stack-trace-limit=1000 src/build/run.js $@ + +node --heap-prof --expose-gc --enable-source-maps --stack-trace-limit=1000 src/build/run.js $@ +node --prof --expose-gc --enable-source-maps --stack-trace-limit=1000 src/build/run.js $@ diff --git a/src/bindings b/src/bindings index e0aae7f07a..8e99acc94d 160000 --- a/src/bindings +++ b/src/bindings @@ -1 +1 @@ -Subproject commit e0aae7f07ac63cd153d86a074bdccab1d8bcf230 +Subproject commit 8e99acc94da892eeb090367844f85d22ecf4f93b diff --git a/src/lib/provable/test/keccak.unit-test.ts b/src/lib/provable/test/keccak-256.unit-test.ts similarity index 99% rename from src/lib/provable/test/keccak.unit-test.ts rename to src/lib/provable/test/keccak-256.unit-test.ts index 92e9eb8854..80844ad5c8 100644 --- a/src/lib/provable/test/keccak.unit-test.ts +++ b/src/lib/provable/test/keccak-256.unit-test.ts @@ -38,7 +38,9 @@ const testImplementations = { }, }; -const lengths = [256, 384, 512] as const; +const lengths = [256 + //, 384, 512 + ] as const; // EQUIVALENCE TESTS AGAINST REF IMPLEMENTATION diff --git a/src/lib/provable/test/keccak-384.unit-test.ts b/src/lib/provable/test/keccak-384.unit-test.ts new file mode 100644 index 0000000000..ec40942b1a --- /dev/null +++ b/src/lib/provable/test/keccak-384.unit-test.ts @@ -0,0 +1,274 @@ +import { Keccak } from '../crypto/keccak.js'; +import { ZkProgram } from '../../proof-system/zkprogram.js'; +import { + equivalentProvable, + equivalent, + equivalentAsync, +} from '../../testing/equivalent.js'; +import { + keccak_224, + keccak_256, + keccak_384, + keccak_512, + sha3_224, + sha3_256, + sha3_384, + sha3_512, +} from '@noble/hashes/sha3'; +import { Bytes } from '../wrapped-classes.js'; +import { bytes } from './test-utils.js'; +import { UInt8 } from '../int.js'; +import { test, Random, sample } from '../../testing/property.js'; +import { expect } from 'expect'; + +const RUNS = 1; + +const testImplementations = { + sha3: { + 224: sha3_224, + 256: sha3_256, + 384: sha3_384, + 512: sha3_512, + }, + preNist: { + 224: keccak_224, + 256: keccak_256, + 384: keccak_384, + 512: keccak_512, + }, +}; + +const lengths = [//256, + 384 + //, 512 +] as const; + +// EQUIVALENCE TESTS AGAINST REF IMPLEMENTATION + +// checks outside circuit +// TODO: fix witness generation slowness + +for (let length of lengths) { + let [preimageLength] = sample(Random.nat(100), 1); + console.log(`Testing ${length} with preimage length ${preimageLength}`); + let inputBytes = bytes(preimageLength); + let outputBytes = bytes(length / 8); + + equivalentProvable({ from: [inputBytes], to: outputBytes, verbose: true })( + testImplementations.sha3[length], + (x) => Keccak.nistSha3(length, x), + `sha3 ${length}` + ); + + equivalentProvable({ from: [inputBytes], to: outputBytes, verbose: true })( + testImplementations.preNist[length], + (x) => Keccak.preNist(length, x), + `keccak ${length}` + ); + + // bytes to hex roundtrip + equivalent({ from: [inputBytes], to: inputBytes })( + (x) => x, + (x) => Bytes.fromHex(x.toHex()), + `Bytes toHex` + ); +} + +// EQUIVALENCE TESTS AGAINST TEST VECTORS (at the bottom) + +for (let { nist, length, message, expected } of testVectors()) { + let Hash = nist ? 
Keccak.nistSha3 : Keccak.preNist; + let actual = Hash(length, Bytes.fromHex(message)); + expect(actual).toEqual(Bytes.fromHex(expected)); +} + +// MISC QUICK TESTS + +// Test constructor +test(Random.uint8, Random.uint8, (x, y, assert) => { + let z = new UInt8(x); + assert(z instanceof UInt8); + assert(z.toBigInt() === x); + assert(z.toString() === x.toString()); + + assert((z = new UInt8(x)) instanceof UInt8 && z.toBigInt() === x); + assert((z = new UInt8(z)) instanceof UInt8 && z.toBigInt() === x); + assert((z = new UInt8(z.value.value)) instanceof UInt8 && z.toBigInt() === x); + + z = new UInt8(y); + assert(z instanceof UInt8); + assert(z.toString() === y.toString()); +}); + +// handles all numbers up to 2^8 +test(Random.nat(255), (n, assert) => { + assert(UInt8.from(n).toString() === String(n)); +}); + +// throws on negative numbers +test.negative(Random.int(-10, -1), (x) => UInt8.from(x)); + +// throws on numbers >= 2^8 +test.negative(Random.uint8.invalid, (x) => UInt8.from(x)); + +// PROOF TESTS + +// Choose a test length at random +const digestLength = lengths[Math.floor(Math.random() * 3)]; + +// Digest length in bytes +const digestLengthBytes = digestLength / 8; + +const preImageLength = 32; + +// No need to test Ethereum because it's just a special case of preNist +const KeccakProgram = ZkProgram({ + name: `keccak-test-${digestLength}`, + publicInput: Bytes(preImageLength), + publicOutput: Bytes(digestLengthBytes), + methods: { + nistSha3: { + privateInputs: [], + async method(preImage: Bytes) { + return Keccak.nistSha3(digestLength, preImage); + }, + }, + preNist: { + privateInputs: [], + async method(preImage: Bytes) { + return Keccak.preNist(digestLength, preImage); + }, + }, + }, +}); + +await KeccakProgram.compile(); + +// SHA-3 +await equivalentAsync( + { + from: [bytes(preImageLength)], + to: bytes(digestLengthBytes), + }, + { runs: RUNS } +)(testImplementations.sha3[digestLength], async (x) => { + const proof = await KeccakProgram.nistSha3(x); + await KeccakProgram.verify(proof); + return proof.publicOutput; +}); + +// PreNIST Keccak +await equivalentAsync( + { + from: [bytes(preImageLength)], + to: bytes(digestLengthBytes), + }, + { runs: RUNS } +)(testImplementations.preNist[digestLength], async (x) => { + const proof = await KeccakProgram.preNist(x); + await KeccakProgram.verify(proof); + return proof.publicOutput; +}); + +// TEST VECTORS + +function testVectors(): { + nist: boolean; + length: 256 | 384 | 512; + message: string; + expected: string; +}[] { + return [ + { + nist: false, + length: 256, + message: '30', + expected: + '044852b2a670ade5407e78fb2863c51de9fcb96542a07186fe3aeda6bb8a116d', + }, + { + nist: true, + length: 512, + message: '30', + expected: + '2d44da53f305ab94b6365837b9803627ab098c41a6013694f9b468bccb9c13e95b3900365eb58924de7158a54467e984efcfdabdbcc9af9a940d49c51455b04c', + }, + { + nist: false, + length: 256, + message: + '4920616d20746865206f776e6572206f6620746865204e465420776974682069642058206f6e2074686520457468657265756d20636861696e', + expected: + '63858e0487687c3eeb30796a3e9307680e1b81b860b01c88ff74545c2c314e36', + }, + { + nist: false, + length: 256, + message: + '044852b2a670ade5407e78fb2863c51de9fcb96542a07186fe3aeda6bb8a116df9e2eaaa42d9fe9e558a9b8ef1bf366f190aacaa83bad2641ee106e9041096e42d44da53f305ab94b6365837b9803627ab098c41a6013694f9b468bccb9c13e95b3900365eb58924de7158a54467e984efcfdabdbcc9af9a940d49c51455b04c63858e0487687c3eeb30796a3e9307680e1b81b860b01c88ff74545c2c314e36', + expected: + 
'560deb1d387f72dba729f0bd0231ad45998dda4b53951645322cf95c7b6261d9', + }, + { + nist: true, + length: 256, + message: + '044852b2a670ade5407e78fb2863c51de9fcb96542a07186fe3aeda6bb8a116df9e2eaaa42d9fe9e558a9b8ef1bf366f190aacaa83bad2641ee106e9041096e42d44da53f305ab94b6365837b9803627ab098c41a6013694f9b468bccb9c13e95b3900365eb58924de7158a54467e984efcfdabdbcc9af9a940d49c51455b04c63858e0487687c3eeb30796a3e9307680e1b81b860b01c88ff74545c2c314e36', + expected: + '1784354c4bbfa5f54e5db23041089e65a807a7b970e3cfdba95e2fbe63b1c0e4', + }, + { + nist: false, + length: 256, + message: + '391ccf9b5de23bb86ec6b2b142adb6e9ba6bee8519e7502fb8be8959fbd2672934cc3e13b7b45bf2b8a5cb48881790a7438b4a326a0c762e31280711e6b64fcc2e3e4e631e501d398861172ea98603618b8f23b91d0208b0b992dfe7fdb298b6465adafbd45e4f88ee9dc94e06bc4232be91587f78572c169d4de4d8b95b714ea62f1fbf3c67a4', + expected: + '7d5655391ede9ca2945f32ad9696f464be8004389151ce444c89f688278f2e1d', + }, + { + nist: false, + length: 256, + message: + 'ff391ccf9b5de23bb86ec6b2b142adb6e9ba6bee8519e7502fb8be8959fbd2672934cc3e13b7b45bf2b8a5cb48881790a7438b4a326a0c762e31280711e6b64fcc2e3e4e631e501d398861172ea98603618b8f23b91d0208b0b992dfe7fdb298b6465adafbd45e4f88ee9dc94e06bc4232be91587f78572c169d4de4d8b95b714ea62f1fbf3c67a4', + expected: + '37694fd4ba137be747eb25a85b259af5563e0a7a3010d42bd15963ac631b9d3f', + }, + { + nist: false, + length: 256, + message: + '80000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001', + expected: + 'bbf1f49a2cc5678aa62196d0c3108d89425b81780e1e90bcec03b4fb5f834714', + }, + { + nist: false, + length: 256, + message: + '80000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001', + expected: + 'bbf1f49a2cc5678aa62196d0c3108d89425b81780e1e90bcec03b4fb5f834714', + }, + { + nist: false, + length: 256, + message: 'a2c0', + expected: + '9856642c690c036527b8274db1b6f58c0429a88d9f3b9298597645991f4f58f0', + }, + { + nist: false, + length: 256, + message: '0a2c', + expected: + '295b48ad49eff61c3abfd399c672232434d89a4ef3ca763b9dbebb60dbb32a8b', + }, + { + nist: false, + length: 256, + message: '00', + expected: + 'bc36789e7a1e281436464229828f817d6612f7b477d66591ff96a9e064bcc98a', + }, + ]; +} diff --git a/src/lib/provable/test/keccak-512.unit-test.ts b/src/lib/provable/test/keccak-512.unit-test.ts new file mode 100644 index 0000000000..01b4e0f72c --- /dev/null +++ b/src/lib/provable/test/keccak-512.unit-test.ts @@ -0,0 +1,273 @@ +import { Keccak } from '../crypto/keccak.js'; +import { ZkProgram } from '../../proof-system/zkprogram.js'; +import { + equivalentProvable, + equivalent, + equivalentAsync, +} from '../../testing/equivalent.js'; +import { + keccak_224, + keccak_256, + keccak_384, + keccak_512, + sha3_224, + sha3_256, + sha3_384, + sha3_512, +} from '@noble/hashes/sha3'; +import { Bytes } from '../wrapped-classes.js'; +import { bytes } from './test-utils.js'; +import { UInt8 } from '../int.js'; +import { test, Random, sample } from '../../testing/property.js'; +import { expect } from 'expect'; + +const RUNS = 1; + +const testImplementations = { + sha3: { + 224: sha3_224, + 256: sha3_256, + 384: sha3_384, + 512: sha3_512, + }, 
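+  // keccak_224..keccak_512 are the pre-NIST ("original") Keccak variants; they differ
+  // from the sha3_* functions above only in the final padding / domain-separation byte.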
+ preNist: { + 224: keccak_224, + 256: keccak_256, + 384: keccak_384, + 512: keccak_512, + }, +}; + +const lengths = [ + //256, 384, + 512] as const; + +// EQUIVALENCE TESTS AGAINST REF IMPLEMENTATION + +// checks outside circuit +// TODO: fix witness generation slowness + +for (let length of lengths) { + let [preimageLength] = sample(Random.nat(100), 1); + console.log(`Testing ${length} with preimage length ${preimageLength}`); + let inputBytes = bytes(preimageLength); + let outputBytes = bytes(length / 8); + + equivalentProvable({ from: [inputBytes], to: outputBytes, verbose: true })( + testImplementations.sha3[length], + (x) => Keccak.nistSha3(length, x), + `sha3 ${length}` + ); + + equivalentProvable({ from: [inputBytes], to: outputBytes, verbose: true })( + testImplementations.preNist[length], + (x) => Keccak.preNist(length, x), + `keccak ${length}` + ); + + // bytes to hex roundtrip + equivalent({ from: [inputBytes], to: inputBytes })( + (x) => x, + (x) => Bytes.fromHex(x.toHex()), + `Bytes toHex` + ); +} + +// EQUIVALENCE TESTS AGAINST TEST VECTORS (at the bottom) + +for (let { nist, length, message, expected } of testVectors()) { + let Hash = nist ? Keccak.nistSha3 : Keccak.preNist; + let actual = Hash(length, Bytes.fromHex(message)); + expect(actual).toEqual(Bytes.fromHex(expected)); +} + +// MISC QUICK TESTS + +// Test constructor +test(Random.uint8, Random.uint8, (x, y, assert) => { + let z = new UInt8(x); + assert(z instanceof UInt8); + assert(z.toBigInt() === x); + assert(z.toString() === x.toString()); + + assert((z = new UInt8(x)) instanceof UInt8 && z.toBigInt() === x); + assert((z = new UInt8(z)) instanceof UInt8 && z.toBigInt() === x); + assert((z = new UInt8(z.value.value)) instanceof UInt8 && z.toBigInt() === x); + + z = new UInt8(y); + assert(z instanceof UInt8); + assert(z.toString() === y.toString()); +}); + +// handles all numbers up to 2^8 +test(Random.nat(255), (n, assert) => { + assert(UInt8.from(n).toString() === String(n)); +}); + +// throws on negative numbers +test.negative(Random.int(-10, -1), (x) => UInt8.from(x)); + +// throws on numbers >= 2^8 +test.negative(Random.uint8.invalid, (x) => UInt8.from(x)); + +// PROOF TESTS + +// Choose a test length at random +const digestLength = lengths[Math.floor(Math.random() * 3)]; + +// Digest length in bytes +const digestLengthBytes = digestLength / 8; + +const preImageLength = 32; + +// No need to test Ethereum because it's just a special case of preNist +const KeccakProgram = ZkProgram({ + name: `keccak-test-${digestLength}`, + publicInput: Bytes(preImageLength), + publicOutput: Bytes(digestLengthBytes), + methods: { + nistSha3: { + privateInputs: [], + async method(preImage: Bytes) { + return Keccak.nistSha3(digestLength, preImage); + }, + }, + preNist: { + privateInputs: [], + async method(preImage: Bytes) { + return Keccak.preNist(digestLength, preImage); + }, + }, + }, +}); + +await KeccakProgram.compile(); + +// SHA-3 +await equivalentAsync( + { + from: [bytes(preImageLength)], + to: bytes(digestLengthBytes), + }, + { runs: RUNS } +)(testImplementations.sha3[digestLength], async (x) => { + const proof = await KeccakProgram.nistSha3(x); + await KeccakProgram.verify(proof); + return proof.publicOutput; +}); + +// PreNIST Keccak +await equivalentAsync( + { + from: [bytes(preImageLength)], + to: bytes(digestLengthBytes), + }, + { runs: RUNS } +)(testImplementations.preNist[digestLength], async (x) => { + const proof = await KeccakProgram.preNist(x); + await KeccakProgram.verify(proof); + return proof.publicOutput; 
+}); + +// TEST VECTORS + +function testVectors(): { + nist: boolean; + length: 256 | 384 | 512; + message: string; + expected: string; +}[] { + return [ + { + nist: false, + length: 256, + message: '30', + expected: + '044852b2a670ade5407e78fb2863c51de9fcb96542a07186fe3aeda6bb8a116d', + }, + { + nist: true, + length: 512, + message: '30', + expected: + '2d44da53f305ab94b6365837b9803627ab098c41a6013694f9b468bccb9c13e95b3900365eb58924de7158a54467e984efcfdabdbcc9af9a940d49c51455b04c', + }, + { + nist: false, + length: 256, + message: + '4920616d20746865206f776e6572206f6620746865204e465420776974682069642058206f6e2074686520457468657265756d20636861696e', + expected: + '63858e0487687c3eeb30796a3e9307680e1b81b860b01c88ff74545c2c314e36', + }, + { + nist: false, + length: 256, + message: + '044852b2a670ade5407e78fb2863c51de9fcb96542a07186fe3aeda6bb8a116df9e2eaaa42d9fe9e558a9b8ef1bf366f190aacaa83bad2641ee106e9041096e42d44da53f305ab94b6365837b9803627ab098c41a6013694f9b468bccb9c13e95b3900365eb58924de7158a54467e984efcfdabdbcc9af9a940d49c51455b04c63858e0487687c3eeb30796a3e9307680e1b81b860b01c88ff74545c2c314e36', + expected: + '560deb1d387f72dba729f0bd0231ad45998dda4b53951645322cf95c7b6261d9', + }, + { + nist: true, + length: 256, + message: + '044852b2a670ade5407e78fb2863c51de9fcb96542a07186fe3aeda6bb8a116df9e2eaaa42d9fe9e558a9b8ef1bf366f190aacaa83bad2641ee106e9041096e42d44da53f305ab94b6365837b9803627ab098c41a6013694f9b468bccb9c13e95b3900365eb58924de7158a54467e984efcfdabdbcc9af9a940d49c51455b04c63858e0487687c3eeb30796a3e9307680e1b81b860b01c88ff74545c2c314e36', + expected: + '1784354c4bbfa5f54e5db23041089e65a807a7b970e3cfdba95e2fbe63b1c0e4', + }, + { + nist: false, + length: 256, + message: + '391ccf9b5de23bb86ec6b2b142adb6e9ba6bee8519e7502fb8be8959fbd2672934cc3e13b7b45bf2b8a5cb48881790a7438b4a326a0c762e31280711e6b64fcc2e3e4e631e501d398861172ea98603618b8f23b91d0208b0b992dfe7fdb298b6465adafbd45e4f88ee9dc94e06bc4232be91587f78572c169d4de4d8b95b714ea62f1fbf3c67a4', + expected: + '7d5655391ede9ca2945f32ad9696f464be8004389151ce444c89f688278f2e1d', + }, + { + nist: false, + length: 256, + message: + 'ff391ccf9b5de23bb86ec6b2b142adb6e9ba6bee8519e7502fb8be8959fbd2672934cc3e13b7b45bf2b8a5cb48881790a7438b4a326a0c762e31280711e6b64fcc2e3e4e631e501d398861172ea98603618b8f23b91d0208b0b992dfe7fdb298b6465adafbd45e4f88ee9dc94e06bc4232be91587f78572c169d4de4d8b95b714ea62f1fbf3c67a4', + expected: + '37694fd4ba137be747eb25a85b259af5563e0a7a3010d42bd15963ac631b9d3f', + }, + { + nist: false, + length: 256, + message: + '80000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001', + expected: + 'bbf1f49a2cc5678aa62196d0c3108d89425b81780e1e90bcec03b4fb5f834714', + }, + { + nist: false, + length: 256, + message: + '80000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001', + expected: + 'bbf1f49a2cc5678aa62196d0c3108d89425b81780e1e90bcec03b4fb5f834714', + }, + { + nist: false, + length: 256, + message: 'a2c0', + expected: + '9856642c690c036527b8274db1b6f58c0429a88d9f3b9298597645991f4f58f0', + }, + { + nist: false, + length: 256, + message: '0a2c', + expected: + 
'295b48ad49eff61c3abfd399c672232434d89a4ef3ca763b9dbebb60dbb32a8b', + }, + { + nist: false, + length: 256, + message: '00', + expected: + 'bc36789e7a1e281436464229828f817d6612f7b477d66591ff96a9e064bcc98a', + }, + ]; +} From aa0473968474a1b973bbd76322991c2af9889354 Mon Sep 17 00:00:00 2001 From: mike dupont Date: Mon, 21 Oct 2024 10:56:46 -0400 Subject: [PATCH 2/3] get rid of final slash that produces // double slashes --- .github/workflows/build-action.yml | 2 +- src/mina | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/build-action.yml b/.github/workflows/build-action.yml index 62c3e0640d..38941eb186 100644 --- a/.github/workflows/build-action.yml +++ b/.github/workflows/build-action.yml @@ -155,7 +155,7 @@ jobs: BRANCH_NAME: ${{ github.head_ref || github.ref_name }} run: | MODIFIED_BRANCH_NAME=${BRANCH_NAME/\//-} - OUTPUT_DIR="profile/profile-data/${MODIFIED_BRANCH_NAME}/" + OUTPUT_DIR="profile/profile-data/${MODIFIED_BRANCH_NAME}" OUTPUT_TEST_DIR="${OUTPUT_DIR}/profile-data-${{matrix.chunk}}-${{ matrix.perf }}-${{ matrix.node_version }}" echo "MODIFIED_BRANCH_NAME=${MODIFIED_BRANCH_NAME}" >> "$GITHUB_ENV" diff --git a/src/mina b/src/mina index 6899054b74..63725a4909 160000 --- a/src/mina +++ b/src/mina @@ -1 +1 @@ -Subproject commit 6899054b745c1323b9d5bcaa62c00bed2ad1ead3 +Subproject commit 63725a4909a8053371f95961f7b55c61cb13d763 From 889be00a68961899f3b9521544a9d3758e1ebc1f Mon Sep 17 00:00:00 2001 From: mike dupont Date: Mon, 21 Oct 2024 11:33:40 -0400 Subject: [PATCH 3/3] run only chunk 13 with more detail --- .github/workflows/build-action.yml | 19 +++++++++++++++---- 1 file changed, 15 insertions(+), 4 deletions(-) diff --git a/.github/workflows/build-action.yml b/.github/workflows/build-action.yml index 38941eb186..e0f955ffd6 100644 --- a/.github/workflows/build-action.yml +++ b/.github/workflows/build-action.yml @@ -117,12 +117,17 @@ jobs: strategy: fail-fast: false matrix: - chunk: [1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32] + chunk: [ + #1,2,3,4,5,6,7,8,9,10,11,12, + 13 + #,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32 + ] perf: [ - #prof, heap, cpu, + no, + prof, heap, cpu, all] - node_version: [18 - #,20,22 + node_version: [ + 18,20,22 ] steps: - name: Restore repository @@ -193,6 +198,12 @@ jobs: for ((i=start_index; i