parent
fedf6075b7
commit
fcb1f1ac25
@ -0,0 +1,9 @@ |
||||
REMIX CIRCOM WORKSPACE |
||||
|
||||
Welcome to the Remix Circom Workspace. This workspace becomes available when you create a new workspace using the 'Circom' template. |
||||
Directory Structure |
||||
|
||||
The workspace comprises two main directories: |
||||
|
||||
circuits: Contains sample Semaphore circuits. These can be compiled to generate a witness.
||||
scripts: Provides a sample script designed for a trusted setup using snarkjs. This script also aids in generating Solidity code, which is essential for on-chain deployment. |
@ -0,0 +1,90 @@ |
||||
pragma circom 2.0.0; |
||||
|
||||
include "circomlib/poseidon.circom"; |
||||
include "./tree.circom"; |
||||
|
||||
// Derives the Semaphore identity "secret" as
// Poseidon(identityNullifier, identityTrapdoor).
template CalculateSecret() {
    signal input identityNullifier;
    signal input identityTrapdoor;
    signal output out;

    // Poseidon hash over the two private identity values.
    component poseidon = Poseidon(2);

    poseidon.inputs[0] <== identityNullifier;
    poseidon.inputs[1] <== identityTrapdoor;

    out <== poseidon.out;
}
||||
|
||||
// Derives the public identity commitment (the Merkle-tree leaf) as
// Poseidon(secret).
template CalculateIdentityCommitment() {
    signal input secret;

    signal output out;

    // Single-input Poseidon hash of the secret.
    component poseidon = Poseidon(1);

    poseidon.inputs[0] <== secret;

    out <== poseidon.out;
}
||||
|
||||
// Derives the nullifier hash as
// Poseidon(externalNullifier, identityNullifier); the same identity produces
// the same nullifier hash for a given external nullifier.
template CalculateNullifierHash() {
    signal input externalNullifier;
    signal input identityNullifier;

    signal output out;

    component poseidon = Poseidon(2);

    poseidon.inputs[0] <== externalNullifier;
    poseidon.inputs[1] <== identityNullifier;

    out <== poseidon.out;
}
||||
|
||||
// credits to : https://github.com/semaphore-protocol/semaphore |
||||
// The current Semaphore smart contracts require nLevels <= 32 and nLevels >= 16. |
||||
// credits to : https://github.com/semaphore-protocol/semaphore
// The current Semaphore smart contracts require nLevels <= 32 and nLevels >= 16.
//
// Proves that an identity commitment derived from (identityNullifier,
// identityTrapdoor) is a leaf of a Merkle tree of depth nLevels, and exposes
// the recomputed root together with a nullifier hash bound to
// (externalNullifier, identityNullifier).
template Semaphore(nLevels) {
    // Private identity material.
    signal input identityNullifier;
    signal input identityTrapdoor;
    // Merkle authentication path for the identity commitment.
    signal input treePathIndices[nLevels];
    signal input treeSiblings[nLevels];

    // Declared public via the main component below.
    signal input signalHash;
    signal input externalNullifier;

    signal output root;
    signal output nullifierHash;

    // secret = Poseidon(identityNullifier, identityTrapdoor).
    component calculateSecret = CalculateSecret();
    calculateSecret.identityNullifier <== identityNullifier;
    calculateSecret.identityTrapdoor <== identityTrapdoor;

    signal secret;
    secret <== calculateSecret.out;

    // Leaf value: identityCommitment = Poseidon(secret).
    component calculateIdentityCommitment = CalculateIdentityCommitment();
    calculateIdentityCommitment.secret <== secret;

    // nullifierHash = Poseidon(externalNullifier, identityNullifier).
    component calculateNullifierHash = CalculateNullifierHash();
    calculateNullifierHash.externalNullifier <== externalNullifier;
    calculateNullifierHash.identityNullifier <== identityNullifier;

    // Recompute the Merkle root from the leaf and the provided path.
    component inclusionProof = MerkleTreeInclusionProof(nLevels);
    inclusionProof.leaf <== calculateIdentityCommitment.out;

    for (var i = 0; i < nLevels; i++) {
        inclusionProof.siblings[i] <== treeSiblings[i];
        inclusionProof.pathIndices[i] <== treePathIndices[i];
    }

    root <== inclusionProof.root;

    // Dummy square to prevent tampering signalHash.
    signal signalHashSquared;
    signalHashSquared <== signalHash * signalHash;

    nullifierHash <== calculateNullifierHash.out;
}
||||
|
||||
// Instantiate with a tree depth of 20; signalHash and externalNullifier are
// the public inputs of the circuit.
component main {public [signalHash, externalNullifier]} = Semaphore(20);
@ -0,0 +1,11 @@ |
||||
pragma circom 2.0.0; |
||||
|
||||
// Minimal example circuit: proves knowledge of two factors a and b whose
// product is the output c.
template Multiplier2() {
    signal input a;
    signal input b;
    signal output c;
    // Single multiplication constraint: c = a * b.
    c <== a*b;
}

component main = Multiplier2();
||||
|
@ -0,0 +1,40 @@ |
||||
pragma circom 2.0.0; |
||||
|
||||
include "circomlib/poseidon.circom"; |
||||
include "circomlib/mux1.circom"; |
||||
|
||||
// Recomputes a Merkle root from a leaf and an authentication path of nLevels
// (sibling, pathIndex) pairs, using Poseidon(2) as the node hash.
template MerkleTreeInclusionProof(nLevels) {
    signal input leaf;
    // pathIndices[i] selects the child position at level i:
    // 0 -> running hash is the left child, 1 -> it is the right child.
    signal input pathIndices[nLevels];
    signal input siblings[nLevels];

    signal output root;

    component poseidons[nLevels];
    component mux[nLevels];

    // hashes[i] is the running hash after i levels; hashes[0] is the leaf.
    signal hashes[nLevels + 1];
    hashes[0] <== leaf;

    for (var i = 0; i < nLevels; i++) {
        // Constrain each path index to be a bit (0 or 1).
        pathIndices[i] * (1 - pathIndices[i]) === 0;

        poseidons[i] = Poseidon(2);
        mux[i] = MultiMux1(2);

        // Mux wiring: selector 0 yields (hash, sibling),
        // selector 1 yields (sibling, hash).
        mux[i].c[0][0] <== hashes[i];
        mux[i].c[0][1] <== siblings[i];

        mux[i].c[1][0] <== siblings[i];
        mux[i].c[1][1] <== hashes[i];

        mux[i].s <== pathIndices[i];

        poseidons[i].inputs[0] <== mux[i].out[0];
        poseidons[i].inputs[1] <== mux[i].out[1];

        hashes[i + 1] <== poseidons[i].out;
    }

    root <== hashes[nLevels];
}
@ -0,0 +1,16 @@ |
||||
export default async () => { |
||||
return { |
||||
// @ts-ignore
|
||||
'circuits/semaphore.circom': (await import('raw-loader!./circuits/semaphore.circom')).default, |
||||
// @ts-ignore
|
||||
'circuits/simple.circom': (await import('!!raw-loader!./circuits/simple.circom')).default, |
||||
// @ts-ignore
|
||||
'circuits/tree.circom': (await import('!!raw-loader!./circuits/tree.circom')).default, |
||||
// @ts-ignore
|
||||
'scripts/run_setup.ts': (await import('!!raw-loader!./scripts/run_setup.ts')).default, |
||||
// @ts-ignore
|
||||
'scripts/run_verification.ts': (await import('!!raw-loader!./scripts/run_verification.ts')).default, |
||||
// @ts-ignore
|
||||
'README.txt': (await import('raw-loader!./README.txt')).default, |
||||
} |
||||
} |
@ -0,0 +1,67 @@ |
||||
import { ethers, BigNumber } from 'ethers' |
||||
import { IncrementalMerkleTree } from "@zk-kit/incremental-merkle-tree" |
||||
import { poseidon } from "circomlibjs" // v0.0.8
|
||||
const snarkjs = require('snarkjs'); |
||||
|
||||
const logger = { |
||||
info: (...args) => console.log(...args), |
||||
debug: (...args) => console.log(...args) |
||||
}; |
||||
|
||||
/** |
||||
* Creates a keccak256 hash of a message compatible with the SNARK scalar modulus. |
||||
* @param message The message to be hashed. |
||||
* @returns The message digest. |
||||
*/ |
||||
function hash(message: any): bigint { |
||||
message = BigNumber.from(message).toTwos(256).toHexString() |
||||
message = ethers.utils.zeroPad(message, 32) |
||||
return BigInt(ethers.utils.keccak256(message)) >> BigInt(8) |
||||
} |
||||
|
||||
// Groth16 trusted-setup ceremony for the Semaphore circuit, run inside the
// Remix IDE scripting environment. NOTE(review): `remix` is an ambient global
// (presumably the Remix plugin API — not imported in this file; verify it is
// injected by the script runner).
(async () => {
  try {
    // Pre-built artifacts pinned on IPFS: powers-of-tau file, compiled R1CS
    // constraint system, and the witness-generation wasm.
    const ptau_final = "https://ipfs-cluster.ethdevops.io/ipfs/QmTiT4eiYz5KF7gQrDsgfCSTRv3wBPYJ4bRN1MmTRshpnW";
    const r1cs = "https://ipfs-cluster.ethdevops.io/ipfs/QmbMk4ksBYLQzJ6TiZfzaALF8W11xvB8Wz6a2GrG9oDrXW";
    // NOTE(review): `wasm` is declared but never used in this script; the
    // witness wasm is only consumed by run_verification.ts.
    const wasm = "https://ipfs-cluster.ethdevops.io/ipfs/QmUbpEvHHKaHEqYLjhn93S8rEsUGeqiTYgRjGPk7g8tBbz";
    // In-memory zkey stages: initial, after one contribution, and final.
    const zkey_0 = { type: "mem" };
    const zkey_1 = { type: "mem" };
    const zkey_final = { type: "mem" };

    // Phase-2 setup: derive the initial zkey from the R1CS and the ptau file.
    console.log('newZkey')
    await snarkjs.zKey.newZKey(r1cs, ptau_final, zkey_0);

    // Apply one participant contribution (contribution name + entropy string).
    console.log('contribute')
    await snarkjs.zKey.contribute(zkey_0, zkey_1, "p2_C1", "pa_Entropy1");

    // Finalize the ceremony with a random beacon (hex seed, 10 iterations).
    console.log('beacon')
    await snarkjs.zKey.beacon(zkey_1, zkey_final, "B3", "0102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d1e1f20", 10);

    // Sanity-check the final zkey against the R1CS ...
    console.log('verifyFromR1cs')
    const verifyFromR1csResult = await snarkjs.zKey.verifyFromR1cs(r1cs, ptau_final, zkey_final);
    console.assert(verifyFromR1csResult);

    // ... and against the initial zkey.
    console.log('verifyFromInit')
    const verifyFromInit = await snarkjs.zKey.verifyFromInit(zkey_0, ptau_final, zkey_final);
    console.assert(verifyFromInit);

    // Persist the verification key so run_verification.ts can load it later.
    console.log('exportVerificationKey')
    const vKey = await snarkjs.zKey.exportVerificationKey(zkey_final)
    await remix.call('fileManager', 'writeFile', './zk/build/verification_key.json', JSON.stringify(vKey))

    // Render the Solidity verifier contract from the bundled EJS template.
    const templates = {
      groth16: await remix.call('fileManager', 'readFile', './zk/templates/groth16_verifier.sol.ejs')
    }
    const solidityContract = await snarkjs.zKey.exportSolidityVerifier(zkey_final, templates)

    await remix.call('fileManager', 'writeFile', './zk/build/zk_verifier.sol', solidityContract)

    // Dump the raw zkey bytes as a JSON number array so the proving script
    // can reconstruct the in-memory zkey.
    console.log('buffer', (zkey_final as any).data.length)
    await remix.call('fileManager', 'writeFile', './zk/build/zk_setup.txt', JSON.stringify(Array.from(((zkey_final as any).data))))

    console.log('setup done.')

  } catch (e) {
    console.error(e.message)
  }
})()
@ -0,0 +1,94 @@ |
||||
import { ethers, BigNumber } from 'ethers' |
||||
import { IncrementalMerkleTree } from "@zk-kit/incremental-merkle-tree" |
||||
import { poseidon } from "circomlibjs" // v0.0.8
|
||||
const snarkjs = require('snarkjs'); |
||||
|
||||
const logger = { |
||||
info: (...args) => console.log(...args), |
||||
debug: (...args) => console.log(...args), |
||||
error: (...args) => console.error(...args), |
||||
} |
||||
|
||||
/** |
||||
* Creates a keccak256 hash of a message compatible with the SNARK scalar modulus. |
||||
* @param message The message to be hashed. |
||||
* @returns The message digest. |
||||
*/ |
||||
function hash(message: any): bigint { |
||||
message = BigNumber.from(message).toTwos(256).toHexString() |
||||
message = ethers.utils.zeroPad(message, 32) |
||||
return BigInt(ethers.utils.keccak256(message)) >> BigInt(8) |
||||
} |
||||
|
||||
// Witness generation, Groth16 proving, and verification for the Semaphore
// circuit, using the artifacts written by run_setup.ts. NOTE(review): `remix`
// is an ambient global (presumably the Remix plugin API — verify it is
// injected by the script runner).
(async () => {
  try {
    // R1CS and witness-generation wasm pinned on IPFS.
    const r1cs = "https://ipfs-cluster.ethdevops.io/ipfs/QmbMk4ksBYLQzJ6TiZfzaALF8W11xvB8Wz6a2GrG9oDrXW";
    const wasm = "https://ipfs-cluster.ethdevops.io/ipfs/QmUbpEvHHKaHEqYLjhn93S8rEsUGeqiTYgRjGPk7g8tBbz";

    // Reconstruct the final zkey from the byte array dumped by run_setup.ts.
    const zkey_final = {
      type: "mem",
      data: new Uint8Array(JSON.parse(await remix.call('fileManager', 'readFile', './zk/build/zk_setup.txt')))
    }
    const wtns = { type: "mem" };

    const vKey = JSON.parse(await remix.call('fileManager', 'readFile', './zk/build/verification_key.json'))

    // build list of identity commitments
    const secrets = []
    const identityCommitments = []
    for (let k = 0; k < 2; k++) {
      // Fresh random 32-byte trapdoor/nullifier pair for each identity.
      const identityTrapdoor = BigInt(ethers.utils.hexlify(ethers.utils.randomBytes(32)))
      const identityNullifier = BigInt(ethers.utils.hexlify(ethers.utils.randomBytes(32)))
      secrets.push({identityTrapdoor, identityNullifier})

      // commitment = Poseidon(Poseidon(nullifier, trapdoor)) — mirrors the
      // CalculateSecret / CalculateIdentityCommitment templates.
      const secret = poseidon([identityNullifier, identityTrapdoor])
      const identityCommitment = poseidon([secret])
      identityCommitments.push(identityCommitment)
    }
    //console.log('incremental tree', identityCommitments.map((x) => x.toString()))

    let tree

    try {
      // Depth-20 binary Poseidon tree with zero value 0 — the depth must
      // match Semaphore(20) in the circuit.
      tree = new IncrementalMerkleTree(poseidon, 20, BigInt(0), 2, identityCommitments) // Binary tree.
    } catch (e) {
      console.error(e.message)
      return
    }
    const index = tree.indexOf(identityCommitments[0])

    console.log(index.toString())

    // Merkle inclusion proof for the first identity (leaf 0).
    const proof1 = tree.createProof(0)

    console.log('prepare signals for id ', identityCommitments[0].toString(), tree.indexOf(identityCommitments[0]), proof1.siblings.map((x)=> x.toString()))

    // Circuit inputs: private identity + path, plus the hashed public values.
    const signals = {
      identityTrapdoor: secrets[0].identityTrapdoor,
      identityNullifier: secrets[0].identityNullifier,
      treePathIndices: proof1.pathIndices,
      treeSiblings: proof1.siblings,
      externalNullifier: hash(42),
      signalHash: hash(ethers.utils.formatBytes32String("Hello World"))
    }

    // Generate the witness and check it against the R1CS constraints.
    console.log('calculate')
    await snarkjs.wtns.calculate(signals, wasm, wtns);

    console.log('check')
    await snarkjs.wtns.check(r1cs, wtns, logger);

    // Produce the Groth16 proof and verify it with the exported key.
    console.log('prove')
    const { proof, publicSignals } = await snarkjs.groth16.prove(zkey_final, wtns);

    const verified = await snarkjs.groth16.verify(vKey, publicSignals, proof, logger);
    console.log('zk proof validity', verified);
    // Compare the first public signal with the JS-side Merkle root.
    proof1.root.toString() === publicSignals[0] ? console.log('merkle proof valid') : console.log('merkle proof invalid')

  } catch (e) {
    console.error(e.message)
  }
})()
Loading…
Reference in new issue