Commit 6107786

feat: merge-train/barretenberg (#16715)
BEGIN_COMMIT_OVERRIDE
feat(bb): --print_bench (#16673)
refactor(bb): remove --output_format, have one-size-fits-all format (#16201)
fix(bb): fix vk simplified buffer (#16720)
fix(avm): avm vk after serialization unification (#16721)
END_COMMIT_OVERRIDE
2 parents: f8fa569 + 6da0f67

239 files changed: +1811 −1647 lines changed
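The thread running through the diffs below is the removal of bb's `--output_format`: proofs, verification keys, and public inputs are now always written as raw field-element bytes, and consumers that want the old `*_fields.json` view re-derive it by splitting on 32-byte boundaries. A minimal TypeScript sketch of that round-trip, mirroring the conversion code this commit adds (helper names are illustrative, not part of bb.js; it assumes each field element serializes to 32 big-endian bytes, as the diffs below do):

```typescript
// Illustrative helpers mirroring the conversions introduced in this commit.
// Assumption: one field element == 32 bytes; hex strings are 0x-prefixed.

// "0x..." field strings -> concatenated 32-byte binary (what the prove flows now write)
function fieldsToBinary(fields: string[]): Buffer {
  return Buffer.concat(
    fields.map((field) => {
      const hex = field.startsWith("0x") ? field.slice(2) : field;
      return Buffer.from(hex.padStart(64, "0"), "hex");
    })
  );
}

// concatenated 32-byte binary -> "0x..." field strings (what the verify flows now read)
function binaryToFields(buffer: Buffer): string[] {
  const fields: string[] = [];
  for (let i = 0; i < buffer.length; i += 32) {
    fields.push("0x" + buffer.subarray(i, i + 32).toString("hex"));
  }
  return fields;
}
```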

.vscode/launch.json

Lines changed: 2 additions & 2 deletions
```diff
@@ -36,8 +36,8 @@
       "name": "Debug CIVC transaction",
       "type": "lldb",
       "request": "launch",
-      "program": "${workspaceFolder}/barretenberg/cpp/build-debug-no-avm/bin/bb",
-      "args": ["prove", "--scheme", "client_ivc", "--output_path", ".", "--ivc_inputs_path", "ivc-inputs.msgpack"],
+      "program": "${workspaceFolder}/barretenberg/cpp/build-debug-fast-no-avm/bin/bb",
+      "args": ["prove", "--scheme", "client_ivc", "--output_path", ".", "--ivc_inputs_path", "ivc-inputs.msgpack", "--print_bench"],
       "cwd": "${workspaceFolder}/yarn-project/end-to-end/example-app-ivc-inputs-out/ecdsar1+transfer_0_recursions+sponsored_fpc",
       "initCommands": [
         "command script import ${workspaceFolder}/barretenberg/cpp/scripts/lldb_format.py"
```

barretenberg/README.md

Lines changed: 1 addition & 1 deletion
```diff
@@ -450,7 +450,7 @@ In terms of general usage, you should be able to use scrolling or the WASD keys
 
 ##### Adding Zones
 
-Zones are how you can keep track of where you are relative in the code and how you can bucket allocations together. All of the colored blocks in the Main Thread row and other threads' rows refer to zones. You can nest zones in deeper and deeper scopes, which leads to stacks of these zones. To add a named zone, all you have to do is add PROFILE_THIS() or PROFILE_THIS_NAME(<name>) to a scope and it will create a zone. Note that you can't create multiple zones in the same scope.
+Zones are how you can keep track of where you are relative in the code and how you can bucket allocations together. All of the colored blocks in the Main Thread row and other threads' rows refer to zones. You can nest zones in deeper and deeper scopes, which leads to stacks of these zones. To add a named zone, all you have to do is add BB_BENCH_TRACY() or BB_BENCH_TRACY_NAME(<name>) to a scope and it will create a zone. Note that you can't create multiple zones in the same scope.
 
 ##### Analyzing Fragmentation
```

barretenberg/acir_tests/bbjs-test/src/index.ts

Lines changed: 22 additions & 22 deletions
```diff
@@ -9,9 +9,7 @@ const logger = pino({
 });
 
 const proofPath = (dir: string) => path.join(dir, "proof");
-const proofAsFieldsPath = (dir: string) => path.join(dir, "proof_fields.json");
-const publicInputsAsFieldsPath = (dir: string) =>
-  path.join(dir, "public_inputs_fields.json");
+const publicInputsPath = (dir: string) => path.join(dir, "public_inputs");
 const vkeyPath = (dir: string) => path.join(dir, "vk");
 
 async function generateProof({
@@ -27,7 +25,7 @@ async function generateProof({
   oracleHash?: string;
   multiThreaded?: boolean;
 }) {
-  const { UltraHonkBackend, deflattenFields } = await import("@aztec/bb.js");
+  const { UltraHonkBackend } = await import("@aztec/bb.js");
 
   logger.debug(`Generating proof for ${bytecodePath}...`);
   const circuitArtifact = await fs.readFile(bytecodePath);
@@ -45,17 +43,16 @@
   await fs.writeFile(proofPath(outputDirectory), Buffer.from(proof.proof));
   logger.debug("Proof written to " + proofPath(outputDirectory));
 
-  await fs.writeFile(
-    publicInputsAsFieldsPath(outputDirectory),
-    JSON.stringify(proof.publicInputs)
+  // Convert public inputs from field strings to binary
+  const publicInputsBuffer = Buffer.concat(
+    proof.publicInputs.map((field: string) => {
+      const hex = field.startsWith('0x') ? field.slice(2) : field;
+      return Buffer.from(hex.padStart(64, '0'), 'hex');
+    })
   );
+  await fs.writeFile(publicInputsPath(outputDirectory), publicInputsBuffer);
   logger.debug(
-    "Public inputs written to " + publicInputsAsFieldsPath(outputDirectory)
-  );
-
-  await fs.writeFile(
-    proofAsFieldsPath(outputDirectory),
-    JSON.stringify(deflattenFields(proof.proof))
+    "Public inputs written to " + publicInputsPath(outputDirectory)
   );
 
   const verificationKey = await backend.getVerificationKey({
@@ -69,21 +66,24 @@
 }
 
 async function verifyProof({ directory }: { directory: string }) {
-  const { BarretenbergVerifier } = await import("@aztec/bb.js");
+  const { UltraHonkVerifierBackend } = await import("@aztec/bb.js");
 
-  const verifier = new BarretenbergVerifier();
+  const verifier = new UltraHonkVerifierBackend();
 
   const proof = await fs.readFile(proofPath(directory));
 
-  const publicInputs = JSON.parse(
-    await fs.readFile(publicInputsAsFieldsPath(directory), "utf8")
-  );
+  // Read binary public inputs and convert to field strings
+  const publicInputsBinary = await fs.readFile(publicInputsPath(directory));
+  const publicInputs = [];
+  for (let i = 0; i < publicInputsBinary.length; i += 32) {
+    const chunk = publicInputsBinary.slice(i, Math.min(i + 32, publicInputsBinary.length));
+    publicInputs.push('0x' + chunk.toString('hex'));
+  }
   logger.debug(`publicInputs: ${JSON.stringify(publicInputs)}`);
-  const vkey = await fs.readFile(vkeyPath(directory));
+  const verificationKey = await fs.readFile(vkeyPath(directory));
 
-  const verified = await verifier.verifyUltraHonkProof(
-    { proof: new Uint8Array(proof), publicInputs },
-    new Uint8Array(vkey)
+  const verified = await verifier.verifyProof(
+    { proof: new Uint8Array(proof), publicInputs, verificationKey },
   );
 
   await verifier.destroy();
```
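Condensed from the test above for reference: the old `BarretenbergVerifier.verifyUltraHonkProof(proofData, vk)` call becomes a single options object on `UltraHonkVerifierBackend.verifyProof`. A sketch using only the calls visible in this diff (file paths are illustrative):

```typescript
import { promises as fs } from "fs";

const { UltraHonkVerifierBackend } = await import("@aztec/bb.js");
const verifier = new UltraHonkVerifierBackend();

const proof = await fs.readFile("output/proof"); // raw proof bytes
const verificationKey = await fs.readFile("output/vk"); // raw vk bytes
// Array of 0x-prefixed field strings; binaryToFields is the sketch near the top of this page.
const publicInputs = binaryToFields(await fs.readFile("output/public_inputs"));

const verified = await verifier.verifyProof({
  proof: new Uint8Array(proof),
  publicInputs,
  verificationKey,
});
await verifier.destroy();
```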

barretenberg/acir_tests/bootstrap.sh

Lines changed: 22 additions & 17 deletions
```diff
@@ -21,14 +21,18 @@ tests_hash=$(hash_str \
   ../ts/.rebuild_patterns \
   ../noir/))
 
+function hex_to_fields_json {
+  # 1. split encoded hex into 64-character lines 3. encode as JSON array of hex strings
+  fold -w64 | jq -R -s -c 'split("\n") | map(select(length > 0)) | map("0x" + .)'
+}
+
 # Generate inputs for a given recursively verifying program.
 function run_proof_generation {
   local program=$1
   local native_build_dir=$(../cpp/scripts/native-preset-build-dir)
   local bb=$(realpath ../cpp/$native_build_dir/bin/bb)
   local outdir=$(mktemp -d)
   trap "rm -rf $outdir" EXIT
-  local adjustment=16
   local ipa_accumulation_flag=""
 
   cd ./acir_tests/assert_statement
@@ -37,35 +41,37 @@ function run_proof_generation {
 
   # Adjust settings based on program type
   if [[ $program == *"rollup"* ]]; then
-    adjustment=26
     ipa_accumulation_flag="--ipa_accumulation"
   fi
   # If the test program has zk in it's name would like to use the zk prover, so we empty the flag in this case.
   if [[ $program == *"zk"* ]]; then
     disable_zk=""
   fi
-  local prove_cmd="$bb prove --scheme ultra_honk $disable_zk $ipa_accumulation_flag --output_format fields --write_vk -o $outdir -b ./target/program.json -w ./target/witness.gz"
+  local prove_cmd="$bb prove --scheme ultra_honk $disable_zk $ipa_accumulation_flag --write_vk -o $outdir -b ./target/program.json -w ./target/witness.gz"
   echo_stderr "$prove_cmd"
   dump_fail "$prove_cmd"
 
-  local vk_fields=$(cat "$outdir/vk_fields.json")
-  local vk_hash_fields=$(cat "$outdir/vk_hash_fields.json")
-  local public_inputs_fields=$(cat "$outdir/public_inputs_fields.json")
-  local proof_fields=$(cat "$outdir/proof_fields.json")
 
-  generate_toml "$program" "$vk_fields" "$vk_hash_fields" "$proof_fields" "$public_inputs_fields"
+  # Split the hex-encoded vk bytes into fields boundaries (but still hex-encoded), first making 64-character lines and then encoding as JSON.
+  # This used to be done by barretenberg itself, but with serialization now always being in field elements we can do it outside of bb.
+  local vk_fields=$(cat "$outdir/vk" | xxd -p -c 0 | hex_to_fields_json)
+  local vk_hash_field="\"0x$(cat "$outdir/vk_hash" | xxd -p -c 0)\""
+  local public_inputs_fields=$(cat "$outdir/public_inputs" | xxd -p -c 0 | hex_to_fields_json)
+  local proof_fields=$(cat "$outdir/proof" | xxd -p -c 0 | hex_to_fields_json)
+
+  generate_toml "$program" "$vk_fields" "$vk_hash_field" "$proof_fields" "$public_inputs_fields"
 }
 
 function generate_toml {
   local program=$1
   local vk_fields=$2
-  local vk_hash_fields=$3
+  local vk_hash_field=$3
   local proof_fields=$4
-  local num_inner_public_inputs=$5
+  local public_inputs_fields=$5
   local output_file="../$program/Prover.toml"
 
   jq -nr \
-    --arg key_hash "$vk_hash_fields" \
+    --arg key_hash "$vk_hash_field" \
     --argjson vk_f "$vk_fields" \
     --argjson public_inputs_f "$public_inputs_fields" \
     --argjson proof_f "$proof_fields" \
@@ -79,7 +85,6 @@ function generate_toml {
 }
 
 function regenerate_recursive_inputs {
-  local program=$1
   # Compile the assert_statement test as it's used for the recursive tests.
   cd ./acir_tests/assert_statement
   local nargo=$(realpath ../../../../noir/noir-repo/target/release/nargo)
@@ -91,7 +96,7 @@ function regenerate_recursive_inputs {
   parallel 'run_proof_generation {}' ::: $(ls internal_test_programs)
 }
 
-export -f regenerate_recursive_inputs run_proof_generation generate_toml
+export -f hex_to_fields_json regenerate_recursive_inputs run_proof_generation generate_toml
 
 function compile {
   echo_header "Compiling acir_tests"
@@ -160,10 +165,10 @@ function test_cmds {
 
   # bb.js browser tests. Isolate because server.
   local browser_prefix="$tests_hash:ISOLATE=1:NET=1:CPUS=8"
-  echo "$browser_prefix:NAME=chrome_verify_honk_proof $scripts/browser_prove.sh verify_honk_proof chrome"
-  echo "$browser_prefix:NAME=chrome_a_1_mul $scripts/browser_prove.sh a_1_mul chrome"
-  echo "$browser_prefix:NAME=webkit_verify_honk_proof $scripts/browser_prove.sh verify_honk_proof webkit"
-  echo "$browser_prefix:NAME=webkit_a_1_mul $scripts/browser_prove.sh a_1_mul webkit"
+  echo "$browser_prefix $scripts/browser_prove.sh verify_honk_proof chrome"
+  echo "$browser_prefix $scripts/browser_prove.sh a_1_mul chrome"
+  echo "$browser_prefix $scripts/browser_prove.sh verify_honk_proof webkit"
+  echo "$browser_prefix $scripts/browser_prove.sh a_1_mul webkit"
 
   # bb.js tests.
   # ecdsa_secp256r1_3x through bb.js on node to check 256k support.
```
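The new `hex_to_fields_json` helper above is just the 32-byte (64 hex character) split expressed with `fold` and `jq`. For clarity, an equivalent in TypeScript (illustrative only, not part of the repo):

```typescript
// Equivalent of: fold -w64 | jq -R -s -c 'split("\n") | map(select(length > 0)) | map("0x" + .)'
function hexToFieldsJson(hex: string): string {
  const chunks = hex.replace(/\s+/g, "").match(/.{1,64}/g) ?? [];
  return JSON.stringify(chunks.map((chunk) => "0x" + chunk));
}

// e.g. hexToFieldsJson("00".repeat(64)) yields a JSON array of two all-zero field strings
```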

barretenberg/acir_tests/browser-test-app/src/index.ts

Lines changed: 5 additions & 6 deletions
```diff
@@ -30,17 +30,16 @@ function installUltraHonkGlobals() {
 }
 
 async function verify(proofData: ProofData, verificationKey: Uint8Array) {
-  const { BarretenbergVerifier } = await import("@aztec/bb.js");
+  const { UltraHonkVerifierBackend } = await import("@aztec/bb.js");
 
   logger.debug(`verifying...`);
-  const verifier = new BarretenbergVerifier();
-  const verified = await verifier.verifyUltraHonkProof(
-    proofData,
-    verificationKey
+  const backend = new UltraHonkVerifierBackend();
+  const verified = await backend.verifyProof(
+    {...proofData, verificationKey}
   );
   logger.debug(`verified: ${verified}`);
 
-  await verifier.destroy();
+  await backend.destroy();
 
   logger.debug("test complete.");
   return verified;
```

barretenberg/acir_tests/scripts/bb_prove_bbjs_verify.sh

Lines changed: 0 additions & 1 deletion
```diff
@@ -22,7 +22,6 @@ $bb prove \
   -b target/program.json \
   -w target/witness.gz \
   -k output-$$/vk \
-  --output_format bytes_and_fields \
   -o output-$$
 
 # Verify the proof with bb.js classes
```

barretenberg/acir_tests/scripts/bb_prove_sol_verify.sh

Lines changed: 2 additions & 3 deletions
```diff
@@ -34,16 +34,15 @@ mkdir -p output-$$
 trap "rm -rf output-$$" EXIT
 
 # Create a proof, write the solidity contract, write the proof as fields in order to extract the public inputs
-$bb prove $flags -b target/program.json --oracle_hash keccak --output_format bytes_and_fields --write_vk -o output-$$
+$bb prove $flags -b target/program.json --oracle_hash keccak --write_vk -o output-$$
 $bb verify $flags --oracle_hash keccak -i output-$$/public_inputs -k output-$$/vk -p output-$$/proof
 $bb write_solidity_verifier $write_contract_flags -k output-$$/vk -o output-$$/Verifier.sol
 
 # Use solcjs to compile the generated key contract with the template verifier and test contract
 # index.js will start an anvil, on a random port
 # Deploy the verifier then send a test transaction
 PROOF="output-$$/proof" \
-PROOF_AS_FIELDS="output-$$/proof_fields.json" \
-PUBLIC_INPUTS_AS_FIELDS="output-$$/public_inputs_fields.json" \
+PUBLIC_INPUTS="output-$$/public_inputs" \
 VERIFIER_PATH="output-$$/Verifier.sol" \
 TEST_PATH="../../sol-test/HonkTest.sol" \
 HAS_ZK="$has_zk" \
```

barretenberg/acir_tests/scripts/bbjs_prove_bb_verify.sh

Lines changed: 1 addition & 11 deletions
```diff
@@ -14,17 +14,7 @@ node ../../bbjs-test prove \
   -w target/witness.gz \
   -o output-$$
 
-proof_bytes=$(cat output-$$/proof | xxd -p)
-public_inputs=$(cat output-$$/public_inputs_fields.json | jq -r '.[]')
-
-public_inputs_bytes=""
-for input in $public_inputs; do
-  public_inputs_bytes+=$input
-done
-
-# Combine proof header and the proof to a single file
-echo -n $proof_bytes | xxd -r -p > output-$$/proof
-echo -n $public_inputs_bytes | xxd -r -p > output-$$/public_inputs
+# The proof and public_inputs are already in binary format from bbjs-test
 
 bb=$(../../../cpp/scripts/find-bb)
 # Verify the proof with bb cli
```

barretenberg/acir_tests/scripts/bbjs_prove_sol_verify.sh

Lines changed: 1 addition & 2 deletions
```diff
@@ -35,8 +35,7 @@ $bb write_solidity_verifier --scheme ultra_honk -k output-$$/vk -o output-$$/Ver
 
 # Verify the proof using the solidity verifier
 PROOF="output-$$/proof" \
-PROOF_AS_FIELDS="output-$$/proof_fields.json" \
-PUBLIC_INPUTS_AS_FIELDS="output-$$/public_inputs_fields.json" \
+PUBLIC_INPUTS="output-$$/public_inputs" \
 VERIFIER_PATH="output-$$/Verifier.sol" \
 TEST_PATH="../../sol-test/HonkTest.sol" \
 HAS_ZK="$has_zk" \
```

barretenberg/acir_tests/sol-test/src/index.js

Lines changed: 25 additions & 15 deletions
```diff
@@ -165,6 +165,20 @@ const linkLibrary = (bytecode, libraryName, libraryAddress) => {
   return bytecode.replace(regex, address);
 };
 
+/**
+ * Converts binary data to array of field elements (32-byte chunks as hex strings)
+ * @param {Buffer} buffer - Binary data
+ * @return {Array<String>} Array of hex strings with 0x prefix
+ */
+const binaryToFields = (buffer) => {
+  const fields = [];
+  for (let i = 0; i < buffer.length; i += 32) {
+    const chunk = buffer.slice(i, i + 32);
+    fields.push('0x' + chunk.toString('hex'));
+  }
+  return fields;
+};
+
 /**
  * Takes in a proof as fields, and returns the public inputs, as well as the number of public inputs
  * @param {Array<String>} proofAsFields
@@ -225,26 +239,22 @@ try {
   const proof = readFileSync(proofPath);
   proofStr = proof.toString("hex");
 
-  let publicInputsAsFieldsPath = getEnvVarCanBeUndefined(
-    "PUBLIC_INPUTS_AS_FIELDS"
-  ); // PUBLIC_INPUTS_AS_FIELDS is not defined for bb plonk, but is for bb honk and bbjs honk.
-  var publicInputs;
-  let proofAsFieldsPath = getEnvVarCanBeUndefined("PROOF_AS_FIELDS"); // PROOF_AS_FIELDS is not defined for bbjs, but is for bb plonk and bb honk.
+  let publicInputsPath = getEnvVarCanBeUndefined("PUBLIC_INPUTS");
+  var publicInputs = [];
   let numExtraPublicInputs = 0;
   let extraPublicInputs = [];
-  if (proofAsFieldsPath) {
-    const proofAsFields = readFileSync(proofAsFieldsPath);
+
+  // For flows that use binary proof format, extract public inputs from the proof
+  const proofAsFields = binaryToFields(proof);
+  if (proofAsFields.length > NUMBER_OF_FIELDS_IN_PROOF) {
     // We need to extract the public inputs from the proof. This might be empty, or just the pairing point object, or be the entire public inputs...
-    [numExtraPublicInputs, extraPublicInputs] = readPublicInputs(
-      JSON.parse(proofAsFields.toString())
-    );
+    [numExtraPublicInputs, extraPublicInputs] = readPublicInputs(proofAsFields);
   }
-  // We need to do this because plonk doesn't define this path
-  if (publicInputsAsFieldsPath) {
-    const innerPublicInputs = JSON.parse(
-      readFileSync(publicInputsAsFieldsPath).toString()
-    ); // assumes JSON array of PI hex strings
 
+  // Read public inputs from binary file if available
+  if (publicInputsPath) {
+    const publicInputsBinary = readFileSync(publicInputsPath);
+    const innerPublicInputs = binaryToFields(publicInputsBinary);
     publicInputs = innerPublicInputs.concat(extraPublicInputs);
   } else {
     // for plonk, the extraPublicInputs are all of the public inputs
```
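In short, the Solidity test harness now derives everything it needs from the two binary files. An illustrative usage of the `binaryToFields` helper added above (paths are hypothetical; `NUMBER_OF_FIELDS_IN_PROOF` and `readPublicInputs` are the existing helpers referenced in the diff):

```typescript
import { readFileSync } from "fs";

// Any proof fields beyond the fixed proof size are public inputs baked into the
// proof; the harness hands those to readPublicInputs, as in the diff above.
const proofAsFields = binaryToFields(readFileSync("output/proof"));

// The inner public inputs come from a raw binary file rather than *_fields.json.
const innerPublicInputs = binaryToFields(readFileSync("output/public_inputs"));
console.log(`${innerPublicInputs.length} inner public inputs`);
```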
