Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account-related emails.

Already on GitHub? Sign in to your account

release: v0.10.0 #105

Merged
merged 20 commits into from
Feb 13, 2025
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion Makefile
Original file line number Diff line number Diff line change
Expand Up @@ -32,7 +32,7 @@ params:
@for target_dir in $(TARGET_DIRS); do \
size=$$(basename "$$target_dir" | sed 's/target_//' | sed 's/b//'); \
echo "Generating parameters for $${size}b with ROM length 100..."; \
cargo +nightly run --release -- "$$target_dir/artifacts" "$${size}b" "100" || exit 1; \
cargo +nightly run --release -p create-pp -- "$$target_dir/artifacts" "$${size}b" "100" || exit 1; \
done

.PHONY: check
Expand Down
2 changes: 1 addition & 1 deletion builds/target_256b/json_extraction_256b.circom
Original file line number Diff line number Diff line change
Expand Up @@ -2,4 +2,4 @@ pragma circom 2.1.9;

include "../../circuits/json/extraction.circom";

component main { public [step_in] } = JSONExtraction(256, 10, 11);
component main { public [step_in] } = JSONExtraction(256, 12, 11);
2 changes: 1 addition & 1 deletion builds/target_512b/json_extraction_512b.circom
Original file line number Diff line number Diff line change
Expand Up @@ -2,4 +2,4 @@ pragma circom 2.1.9;

include "../../circuits/json/extraction.circom";

component main { public [step_in] } = JSONExtraction(512, 10, 11);
component main { public [step_in] } = JSONExtraction(512, 12, 11);
2 changes: 1 addition & 1 deletion circuits/chacha20/authentication.circom
Original file line number Diff line number Diff line change
Expand Up @@ -174,7 +174,7 @@ template PlaintextAuthentication(DATA_BYTES, PUBLIC_IO_LENGTH) {

// reset HTTP Verification inputs
step_out[2] <== step_in[2]; // Ciphertext digest POW accumulator
step_out[3] <== 1; // Machine state hash digest
step_out[3] <== PolynomialDigest(8)([1, 0, 0, 0, 0, 0, 0, 1], ciphertext_digest); // default Machine state digest
for (var i = 4 ; i < PUBLIC_IO_LENGTH - 1 ; i++) {
if (i == 6) {
step_out[i] <== 0; // Body ciphertext digest pow counter
Expand Down
32 changes: 16 additions & 16 deletions circuits/http/verification.circom
Original file line number Diff line number Diff line change
Expand Up @@ -8,8 +8,8 @@ template HTTPVerification(DATA_BYTES, MAX_NUMBER_OF_HEADERS, PUBLIC_IO_LENGTH) {
signal input step_in[PUBLIC_IO_LENGTH];
signal output step_out[PUBLIC_IO_LENGTH];

// next_parsing_start, next_parsing_header, next_parsing_field_name, next_parsing_field_value, next_parsing_body, next_line_status, inner_main_digest
signal input machine_state[7];
// next_parsing_start, next_parsing_header, next_parsing_field_name, next_parsing_field_value, next_parsing_body, next_line_status, line_digest, main_monomial
signal input machine_state[8];

signal input ciphertext_digest;

Expand Down Expand Up @@ -39,11 +39,7 @@ template HTTPVerification(DATA_BYTES, MAX_NUMBER_OF_HEADERS, PUBLIC_IO_LENGTH) {

// assertions:
// - check step_in[3] = machine state hash digest
// for (var i = 0 ; i < 7 ; i++) {
// log("machine_state[",i,"] = ", machine_state[i]);
// }
signal machine_state_digest <== PolynomialDigest(7)(machine_state, ciphertext_digest);
// log("machine_state_digest: ", machine_state_digest);
signal machine_state_digest <== PolynomialDigest(8)(machine_state, ciphertext_digest);
step_in[3] === machine_state_digest;
// - check step_in[4] = start line hash digest + all header hash digests
// TODO: I don't like this `MAX_NUMBER_OF_HEADERS + 1` now. It should just be `NUMBER_OF_STATEMENTS_TO_LOCK` or something
Expand Down Expand Up @@ -80,7 +76,7 @@ template HTTPVerification(DATA_BYTES, MAX_NUMBER_OF_HEADERS, PUBLIC_IO_LENGTH) {


signal main_monomials[DATA_BYTES];
main_monomials[0] <== 1;
main_monomials[0] <== machine_state[7];

signal is_line_change[DATA_BYTES-1];
signal was_cleared[DATA_BYTES-1];
Expand All @@ -107,7 +103,7 @@ template HTTPVerification(DATA_BYTES, MAX_NUMBER_OF_HEADERS, PUBLIC_IO_LENGTH) {
for(var i = 0 ; i < DATA_BYTES ; i++) {
monomial_is_zero[i] <== IsZero()(main_monomials[i]);
accum_prev[i] <== (1 - monomial_is_zero[i]) * line_digest[i];
line_digest[i+1] <== accum_prev[i] + data[i] * main_monomials[i];
line_digest[i+1] <== accum_prev[i] + data[i] * main_monomials[i];
is_zero[i] <== IsZero()(line_digest[i+1]);
contains[i] <== Contains(MAX_NUMBER_OF_HEADERS + 1)(line_digest[i+1], main_digests);
is_match[i] <== (1 - is_zero[i]) * contains[i];
Expand Down Expand Up @@ -147,22 +143,25 @@ template HTTPVerification(DATA_BYTES, MAX_NUMBER_OF_HEADERS, PUBLIC_IO_LENGTH) {
step_out[1] <== step_in[1];
step_out[2] <== ciphertext_digest_pow[DATA_BYTES];
// pass machine state to next iteration
step_out[3] <== PolynomialDigest(7)(
step_out[3] <== PolynomialDigest(8)(
[State[DATA_BYTES - 1].next_parsing_start,
State[DATA_BYTES - 1].next_parsing_header,
State[DATA_BYTES - 1].next_parsing_field_name,
State[DATA_BYTES - 1].next_parsing_field_value,
State[DATA_BYTES - 1].next_parsing_body,
State[DATA_BYTES - 1].next_line_status,
line_digest[DATA_BYTES]
line_digest[DATA_BYTES],
main_monomials[DATA_BYTES - 1] * ciphertext_digest
],
ciphertext_digest
);
step_out[4] <== step_in[4];
step_out[5] <== step_in[5] - num_matched; // No longer check above, subtract here so circuits later check
step_out[6] <== body_monomials[DATA_BYTES - 1];

for (var i = 7 ; i < PUBLIC_IO_LENGTH ; i++) {
step_out[7] <== 1; // TODO: can i continue this counter?
step_out[8] <== 0; // TODO: This is a hack to make the circuit work. We should remove this in the future
for (var i = 9 ; i < PUBLIC_IO_LENGTH ; i++) {
step_out[i] <== step_in[i];
}

Expand All @@ -173,10 +172,11 @@ template HTTPVerification(DATA_BYTES, MAX_NUMBER_OF_HEADERS, PUBLIC_IO_LENGTH) {
// log("next_parsing_body: ", State[DATA_BYTES - 1].next_parsing_body);
// log("next_line_status: ", State[DATA_BYTES - 1].next_line_status);
// log("line_digest: ", line_digest[DATA_BYTES]);
// log("main_monomial: ", main_monomials[DATA_BYTES - 1] * ciphertext_digest);
// log("body_digest: ", body_digest[DATA_BYTES - 1]);

// for (var i = 0 ; i < PUBLIC_IO_LENGTH ; i++) {
// log("step_out[",i,"] = ", step_out[i]);
// }
// log("xxxxx HTTP Verification Done xxxxx");
// for (var i = 0 ; i < PUBLIC_IO_LENGTH ; i++) {
// log("step_out[",i,"] = ", step_out[i]);
// }
// log("xxxxx HTTP Verification Done xxxxx");
}
49 changes: 21 additions & 28 deletions circuits/json/extraction.circom
Original file line number Diff line number Diff line change
Expand Up @@ -6,18 +6,18 @@ include "hash_machine.circom";
template JSONExtraction(DATA_BYTES, MAX_STACK_HEIGHT, PUBLIC_IO_LENGTH) {
signal input data[DATA_BYTES];
signal input ciphertext_digest;
signal input sequence_digest;
signal input sequence_digest; // todo(sambhav): should sequence digest be 0 for first json circuit?
signal input value_digest;
signal input state[MAX_STACK_HEIGHT * 4 + 3];
signal input state[MAX_STACK_HEIGHT * 4 + 4];

signal input step_in[PUBLIC_IO_LENGTH];
signal output step_out[PUBLIC_IO_LENGTH];

//--------------------------------------------------------------------------------------------//

// assertions:
step_in[5] === 0; // HTTP statements matched
signal input_state_digest <== PolynomialDigest(MAX_STACK_HEIGHT * 4 + 3)(state, ciphertext_digest);
// step_in[5] === 0; // HTTP statements matched // TODO: either remove this or send a public io var
signal input_state_digest <== PolynomialDigest(MAX_STACK_HEIGHT * 4 + 4)(state, ciphertext_digest);
step_in[8] === input_state_digest;
signal sequence_digest_hashed <== Poseidon(1)([sequence_digest]);
step_in[9] === sequence_digest_hashed;
Expand All @@ -34,18 +34,6 @@ template JSONExtraction(DATA_BYTES, MAX_STACK_HEIGHT, PUBLIC_IO_LENGTH) {
signal intermediate_digest[DATA_BYTES][3 * MAX_STACK_HEIGHT];
signal state_digest[DATA_BYTES];

// Debugging
// for(var i = 0; i<MAX_STACK_HEIGHT; i++) {
// log("State[", 0, "].next_stack[", i,"] = [",State[0].next_stack[i][0], "][", State[0].next_stack[i][1],"]" );
// }
// for(var i = 0; i<MAX_STACK_HEIGHT; i++) {
// log("State[", 0, "].next_tree_hash[", i,"] = [",State[0].next_tree_hash[i][0], "][", State[0].next_tree_hash[i][1],"]" );
// }
// log("State[", 0, "].next_monomial =", State[0].next_monomial);
// log("State[", 0, "].next_parsing_string =", State[0].next_parsing_string);
// log("State[", 0, "].next_parsing_number =", State[0].next_parsing_number);
// log("xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx");

var total_matches = 0;
signal sequence_is_matched[DATA_BYTES];
signal value_is_matched[DATA_BYTES];
Expand All @@ -61,7 +49,8 @@ template JSONExtraction(DATA_BYTES, MAX_STACK_HEIGHT, PUBLIC_IO_LENGTH) {
State[0].polynomial_input <== ciphertext_digest;
State[0].monomial <== state[MAX_STACK_HEIGHT*4];
State[0].parsing_string <== state[MAX_STACK_HEIGHT*4 + 1];
State[0].parsing_number <== state[MAX_STACK_HEIGHT*4 + 2];
State[0].parsing_primitive <== state[MAX_STACK_HEIGHT*4 + 2];
State[0].escaped <== state[MAX_STACK_HEIGHT*4 + 3];
} else {
State[data_idx] = StateUpdateHasher(MAX_STACK_HEIGHT);
State[data_idx].byte <== data[data_idx];
Expand All @@ -70,7 +59,8 @@ template JSONExtraction(DATA_BYTES, MAX_STACK_HEIGHT, PUBLIC_IO_LENGTH) {
State[data_idx].tree_hash <== State[data_idx - 1].next_tree_hash;
State[data_idx].monomial <== State[data_idx - 1].next_monomial;
State[data_idx].parsing_string <== State[data_idx - 1].next_parsing_string;
State[data_idx].parsing_number <== State[data_idx - 1].next_parsing_number;
State[data_idx].parsing_primitive <== State[data_idx - 1].next_parsing_primitive;
State[data_idx].escaped <== State[data_idx - 1].next_escaped;
}

// Digest the whole stack and key tree hash
Expand Down Expand Up @@ -104,14 +94,15 @@ template JSONExtraction(DATA_BYTES, MAX_STACK_HEIGHT, PUBLIC_IO_LENGTH) {
// }
// log("State[", data_idx, "].next_monomial =", State[data_idx].next_monomial);
// log("State[", data_idx, "].next_parsing_string =", State[data_idx].next_parsing_string);
// log("State[", data_idx, "].next_parsing_number =", State[data_idx].next_parsing_number);
// log("State[", data_idx, "].next_parsing_primitive =", State[data_idx].next_parsing_primitive);
// log("State[", data_idx, "].next_escaped =", State[data_idx].next_escaped);
// log("++++++++++++++++++++++++++++++++++++++++++++++++");
// log("state_digest[", data_idx,"] = ", state_digest[data_idx]);
// log("total_matches = ", total_matches);
// log("xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx");
}

signal new_state[MAX_STACK_HEIGHT*4 + 3];
signal new_state[MAX_STACK_HEIGHT*4 + 4];
for(var i = 0; i < MAX_STACK_HEIGHT; i++) {
new_state[i*2] <== State[DATA_BYTES - 1].next_stack[i][0];
new_state[i*2+1] <== State[DATA_BYTES - 1].next_stack[i][1];
Expand All @@ -120,11 +111,12 @@ template JSONExtraction(DATA_BYTES, MAX_STACK_HEIGHT, PUBLIC_IO_LENGTH) {
}
new_state[MAX_STACK_HEIGHT*4] <== State[DATA_BYTES - 1].next_monomial;
new_state[MAX_STACK_HEIGHT*4 + 1] <== State[DATA_BYTES - 1].next_parsing_string;
new_state[MAX_STACK_HEIGHT*4 + 2] <== State[DATA_BYTES - 1].next_parsing_number;
signal new_state_digest <== PolynomialDigest(MAX_STACK_HEIGHT * 4 + 3)(new_state, ciphertext_digest);
new_state[MAX_STACK_HEIGHT*4 + 2] <== State[DATA_BYTES - 1].next_parsing_primitive;
new_state[MAX_STACK_HEIGHT*4 + 3] <== State[DATA_BYTES - 1].next_escaped;
signal new_state_digest <== PolynomialDigest(MAX_STACK_HEIGHT * 4 + 4)(new_state, ciphertext_digest);

// for (var i = 0 ; i < MAX_STACK_HEIGHT * 4 + 3 ; i++) {
// log("new_state[", i, "] = ", new_state[i]);
// for (var i = 0 ; i < MAX_STACK_HEIGHT * 2 + 2 ; i++) {
// log("new_state[", i, "] = ", new_state[i*2], new_state[i*2 + 1]);
// }

// Verify we have now processed all the data properly
Expand All @@ -145,10 +137,12 @@ template JSONExtraction(DATA_BYTES, MAX_STACK_HEIGHT, PUBLIC_IO_LENGTH) {
// Set the output to the digest of the intended value
step_out[0] <== step_in[0] - data_digest + value_digest * total_matches;

// value_digest should be non-zero
signal is_value_digest_zero <== IsEqual()([value_digest, 0]);
// both should be 0 or 1 together
signal is_new_state_digest_zero <== IsEqual()([new_state_digest, 0]);
signal is_step_out_zero_matched <== IsEqual()([step_out[0], value_digest]);
0 === is_new_state_digest_zero - is_step_out_zero_matched; // verify final value matches
0 === (1 - is_value_digest_zero) * (is_new_state_digest_zero - is_step_out_zero_matched); // verify final value matches

step_out[1] <== step_in[1];
step_out[2] <== step_in[2];
Expand All @@ -160,9 +154,8 @@ template JSONExtraction(DATA_BYTES, MAX_STACK_HEIGHT, PUBLIC_IO_LENGTH) {
step_out[8] <== new_state_digest;
step_out[9] <== step_in[9];
step_out[10] <== step_in[10];
for (var i = 11 ; i < PUBLIC_IO_LENGTH ; i++) {
step_out[i] <== step_in[i];
}

step_out[1] === step_out[2]; // assert http and plaintext parsed same amount

// for (var i = 0 ; i < PUBLIC_IO_LENGTH ; i++) {
// log("step_out[", i, "] = ", step_out[i]);
Expand Down
Loading