Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion builds/target_512b/json_extraction_512b.circom
Original file line number Diff line number Diff line change
Expand Up @@ -2,4 +2,4 @@ pragma circom 2.1.9;

include "../../circuits/json/extraction.circom";

component main { public [step_in] } = JSONExtraction(512, 10, 11);
component main { public [step_in] } = JSONExtraction(512, 12, 11);
26 changes: 14 additions & 12 deletions circuits/json/hash_machine.circom
Original file line number Diff line number Diff line change
Expand Up @@ -362,32 +362,34 @@ template RewriteStack(n) {
//--------------------------------------------------------------------------------------------//
// Hash the next_* states to produce hash we need
// TODO: This could be optimized -- we don't really need to do the index selector, we can just accumulate elsewhere
component stateHash[2];
stateHash[0] = IndexSelector(n);
stateHash[0].index <== pointer - 1;
stateHash[1] = IndexSelector(n);
stateHash[1].index <== pointer - 1;
signal stateHash[2][n+1];
stateHash[0][0] <== 0;
stateHash[1][0] <== 0;
// stateHash[0] = IndexSelector(n);
// stateHash[0].index <== pointer - 1;
// stateHash[1] = IndexSelector(n);
// stateHash[1].index <== pointer - 1;
for(var i = 0 ; i < n ; i++) {
stateHash[0].in[i] <== tree_hash[i][0];
stateHash[1].in[i] <== tree_hash[i][1];
stateHash[0][i+1] <== stateHash[0][i] + tree_hash_indicator[i] * tree_hash[i][0];
stateHash[1][i+1] <== stateHash[1][i] + tree_hash_indicator[i] * tree_hash[i][1];
}

signal is_object_key <== IsEqualArray(2)([current_value,[1,0]]);
signal is_object_value <== IsEqualArray(2)([current_value,[1,1]]);
signal is_array <== IsEqual()([current_value[0], 2]);

signal not_to_hash <== IsZero()(parsing_string * next_parsing_string + next_parsing_primitive);
signal hash_0 <== is_object_key * stateHash[0].out; // TODO: I think these may not be needed
signal hash_1 <== (is_object_value + is_array) * stateHash[1].out; // TODO: I think these may not be needed
signal hash_0 <== is_object_key * stateHash[0][n]; // TODO: I think these may not be needed
signal hash_1 <== (is_object_value + is_array) * stateHash[1][n]; // TODO: I think these may not be needed

signal monomial_is_zero <== IsZero()(monomial);
signal increased_power <== monomial * polynomial_input;
next_monomial <== (1 - not_to_hash) * (monomial_is_zero + increased_power); // if monomial is zero and to_hash, then this treats monomial as if it is 1, else we increment the monomial
signal option_hash <== hash_0 + hash_1 + byte * next_monomial;

signal next_state_hash[2];
next_state_hash[0] <== not_to_hash * (stateHash[0].out - option_hash) + option_hash; // same as: (1 - not_to_hash[i]) * option_hash[i] + not_to_hash[i] * hash[i];
next_state_hash[1] <== not_to_hash * (stateHash[1].out - option_hash) + option_hash;
next_state_hash[0] <== not_to_hash * (stateHash[0][n] - option_hash) + option_hash; // same as: (1 - not_to_hash[i]) * option_hash[i] + not_to_hash[i] * hash[i];
next_state_hash[1] <== not_to_hash * (stateHash[1][n] - option_hash) + option_hash;
// ^^^^ next_state_hash is the previous value (state_hash) or it is the newly computed value (option_hash)
//--------------------------------------------------------------------------------------------//

Expand All @@ -405,7 +407,7 @@ template RewriteStack(n) {
signal to_change_zeroth <== (1 - is_array) * still_parsing_object_key + end_kv;

signal not_end_char_for_first <== IsZero()(readColonAndNotParsingString + readCommaAndNotParsingString + readQuote + (1-next_parsing_primitive));
signal maintain_zeroth <== is_object_value * stateHash[0].out;
signal maintain_zeroth <== is_object_value * stateHash[0][n];
signal to_change_first <== is_object_value + is_array;
// signal tree_hash_change_value[2] <== [not_array_and_not_object_value_and_not_end_kv * next_state_hash[0], to_change_first * next_state_hash[1]];

Expand Down
60 changes: 60 additions & 0 deletions circuits/test/json/extraction.test.ts
Original file line number Diff line number Diff line change
Expand Up @@ -480,6 +480,66 @@ describe("JSON Extraction", () => {
console.log("> Fourth subtest passed.");
});

it(`input: empty`, async () => {
  let filename = "empty";
  // NOTE(review): the scraped source showed `$(unknown).json`; reconstructed as the
  // `${filename}.json` template literal, matching the `filename` variable declared above.
  let [input, _keyUnicode, _output] = readJSONInputFile(`${filename}.json`, []);
  // Pad the JSON bytes out to the fixed circuit width with -1 sentinels.
  let input_padded = input.concat(Array(DATA_BYTES - input.length).fill(-1));

  // Subtest 1: extract the empty-object value `{}` via the "empty" key.
  // NOTE(review): examples/json/empty.json contains keys "object" and "arr";
  // confirm "empty" is the intended lookup key here (value_digest is 0 either way).
  let keySequence: JsonMaskType[] = [
    { type: "Object", value: strToBytes("empty") },
  ];
  let [stack, treeHashes] = jsonTreeHasher(mock_ct_digest, keySequence, MAX_STACK_HEIGHT);
  console.log(treeHashes);
  let sequence_digest = compressTreeHash(mock_ct_digest, [stack, treeHashes]);
  let sequence_digest_hashed = poseidon1([sequence_digest]);

  // An empty object/array contributes nothing to the extracted-value digest.
  let value_digest = BigInt(0);

  let data_digest = PolynomialDigest(input, mock_ct_digest, BigInt(0));

  // Fresh (all-zero) parser state and its polynomial digest.
  let state = Array(MAX_STACK_HEIGHT * 4 + 4).fill(0);
  let state_digest = PolynomialDigest(state, mock_ct_digest, BigInt(0));

  let step_in = [data_digest, 0, 0, 0, 0, 0, 0, 1, state_digest, sequence_digest_hashed, 0];

  let json_extraction_step_out = await hash_parser.compute({
    data: input_padded,
    ciphertext_digest: mock_ct_digest,
    sequence_digest,
    value_digest,
    step_in,
    state,
  }, ["step_out"]);
  // step_out[0]: extracted value digest; step_out[7]: ct_digest^len accumulator;
  // step_out[9]: hashed sequence digest carried through unchanged.
  assert.deepEqual((json_extraction_step_out.step_out as BigInt[])[0], value_digest);
  assert.deepEqual((json_extraction_step_out.step_out as BigInt[])[7], modPow(mock_ct_digest, BigInt(input.length)));
  assert.deepEqual((json_extraction_step_out.step_out as BigInt[])[9], sequence_digest_hashed);
  console.log("> First subtest passed.");

  // Subtest 2: extract the empty-array value `[]` via the "arr" key.
  keySequence = [
    { type: "Object", value: strToBytes("arr") },
  ];
  [stack, treeHashes] = jsonTreeHasher(mock_ct_digest, keySequence, MAX_STACK_HEIGHT);
  sequence_digest = compressTreeHash(mock_ct_digest, [stack, treeHashes]);
  sequence_digest_hashed = poseidon1([sequence_digest]);
  value_digest = BigInt(0);
  step_in = [data_digest, 0, 0, 0, 0, 0, 0, 1, state_digest, sequence_digest_hashed, 0];

  json_extraction_step_out = await hash_parser.compute({
    data: input_padded,
    ciphertext_digest: mock_ct_digest,
    sequence_digest,
    value_digest,
    step_in,
    state,
  }, ["step_out"]);
  assert.deepEqual((json_extraction_step_out.step_out as BigInt[])[0], value_digest);
  assert.deepEqual((json_extraction_step_out.step_out as BigInt[])[7], modPow(mock_ct_digest, BigInt(input.length)));
  assert.deepEqual((json_extraction_step_out.step_out as BigInt[])[9], sequence_digest_hashed);
  console.log("> Second subtest passed.");
});

it(`input: spotify`, async () => {
let filename = "spotify";
let [input, _keyUnicode, _output] = readJSONInputFile(`${filename}.json`, []);
Expand Down
1 change: 1 addition & 0 deletions examples/json/empty.json
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
{"object":{},"arr":[]}
6 changes: 5 additions & 1 deletion witness-generator/src/json/parser.rs
Original file line number Diff line number Diff line change
Expand Up @@ -166,7 +166,10 @@ pub fn parse<const MAX_STACK_HEIGHT: usize>(
)),
},
END_BRACE => match (machine.clone().status, machine.current_location()) {
(Status::None | Status::ParsingPrimitive(_), Location::ObjectValue) => {
(
Status::None | Status::ParsingPrimitive(_),
Location::ObjectKey | Location::ObjectValue,
) => {
machine.location[machine.pointer() - 1] = Location::None;
machine.status = Status::None;
machine.clear_label_stack();
Expand Down Expand Up @@ -387,6 +390,7 @@ mod tests {
r#"{"null": null, "false": false, "true": true, "num1": 2.0E-1, "num2": 2.0e+1}"#
)]
#[case::primitives_array(r#"[null,false,true,2.0E-1,2.0e+1]"#)]
#[case::empty(r#"{"object":{},"arr":[]}"#)]
fn test_json_parser_valid(#[case] input: &str) {
let polynomial_input = create_polynomial_input();

Expand Down
Loading