diff --git a/builds/target_512b/json_extraction_512b.circom b/builds/target_512b/json_extraction_512b.circom index ae251ec..20263d2 100644 --- a/builds/target_512b/json_extraction_512b.circom +++ b/builds/target_512b/json_extraction_512b.circom @@ -2,4 +2,4 @@ pragma circom 2.1.9; include "../../circuits/json/extraction.circom"; -component main { public [step_in] } = JSONExtraction(512, 10, 11); \ No newline at end of file +component main { public [step_in] } = JSONExtraction(512, 12, 11); \ No newline at end of file diff --git a/circuits/json/hash_machine.circom b/circuits/json/hash_machine.circom index e971bb2..040ff11 100644 --- a/circuits/json/hash_machine.circom +++ b/circuits/json/hash_machine.circom @@ -362,14 +362,16 @@ template RewriteStack(n) { //--------------------------------------------------------------------------------------------// // Hash the next_* states to produce hash we need // TODO: This could be optimized -- we don't really need to do the index selector, we can just accumulate elsewhere - component stateHash[2]; - stateHash[0] = IndexSelector(n); - stateHash[0].index <== pointer - 1; - stateHash[1] = IndexSelector(n); - stateHash[1].index <== pointer - 1; + signal stateHash[2][n+1]; + stateHash[0][0] <== 0; + stateHash[1][0] <== 0; + // stateHash[0] = IndexSelector(n); + // stateHash[0].index <== pointer - 1; + // stateHash[1] = IndexSelector(n); + // stateHash[1].index <== pointer - 1; for(var i = 0 ; i < n ; i++) { - stateHash[0].in[i] <== tree_hash[i][0]; - stateHash[1].in[i] <== tree_hash[i][1]; + stateHash[0][i+1] <== stateHash[0][i] + tree_hash_indicator[i] * tree_hash[i][0]; + stateHash[1][i+1] <== stateHash[1][i] + tree_hash_indicator[i] * tree_hash[i][1]; } signal is_object_key <== IsEqualArray(2)([current_value,[1,0]]); @@ -377,8 +379,8 @@ template RewriteStack(n) { signal is_array <== IsEqual()([current_value[0], 2]); signal not_to_hash <== IsZero()(parsing_string * next_parsing_string + next_parsing_primitive); - signal hash_0 
<== is_object_key * stateHash[0].out; // TODO: I think these may not be needed - signal hash_1 <== (is_object_value + is_array) * stateHash[1].out; // TODO: I think these may not be needed + signal hash_0 <== is_object_key * stateHash[0][n]; // TODO: I think these may not be needed + signal hash_1 <== (is_object_value + is_array) * stateHash[1][n]; // TODO: I think these may not be needed signal monomial_is_zero <== IsZero()(monomial); signal increased_power <== monomial * polynomial_input; @@ -386,8 +388,8 @@ template RewriteStack(n) { signal option_hash <== hash_0 + hash_1 + byte * next_monomial; signal next_state_hash[2]; - next_state_hash[0] <== not_to_hash * (stateHash[0].out - option_hash) + option_hash; // same as: (1 - not_to_hash[i]) * option_hash[i] + not_to_hash[i] * hash[i]; - next_state_hash[1] <== not_to_hash * (stateHash[1].out - option_hash) + option_hash; + next_state_hash[0] <== not_to_hash * (stateHash[0][n] - option_hash) + option_hash; // same as: (1 - not_to_hash[i]) * option_hash[i] + not_to_hash[i] * hash[i]; + next_state_hash[1] <== not_to_hash * (stateHash[1][n] - option_hash) + option_hash; // ^^^^ next_state_hash is the previous value (state_hash) or it is the newly computed value (option_hash) //--------------------------------------------------------------------------------------------// @@ -405,7 +407,7 @@ template RewriteStack(n) { signal to_change_zeroth <== (1 - is_array) * still_parsing_object_key + end_kv; signal not_end_char_for_first <== IsZero()(readColonAndNotParsingString + readCommaAndNotParsingString + readQuote + (1-next_parsing_primitive)); - signal maintain_zeroth <== is_object_value * stateHash[0].out; + signal maintain_zeroth <== is_object_value * stateHash[0][n]; signal to_change_first <== is_object_value + is_array; // signal tree_hash_change_value[2] <== [not_array_and_not_object_value_and_not_end_kv * next_state_hash[0], to_change_first * next_state_hash[1]]; diff --git a/circuits/test/json/extraction.test.ts 
b/circuits/test/json/extraction.test.ts index 557f3e4..d9d56d7 100644 --- a/circuits/test/json/extraction.test.ts +++ b/circuits/test/json/extraction.test.ts @@ -480,6 +480,66 @@ describe("JSON Extraction", () => { console.log("> Fourth subtest passed."); }); + it(`input: empty`, async () => { + let filename = "empty"; + let [input, _keyUnicode, _output] = readJSONInputFile(`${filename}.json`, []); + let input_padded = input.concat(Array(DATA_BYTES - input.length).fill(-1)); + + // Test `{}` in "empty" key + let keySequence: JsonMaskType[] = [ + { type: "Object", value: strToBytes("empty") }, + ]; + let [stack, treeHashes] = jsonTreeHasher(mock_ct_digest, keySequence, MAX_STACK_HEIGHT); + console.log(treeHashes); + let sequence_digest = compressTreeHash(mock_ct_digest, [stack, treeHashes]); + let sequence_digest_hashed = poseidon1([sequence_digest]); + + let value_digest = BigInt(0); + + let data_digest = PolynomialDigest(input, mock_ct_digest, BigInt(0)); + + let state = Array(MAX_STACK_HEIGHT * 4 + 4).fill(0); + let state_digest = PolynomialDigest(state, mock_ct_digest, BigInt(0)); + + let step_in = [data_digest, 0, 0, 0, 0, 0, 0, 1, state_digest, sequence_digest_hashed, 0]; + + let json_extraction_step_out = await hash_parser.compute({ + data: input_padded, + ciphertext_digest: mock_ct_digest, + sequence_digest, + value_digest, + step_in, + state, + }, ["step_out"]); + assert.deepEqual((json_extraction_step_out.step_out as BigInt[])[0], value_digest); + assert.deepEqual((json_extraction_step_out.step_out as BigInt[])[7], modPow(mock_ct_digest, BigInt(input.length))); + assert.deepEqual((json_extraction_step_out.step_out as BigInt[])[9], sequence_digest_hashed); + console.log("> First subtest passed."); + + // Test `[]` in "arr" key + keySequence = [ + { type: "Object", value: strToBytes("arr") }, + ]; + [stack, treeHashes] = jsonTreeHasher(mock_ct_digest, keySequence, MAX_STACK_HEIGHT); + sequence_digest = compressTreeHash(mock_ct_digest, [stack, treeHashes]); + 
sequence_digest_hashed = poseidon1([sequence_digest]); + value_digest = BigInt(0); + step_in = [data_digest, 0, 0, 0, 0, 0, 0, 1, state_digest, sequence_digest_hashed, 0]; + + json_extraction_step_out = await hash_parser.compute({ + data: input_padded, + ciphertext_digest: mock_ct_digest, + sequence_digest, + value_digest, + step_in, + state, + }, ["step_out"]); + assert.deepEqual((json_extraction_step_out.step_out as BigInt[])[0], value_digest); + assert.deepEqual((json_extraction_step_out.step_out as BigInt[])[7], modPow(mock_ct_digest, BigInt(input.length))); + assert.deepEqual((json_extraction_step_out.step_out as BigInt[])[9], sequence_digest_hashed); + console.log("> Second subtest passed."); + }); + it(`input: spotify`, async () => { let filename = "spotify"; let [input, _keyUnicode, _output] = readJSONInputFile(`${filename}.json`, []); diff --git a/examples/json/empty.json b/examples/json/empty.json new file mode 100644 index 0000000..9b571d7 --- /dev/null +++ b/examples/json/empty.json @@ -0,0 +1 @@ +{"object":{},"arr":[]} \ No newline at end of file diff --git a/witness-generator/src/json/parser.rs b/witness-generator/src/json/parser.rs index c6e6a94..7877b00 100644 --- a/witness-generator/src/json/parser.rs +++ b/witness-generator/src/json/parser.rs @@ -166,7 +166,10 @@ pub fn parse( )), }, END_BRACE => match (machine.clone().status, machine.current_location()) { - (Status::None | Status::ParsingPrimitive(_), Location::ObjectValue) => { + ( + Status::None | Status::ParsingPrimitive(_), + Location::ObjectKey | Location::ObjectValue, + ) => { machine.location[machine.pointer() - 1] = Location::None; machine.status = Status::None; machine.clear_label_stack(); @@ -387,6 +390,7 @@ mod tests { r#"{"null": null, "false": false, "true": true, "num1": 2.0E-1, "num2": 2.0e+1}"# )] #[case::primitives_array(r#"[null,false,true,2.0E-1,2.0e+1]"#)] + #[case::empty(r#"{"object":{},"arr":[]}"#)] fn test_json_parser_valid(#[case] input: &str) { let polynomial_input = 
create_polynomial_input();