diff --git a/Cargo.toml b/Cargo.toml index b68e1f6d..c618fbd6 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -16,8 +16,8 @@ members = [ "tooling/provekit-gnark", "tooling/verifier-server", "ntt", + "playground/passport-input-gen", ] -exclude = ["playground/passport-input-gen"] [workspace.package] edition = "2021" diff --git a/playground/passport-input-gen/Cargo.toml b/playground/passport-input-gen/Cargo.toml index 0a2487b3..0a0f4dc8 100644 --- a/playground/passport-input-gen/Cargo.toml +++ b/playground/passport-input-gen/Cargo.toml @@ -1,24 +1,42 @@ [package] name = "passport-input-gen" version = "0.1.0" -edition = "2021" +edition.workspace = true +rust-version.workspace = true +authors.workspace = true +license.workspace = true +homepage.workspace = true +repository.workspace = true description = "Passport input generator" +[lints] +workspace = true + [dependencies] -rsa = { version = "0.9.8", features = ["sha2"] } -sha2 = { version = "0.10", features = ["compress"] } -x509-parser = "0.16" -base64 = "0.22" -hex = "0.4" -rasn = "0.15" -rasn-pkix = "0.15" -rasn-cms = "0.15" +# Workspace dependencies +anyhow.workspace = true +ark-bn254.workspace = true +ark-ff.workspace = true +base64.workspace = true +hex.workspace = true +noirc_abi.workspace = true +provekit-common.workspace = true +provekit-prover.workspace = true +serde.workspace = true +serde_json.workspace = true +sha2 = { workspace = true, features = ["compress"] } +tracing.workspace = true +tracing-subscriber = { workspace = true, features = ["fmt"] } + +# Crate-specific dependencies chrono = { version = "0.4", features = ["serde"] } -serde = { version = "1.0", features = ["derive"] } -serde_json = "1.0.143" -toml = "0.8" +argh.workspace = true +lazy_static = "1.5.0" noir-bignum-paramgen = "0.1.5" -thiserror = "2.0.16" +rasn = "0.15" +rasn-cms = "0.15" +rasn-pkix = "0.15" +rsa = { version = "0.9.8", features = ["sha2"] } signature = "2.2" -lazy_static = "1.5.0" - +thiserror = "2.0.16" +x509-parser = "0.16" diff --git a/playground/passport-input-gen/README.md b/playground/passport-input-gen/README.md index da843e60..052e0e41 100644 --- a/playground/passport-input-gen/README.md +++ b/playground/passport-input-gen/README.md @@ -1,110 +1,268 @@ # Passport Input Generator -A Rust crate for parsing passport data and generating circuit inputs for Noir Circuits. +A Rust crate for parsing passport data and generating circuit inputs for the `merkle_age_check` multi-circuit pipeline. ## Overview -This crate provides functionality to: +This crate handles the Rust side of the `merkle_age_check` proving pipeline. It: -- Parse passport Machine Readable Zone (MRZ) data from DG1 and SOD -- Validate passport signatures using DSC and CSCA certificates -- Generate mock passport data for testing -- Convert passport data to circuit inputs for Noir zero-knowledge circuits +- Parses passport Machine Readable Zone (MRZ) data from DG1 and SOD +- Validates passport signatures against DSC and CSCA certificates +- Computes Poseidon2 commitment chains across circuits +- Generates per-circuit input structs for both TBS-720 and TBS-1300 chains +- Provides two output modes: TOML files (for use with `nargo prove`) or direct proving (no TOML, bypasses file I/O) + +## Circuit Pipeline + +The `merkle_age_check` circuit splits passport verification into a chain of smaller circuits. The chain depends on the TBS certificate size: + +### TBS-720 (4 circuits) + +Used when the DSC TBS certificate fits within 720 bytes. 
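+Which chain applies can be decided directly from the parsed TBS certificate
+length. A minimal sketch of that decision (the `ChainVariant` enum and
+`select_chain` helper are illustrative only, not part of this crate):
+
+```rust
+// Illustrative only: the 720/1300 split follows from the DSC TBS length.
+enum ChainVariant {
+    Tbs720,  // 4-circuit chain
+    Tbs1300, // 5-circuit chain
+}
+
+fn select_chain(tbs_certificate_len: usize) -> ChainVariant {
+    if tbs_certificate_len <= 720 {
+        ChainVariant::Tbs720
+    } else {
+        ChainVariant::Tbs1300
+    }
+}
+```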
+ +``` +t_add_dsc_720 → t_add_id_data_720 → t_add_integrity_commit → t_attest +``` + +| Circuit | Verifies | Output | +|---------|----------|--------| +| `t_add_dsc_720` | CSCA signature over DSC TBS cert | `comm_out_1 = Poseidon2(salt, country, tbs_cert)` | +| `t_add_id_data_720` | DSC signature over SOD signed attrs | `comm_out_2 = Poseidon2(salt, country, signed_attrs, dg1, econtent, nullifier)` | +| `t_add_integrity_commit` | DG1 hash inside eContent | Merkle leaf = `Poseidon2(hDG1, sod_hash)` | +| `t_attest` | Merkle membership proof | `(param_commitment, scoped_nullifier)` | + +### TBS-1300 (5 circuits) + +Used when the DSC TBS certificate exceeds 720 bytes (padded to 1300). DSC verification is split into two circuits using partial SHA256. + +``` +t_add_dsc_hash_1300 → t_add_dsc_verify_1300 → t_add_id_data_1300 → t_add_integrity_commit → t_attest +``` + +| Circuit | Role | +|---------|------| +| `t_add_dsc_hash_1300` | SHA256 of first 640 bytes of TBS cert, outputs intermediate state | +| `t_add_dsc_verify_1300` | Continues SHA256, verifies CSCA RSA signature | +| `t_add_id_data_1300` | Same as 720 variant but with 1300-byte TBS | +| `t_add_integrity_commit` | Shared with TBS-720 chain | +| `t_attest` | Shared with TBS-720 chain | + +The expensive registration circuits (`t_add_*`) run once. The fast `t_attest` circuit (Poseidon-only, no RSA) runs repeatedly for each attestation against a Merkle tree root. + +## Library API ### `PassportReader` -Main structure for reading and validating passport data. +Wraps DG1 + SOD data and produces per-circuit input structs. + +```rust +use passport_input_gen::{ + Binary, PassportReader, + MerkleAge720Config, MerkleAge1300Config, MerkleAgeBaseConfig, +}; -**Structure:** +// Construct from parsed passport data +let reader = PassportReader::new( + Binary::from_slice(&dg1_bytes), + sod, + false, // mockdata = false for real passports + None, // csca_pubkey = None (will look up from embedded CSCA set) +); + +// Validate signatures; returns CSCA key index used +let csca_idx = reader.validate()?; + +// Generate TBS-720 circuit inputs +let config = MerkleAge720Config { + base: MerkleAgeBaseConfig { + current_date: 1735689600, + min_age_required: 18, + ..Default::default() + }, +}; +let inputs = reader.to_merkle_age_720_inputs(csca_idx, config)?; + +// Generate TBS-1300 circuit inputs +let config = MerkleAge1300Config { + base: MerkleAgeBaseConfig { + current_date: 1735689600, + min_age_required: 18, + ..Default::default() + }, + ..Default::default() +}; +let inputs = reader.to_merkle_age_1300_inputs(csca_idx, config)?; +``` + +### Config structs + +Both `MerkleAge720Config` and `MerkleAge1300Config` hold application-level parameters not extracted from the passport itself: + +| Field | Description | +|-------|-------------| +| `salt_1`, `salt_2` (720) / `salt_0`, `salt_1`, `salt_2` (1300) | Commitment salts chained across circuits | +| `r_dg1` | Blinding factor for DG1 Poseidon2 commitment | +| `current_date` | Unix timestamp used for age/expiry checks | +| `min_age_required` / `max_age_required` | Age range to prove (0 = no upper bound) | +| `merkle_root` | Current Merkle tree root (from sequencer) | +| `leaf_index` | Leaf index in the Merkle tree | +| `merkle_path` | Sibling hashes for the Merkle membership proof | +| `service_scope` / `service_subscope` | H(domain) and H(purpose) for scoped nullifiers | +| `nullifier_secret` | Optional secret for nullifier salting | + +Default values are mock/placeholder values suitable for testing. 
In production, `merkle_root`, `leaf_index`, `merkle_path`, and the scope fields are provided by the sequencer.
+
+### Output: TOML files
+
+Save all per-circuit inputs as TOML files for use with `nargo prove`:
 
 ```rust
-pub struct PassportReader {
-    dg1: Binary,                  // DG1 (Machine Readable Zone) data
-    sod: SOD,                     // Security Object Document
-    mockdata: bool,               // Flag indicating mock vs real passport data
-    csca_pubkey: Option<RsaPublicKey>,  // Optional CSCA public key for mock data
-}
+use std::path::Path;
+
+// TBS-720: writes t_add_dsc_720.toml, t_add_id_data_720.toml,
+// t_add_integrity_commit.toml, t_attest.toml
+inputs.save_all(Path::new("path/to/output/dir"))?;
 ```
 
-**Key Behavior:**
+### Output: Direct proving (no TOML)
 
-- When `mockdata: false`: The reader searches for existing CSCA keys from a predefined set. Currently supports USA CSCA keys loaded from the system. The `validate()` method iterates through all available USA CSCA keys to find one that successfully validates the passport signature.
+Convert inputs directly to a proof without writing TOML to disk. Inputs are serialized to JSON, parsed against the circuit ABI, and passed to `provekit-prover`:
 
-- When `mockdata: true`: The reader uses the provided `csca_pubkey` for validation. This is useful for testing with synthetic passport data generated using mock keys.
+```rust
+use provekit_prover::Prove;
+use noirc_abi::input_parser::Format;
+
+let json = serde_json::to_string(&inputs.add_dsc)?;
+let input_map = Format::Json.parse(&json, prover.witness_generator.abi())?;
+let proof = prover.prove(input_map)?;
+```
+
+## CLI
 
-**Methods:**
+The `passport_cli` binary is a non-interactive CLI tool.
 
-- `validate() -> Result<usize, PassportError>` - Validates the passport signatures and returns the CSCA key index used. For mock data, always returns index 0. For real data, returns the index of the USA CSCA key that successfully validated the passport.
-- `to_circuit_inputs(current_date: u64, min_age_required: u8, max_age_required: u8, csca_key_index: usize) -> Result<CircuitInputs, PassportError>` - Converts passport data to circuit inputs
+```
+cargo run --release --bin passport_cli -- --tbs <720|1300> --mode <toml|prove> [OPTIONS]
+```
 
-#### `CircuitInputs`
+### CLI flags
 
-Contains all necessary inputs for Noir circuits.
+| Flag | Description |
+|------|-------------|
+| `--tbs <720\|1300>` | TBS variant (required) |
+| `--mode <toml\|prove>` | Output mode (required) |
+| `--output-dir <dir>` | Output directory for TOML files or proof files, relative to current dir. Defaults to `benchmark-inputs/tbs_{720,1300}/test` |
+| `--save-logs` | Save per-circuit log files during prove mode |
+| `--log-dir <dir>` | Directory for log files, relative to current dir. 
Default: `noir-examples/noir-passport/merkle_age_check/benchmark-inputs/logs/test` |
 
-**Methods:**
+### Examples
+
+```bash
+# Generate TBS-720 TOML files to the default directory
+cargo run --release --bin passport_cli -- --tbs 720 --mode toml
 
-- `to_toml_string() -> String` - Converts circuit inputs to TOML format string
-- `save_to_toml_file<P: AsRef<Path>>(path: P) -> std::io::Result<()>` - Saves circuit inputs to a TOML file
+# Generate TBS-720 TOML files to a custom directory
+cargo run --release --bin passport_cli -- --tbs 720 --mode toml --output-dir my-inputs/tbs_720
 
-### Mock Data Generation
+# Generate TBS-1300 proofs with per-circuit logs saved to default log dir
+cargo run --release --bin passport_cli -- --tbs 1300 --mode prove --save-logs
+
+# Generate TBS-720 proofs with logs saved to a custom directory
+cargo run --release --bin passport_cli -- --tbs 720 --mode prove --save-logs --log-dir my-logs/tbs_720
+```
+
+### TOML mode
+
+Generates all Prover.toml files under the output directory (default shown below):
+
+```
+noir-examples/noir-passport/merkle_age_check/benchmark-inputs/
+  tbs_720/test/
+    t_add_dsc_720.toml
+    t_add_id_data_720.toml
+    t_add_integrity_commit.toml
+    t_attest.toml
+  tbs_1300/test/
+    t_add_dsc_hash_1300.toml
+    t_add_dsc_verify_1300.toml
+    t_add_id_data_1300.toml
+    t_add_integrity_commit.toml
+    t_attest.toml
+```
 
-#### `mock_generator` module
+Use `--output-dir` to write TOML files to a different directory.
 
-**Functions:**
+### Prove mode
 
-- `dg1_bytes_with_birthdate_expiry_date(birthdate: &[u8; 6], expiry: &[u8; 6]) -> Vec<u8>` - Generates fake DG1 data with specified birth and expiry dates (format: YYMMDD)
-- `generate_fake_sod(dg1: &[u8], dsc_priv: &RsaPrivateKey, dsc_pub: &RsaPublicKey, csca_priv: &RsaPrivateKey, _csca_pub: &RsaPublicKey) -> SOD` - Creates a synthetic SOD structure for testing
+Loads `.pkp` prover keys from the benchmark-inputs directory, generates proofs for all circuits in the chain (including `t_attest`), and writes `.np` proof files to the output directory.
 
-#### `mock_keys` module
+The CLI includes tracing-based performance profiling. Span durations, memory usage, and allocation counts are printed to stderr during proving.
 
-**Constants:**
+When `--save-logs` is passed, a separate log file is created per circuit (e.g. `t_add_dsc_720.log`). ANSI escape codes are stripped from the log files. The default log directory is `noir-examples/noir-passport/merkle_age_check/benchmark-inputs/logs/test`; use `--log-dir` to override.
 
-- `MOCK_CSCA_PRIV_KEY_B64: &str` - Base64-encoded mock CSCA private key for testing
-- `MOCK_DSC_PRIV_KEY_B64: &str` - Base64-encoded mock DSC private key for testing
+## Mock data
 
-## Usage Example
+The `mock_generator` module generates synthetic passport data for testing. All internal structures (eContent, SignedAttributes, TBS certificate) use proper DER-encoded ASN.1, matching the encoding that real passport chips produce.
```rust
-use passport_input_gen::{PassportReader, mock_generator, mock_keys};
-use base64::{engine::general_purpose::STANDARD, Engine as _};
-use rsa::{RsaPrivateKey, pkcs8::DecodePrivateKey};
-
-// Load mock keys
-let csca_der = STANDARD.decode(mock_keys::MOCK_CSCA_PRIV_KEY_B64)?;
-let csca_priv = RsaPrivateKey::from_pkcs8_der(&csca_der)?;
-let csca_pub = csca_priv.to_public_key();
-
-let dsc_der = STANDARD.decode(mock_keys::MOCK_DSC_PRIV_KEY_B64)?;
-let dsc_priv = RsaPrivateKey::from_pkcs8_der(&dsc_der)?;
-let dsc_pub = dsc_priv.to_public_key();
-
-// Generate mock passport data
-let dg1 = mock_generator::dg1_bytes_with_birthdate_expiry_date(b"900101", b"300101");
-let sod = mock_generator::generate_fake_sod(&dg1, &dsc_priv, &dsc_pub, &csca_priv, &csca_pub);
-
-// Create passport reader
-let reader = PassportReader {
-    dg1: Binary::from_slice(&dg1),
-    sod,
-    mockdata: true,
-    csca_pubkey: Some(csca_pub),
+use passport_input_gen::mock_generator::{
+    dg1_bytes_with_birthdate_expiry_date,
+    generate_sod,                 // TBS-720: DER-encoded TBS that fits within 720 bytes
+    generate_sod_with_padded_tbs, // TBS-1300: extends TBS with a padding extension
 };
-
-// Validate passport
-let csca_index = reader.validate()?;
+// DOB: Jan 1, 2007 / Expiry: Jan 1, 2032
+let dg1 = dg1_bytes_with_birthdate_expiry_date(b"070101", b"320101");
 
-// Generate circuit inputs
-let current_timestamp = chrono::Utc::now().timestamp() as u64;
-let inputs = reader.to_circuit_inputs(current_timestamp, 18, 70, csca_index)?;
+// TBS-720 SOD
+let sod = generate_sod(&dg1, &dsc_priv, &dsc_pub, &csca_priv, &csca_pub);
 
-// Export to TOML
-inputs.save_to_toml_file("circuit_inputs.toml")?;
+// TBS-1300 SOD (extends TBS to ~850 bytes via a padding extension)
+let sod = generate_sod_with_padded_tbs(
+    &dg1, &dsc_priv, &dsc_pub, &csca_priv, &csca_pub, 850
+);
 ```
 
-## Testing
+### DG1 (MRZ)
+
+`dg1_bytes_with_birthdate_expiry_date` builds a 95-byte DG1 with:
+
+- A 5-byte ASN.1 tag prefix (`0x61 0x5B 0x5F 0x1F 0x58`)
+- A 90-byte TD3 MRZ containing realistic fields (document type `P<`, country `UTO`, name `DOE<…`)
diff --git a/playground/passport-input-gen/src/bin/passport_cli/main.rs b/playground/passport-input-gen/src/bin/passport_cli/main.rs
new file mode 100644
--- /dev/null
+++ b/playground/passport-input-gen/src/bin/passport_cli/main.rs
+//! Passport Input Generator & Prover CLI: converts circuit inputs via the
+//! JSON -> InputMap -> prover.prove() pipeline.
+mod profiling_alloc;
+mod span_stats;
+
+use {
+    anyhow::{Context, Result},
+    argh::FromArgs,
+    noirc_abi::input_parser::Format,
+    passport_input_gen::{
+        mock_generator::{
+            dg1_bytes_with_birthdate_expiry_date, generate_sod, generate_sod_with_padded_tbs,
+        },
+        mock_keys::{MOCK_CSCA_PRIV_KEY_B64, MOCK_DSC_PRIV_KEY_B64},
+        Binary, CircuitInputSet, MerkleAge1300Config, MerkleAge1300Inputs, MerkleAge720Config,
+        MerkleAge720Inputs, MerkleAgeBaseConfig, PassportReader,
+    },
+    profiling_alloc::ProfilingAllocator,
+    provekit_prover::Prove,
+    span_stats::SpanStats,
+    tracing::instrument,
+    tracing_subscriber::{layer::SubscriberExt, Registry},
+};
+
+#[global_allocator]
+static ALLOCATOR: ProfilingAllocator = ProfilingAllocator::new();
+
+use {
+    base64::{engine::general_purpose::STANDARD, Engine as _},
+    rsa::{pkcs8::DecodePrivateKey, RsaPrivateKey, RsaPublicKey},
+    std::{
+        fs::File,
+        io::{BufWriter, Write as _},
+        path::{Path, PathBuf},
+        sync::Mutex,
+    },
+};
+
+// ============================================================================
+// Global log sink for tee-ing output to per-circuit log files
+// ============================================================================
+
+lazy_static::lazy_static! {
+    pub(crate) static ref LOG_SINK: Mutex<Option<BufWriter<File>>> = Mutex::new(None);
+}
+
+/// Strip ANSI escape sequences (e.g. `\x1b[0m`, `\x1b[1;32m`) from a string.
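+///
+/// For example (illustrative), `strip_ansi("\x1b[1;32mINFO\x1b[0m")` returns
+/// `"INFO"`.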
+pub(crate) fn strip_ansi(s: &str) -> String {
+    let mut result = String::with_capacity(s.len());
+    let mut chars = s.chars().peekable();
+    while let Some(c) = chars.next() {
+        if c == '\x1b' {
+            if chars.peek() == Some(&'[') {
+                chars.next(); // consume '['
+                // skip until we hit an ASCII letter (the final byte of the sequence)
+                for c in chars.by_ref() {
+                    if c.is_ascii_alphabetic() {
+                        break;
+                    }
+                }
+                continue;
+            }
+        }
+        result.push(c);
+    }
+    result
+}
+
+/// Write a message to the global LOG_SINK (if active), stripping ANSI codes.
+pub(crate) fn tee_write_log(msg: &str) {
+    if let Ok(mut guard) = LOG_SINK.lock() {
+        if let Some(ref mut writer) = *guard {
+            let stripped = strip_ansi(msg);
+            let _ = writeln!(writer, "{}", stripped);
+        }
+    }
+}
+
+/// Open a log file and set it as the active LOG_SINK.
+fn set_log_file(path: &Path) -> Result<()> {
+    let file =
+        File::create(path).with_context(|| format!("Creating log file: {}", path.display()))?;
+    let mut guard = LOG_SINK
+        .lock()
+        .map_err(|e| anyhow::anyhow!("LOG_SINK lock: {e}"))?;
+    *guard = Some(BufWriter::new(file));
+    Ok(())
+}
+
+/// Flush and close the current LOG_SINK.
+fn close_log_file() {
+    if let Ok(mut guard) = LOG_SINK.lock() {
+        if let Some(ref mut writer) = *guard {
+            let _ = writer.flush();
+        }
+        *guard = None;
+    }
+}
+
+/// Like `println!` but also writes ANSI-stripped output to LOG_SINK if active.
+macro_rules! tee_println {
+    ($($arg:tt)*) => {{
+        let msg = format!($($arg)*);
+        println!("{}", msg);
+        $crate::tee_write_log(&msg);
+    }};
+}
+
+// ============================================================================
+// CLI arguments
+// ============================================================================
+
+/// Passport Input Generator & Prover CLI
+#[derive(FromArgs)]
+struct Args {
+    /// tbs variant: 720 or 1300
+    #[argh(option)]
+    tbs: u16,
+
+    /// mode: "toml" or "prove"
+    #[argh(option)]
+    mode: String,
+
+    /// output directory for TOML files (default: benchmark-inputs/tbs_{N}/test)
+    #[argh(option)]
+    output_dir: Option<PathBuf>,
+
+    /// save per-circuit log files during prove mode
+    #[argh(switch)]
+    save_logs: bool,
+
+    /// directory for log files (default: .../benchmark-inputs/logs/test)
+    #[argh(option)]
+    log_dir: Option<PathBuf>,
+}
+
+// ============================================================================
+// Mock data helpers (consolidated from old generate_720/1300_inputs binaries)
+// ============================================================================
+
+fn load_mock_keys() -> (RsaPrivateKey, RsaPublicKey, RsaPrivateKey, RsaPublicKey) {
+    let csca_der = STANDARD
+        .decode(MOCK_CSCA_PRIV_KEY_B64)
+        .expect("Failed to decode CSCA private key");
+    let csca_priv =
+        RsaPrivateKey::from_pkcs8_der(&csca_der).expect("Failed to parse CSCA private key");
+    let csca_pub = csca_priv.to_public_key();
+
+    let dsc_der = STANDARD
+        .decode(MOCK_DSC_PRIV_KEY_B64)
+        .expect("Failed to decode DSC private key");
+    let dsc_priv =
+        RsaPrivateKey::from_pkcs8_der(&dsc_der).expect("Failed to parse DSC private key");
+    let dsc_pub = dsc_priv.to_public_key();
+
+    (csca_priv, csca_pub, dsc_priv, dsc_pub)
+}
+
+fn generate_720_inputs(
+    csca_priv: &RsaPrivateKey,
+    csca_pub: &RsaPublicKey,
+    dsc_priv: &RsaPrivateKey,
+    dsc_pub: &RsaPublicKey,
+) -> Result<MerkleAge720Inputs> {
+    println!("\n--- Generating TBS-720 inputs ---");
+
+    let dg1 = dg1_bytes_with_birthdate_expiry_date(b"070101", b"320101");
+    println!("  DG1: {} bytes (DOB: 070101, Expiry: 320101)", dg1.len());
+
+    let sod = generate_sod(&dg1, dsc_priv, dsc_pub, csca_priv, csca_pub);
+    println!("  SOD generated (mock)");
+
+    let reader = PassportReader::new(Binary::from_slice(&dg1), sod, true, Some(csca_pub.clone()));
+    let csca_idx = reader.validate().context("Passport validation failed")?;
+    println!("  Validation passed (CSCA key index: {})", csca_idx);
+
+    let config = MerkleAge720Config {
+        base: MerkleAgeBaseConfig {
+            current_date: 1735689600, // Jan 1, 2025 00:00:00 UTC
+            min_age_required: 18,
+            max_age_required: 0,
+            ..Default::default()
+        },
+    };
+
+    let inputs = reader
+        .to_merkle_age_720_inputs(csca_idx, config)
+        .context("Failed to generate 720 circuit inputs")?;
+    println!("  Circuit inputs generated for 4 circuits");
+
+    Ok(inputs)
+}
+
+fn generate_1300_inputs(
+    csca_priv: &RsaPrivateKey,
+    csca_pub: &RsaPublicKey,
+    dsc_priv: &RsaPrivateKey,
+    dsc_pub: &RsaPublicKey,
+) -> Result<MerkleAge1300Inputs> {
+    println!("\n--- Generating TBS-1300 inputs ---");
+
+    let dg1 = dg1_bytes_with_birthdate_expiry_date(b"070101", b"320101");
+    println!("  DG1: {} bytes (DOB: 070101, Expiry: 320101)", dg1.len());
+
+    let sod = generate_sod_with_padded_tbs(&dg1, dsc_priv, dsc_pub, csca_priv, csca_pub, 850);
+    println!("  SOD generated (mock, padded TBS = 850 bytes)");
+
+    let reader = PassportReader::new(Binary::from_slice(&dg1), sod, true, Some(csca_pub.clone()));
+    let csca_idx = reader.validate().context("Passport validation failed")?;
+    println!("  Validation passed (CSCA key index: {})", csca_idx);
+
+    let config = MerkleAge1300Config {
+        base: MerkleAgeBaseConfig {
+            current_date: 1735689600,
+            min_age_required: 17,
+            max_age_required: 0,
+            ..Default::default()
+        },
+        ..Default::default()
+    };
+
+    let inputs = reader
+        .to_merkle_age_1300_inputs(csca_idx, config)
+        .context("Failed to generate 1300 circuit inputs")?;
+    println!("  Circuit inputs generated for 5 circuits");
+
+    Ok(inputs)
+}
+
+// ============================================================================
+// Proving helpers
+// ============================================================================
+
+/// Load a prover from its .pkp file, convert circuit inputs to InputMap via
+/// JSON serialization + ABI parsing, generate the proof, and write it to disk.
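+///
+/// The `#[instrument]` attribute below opens a tracing span per circuit, so
+/// the `SpanStats` layer reports duration, peak memory, and allocation count
+/// for each proof separately.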
+#[instrument(skip_all, fields(circuit_name = %circuit_name))]
+fn prove_circuit<T: serde::Serialize>(
+    circuit_name: &str,
+    inputs: &T,
+    pkp_path: &Path,
+    proof_path: &Path,
+) -> Result<()> {
+    tee_println!(
+        "\n  [{circuit_name}] Loading prover from: {}",
+        pkp_path.display()
+    );
+    let prover: provekit_common::Prover = provekit_common::file::read(pkp_path)
+        .with_context(|| format!("Reading prover key for {circuit_name}"))?;
+
+    let (num_constraints, num_witnesses) = prover.size();
+    tee_println!(
+        "  [{circuit_name}] Scheme size: {num_constraints} constraints, {num_witnesses} witnesses"
+    );
+
+    tee_println!("  [{circuit_name}] Converting inputs -> JSON -> InputMap...");
+    let json = serde_json::to_string(inputs)
+        .with_context(|| format!("Serializing {circuit_name} inputs to JSON"))?;
+    let input_map = Format::Json
+        .parse(&json, prover.witness_generator.abi())
+        .map_err(|e| anyhow::anyhow!("ABI parse error for {circuit_name}: {e}"))?;
+
+    tee_println!("  [{circuit_name}] Generating proof...");
+    let proof = prover
+        .prove(input_map)
+        .with_context(|| format!("Proving {circuit_name}"))?;
+
+    tee_println!(
+        "  [{circuit_name}] Writing proof to: {}",
+        proof_path.display()
+    );
+    provekit_common::file::write(&proof, proof_path)
+        .with_context(|| format!("Writing proof for {circuit_name}"))?;
+
+    tee_println!("  [{circuit_name}] Done.");
+
+    Ok(())
+}
+
+macro_rules! prove_circuits {
+    ($pkp_dir:expr, $output_dir:expr, $log_dir:expr, $( ($name:expr, $input:expr) ),+ $(,)?) => {
+        $(
+            if let Some(dir) = $log_dir {
+                set_log_file(&dir.join(format!("{}.log", $name)))?;
+            }
+            let result = prove_circuit(
+                $name,
+                $input,
+                &$pkp_dir.join(format!("{}-prover.pkp", $name)),
+                &$output_dir.join(format!("{}-proof.np", $name)),
+            );
+            if $log_dir.is_some() {
+                close_log_file();
+            }
+            result?;
+        )+
+    };
+}
+
+fn prove_720(
+    inputs: &MerkleAge720Inputs,
+    pkp_dir: &Path,
+    output_dir: &Path,
+    log_dir: Option<&Path>,
+) -> Result<()> {
+    println!("\n  Proving TBS-720 chain (4 circuits)...");
+    prove_circuits!(
+        pkp_dir,
+        output_dir,
+        log_dir,
+        ("t_add_dsc_720", &inputs.add_dsc),
+        ("t_add_id_data_720", &inputs.add_id_data),
+        ("t_add_integrity_commit", &inputs.add_integrity),
+        ("t_attest", &inputs.attest),
+    );
+    Ok(())
+}
+
+fn prove_1300(
+    inputs: &MerkleAge1300Inputs,
+    pkp_dir: &Path,
+    output_dir: &Path,
+    log_dir: Option<&Path>,
+) -> Result<()> {
+    println!("\n  Proving TBS-1300 chain (5 circuits)...");
+    prove_circuits!(
+        pkp_dir,
+        output_dir,
+        log_dir,
+        ("t_add_dsc_hash_1300", &inputs.add_dsc_hash),
+        ("t_add_dsc_verify_1300", &inputs.add_dsc_verify),
+        ("t_add_id_data_1300", &inputs.add_id_data),
+        ("t_add_integrity_commit", &inputs.add_integrity),
+        ("t_attest", &inputs.attest),
+    );
+    Ok(())
+}
+
+// ============================================================================
+// TOML output helpers
+// ============================================================================
+
+fn save_toml(inputs: &dyn CircuitInputSet, base_dir: &Path) -> Result<()> {
+    inputs
+        .save_all(base_dir)
+        .context("Failed to write TOML files")?;
+    println!("\n  Written:");
+    for name in inputs.circuit_names() {
+        println!("    {}/{name}.toml", base_dir.display());
+    }
+    Ok(())
+}
+
+// ============================================================================
+// Summary printers
+// ============================================================================
+
+fn print_720_summary(inputs: &MerkleAge720Inputs) {
+    println!("\n  Summary:");
+    println!(
+        "    TBS certificate len: {}",
inputs.add_dsc.tbs_certificate_len + ); + println!( + " DSC pubkey offset: {}", + inputs.add_id_data.dsc_pubkey_offset_in_dsc_cert + ); + println!( + " DG1 hash offset: {}", + inputs.add_integrity.dg1_hash_offset + ); + println!(" Country: \"{}\"", inputs.add_dsc.country); + println!( + " Salt chain: {} -> {}", + inputs.add_dsc.salt, inputs.add_id_data.salt_out + ); + println!(); + println!(" Computed commitments (Poseidon2):"); + println!( + " comm_out_1 (dsc->id_data): {}", + inputs.add_id_data.comm_in + ); + println!( + " private_nullifier: {}", + inputs.add_integrity.private_nullifier + ); + println!( + " comm_out_2 (id_data->integrity): {}", + inputs.add_integrity.comm_in + ); + println!( + " sod_hash: {}", + inputs.attest.sod_hash + ); +} + +fn print_1300_summary(inputs: &MerkleAge1300Inputs) { + println!("\n Summary:"); + println!( + " TBS certificate len: {}", + inputs.add_dsc_verify.tbs_certificate_len + ); + println!( + " SHA256 state1: {:?}", + inputs.add_dsc_verify.state1 + ); + println!( + " DSC pubkey offset: {}", + inputs.add_id_data.dsc_pubkey_offset_in_dsc_cert + ); + println!( + " DG1 hash offset: {}", + inputs.add_integrity.dg1_hash_offset + ); + println!( + " Country: \"{}\"", + inputs.add_dsc_verify.country + ); + println!( + " Salt chain: {} -> {} -> {}", + inputs.add_dsc_hash.salt, inputs.add_dsc_verify.salt_out, inputs.add_id_data.salt_out, + ); + println!(); + println!(" Computed commitments (Poseidon2):"); + println!( + " comm_out_hash (dsc_hash->dsc_verify): {}", + inputs.add_dsc_verify.comm_in + ); + println!( + " comm_out_verify (dsc_verify->id_data): {}", + inputs.add_id_data.comm_in + ); + println!( + " comm_out_id (id_data->integrity): {}", + inputs.add_integrity.comm_in + ); + println!( + " private_nullifier: {}", + inputs.add_integrity.private_nullifier + ); + println!( + " sod_hash: {}", + inputs.attest.sod_hash + ); +} + +// ============================================================================ +// Main +// ============================================================================ + +fn main() -> Result<()> { + let args: Args = argh::from_env(); + + // Initialize logging/tracing with SpanStats for detailed performance metrics + let subscriber = Registry::default().with(SpanStats); + let _ = tracing::subscriber::set_global_default(subscriber); + + println!("================================================================"); + println!(" Passport Input Generator & Prover CLI"); + println!("================================================================\n"); + + let is_720 = match args.tbs { + 720 => true, + 1300 => false, + other => anyhow::bail!("Invalid --tbs value: {other}. Must be 720 or 1300."), + }; + + let is_toml = match args.mode.as_str() { + "toml" => true, + "prove" => false, + other => { + anyhow::bail!("Invalid --mode value: \"{other}\". 
Must be \"toml\" or \"prove\".") + } + }; + + // Load mock RSA key pairs + println!("\nStep 1: Loading mock RSA key pairs..."); + let (csca_priv, csca_pub, dsc_priv, dsc_pub) = load_mock_keys(); + println!(" CSCA key loaded (RSA-4096)"); + println!(" DSC key loaded (RSA-2048)"); + + let cwd = std::env::current_dir().context("Failed to get current working directory")?; + let benchmark_dir: PathBuf = + cwd.join("noir-examples/noir-passport/merkle_age_check/benchmark-inputs"); + + // Resolve log directory for prove mode + let log_dir = if args.save_logs { + let dir = match args.log_dir { + Some(d) => cwd.join(d), + None => { + cwd.join("noir-examples/noir-passport/merkle_age_check/benchmark-inputs/logs/test") + } + }; + std::fs::create_dir_all(&dir) + .with_context(|| format!("Creating log directory: {}", dir.display()))?; + println!(" Logs will be saved to: {}", dir.display()); + Some(dir) + } else { + None + }; + + // Resolve output directory: --output-dir overrides, else default per TBS + // variant + let tbs_subdir = if is_720 { "tbs_720" } else { "tbs_1300" }; + let output_dir = match args.output_dir { + Some(d) => cwd.join(d), + None => benchmark_dir.join(format!("{tbs_subdir}/test")), + }; + std::fs::create_dir_all(&output_dir) + .with_context(|| format!("Creating output directory: {}", output_dir.display()))?; + println!(" Output directory: {}", output_dir.display()); + + match (is_720, is_toml) { + (true, true) => { + let inputs = generate_720_inputs(&csca_priv, &csca_pub, &dsc_priv, &dsc_pub)?; + save_toml(&inputs, &output_dir)?; + print_720_summary(&inputs); + } + (true, false) => { + let inputs = generate_720_inputs(&csca_priv, &csca_pub, &dsc_priv, &dsc_pub)?; + print_720_summary(&inputs); + prove_720(&inputs, &benchmark_dir, &output_dir, log_dir.as_deref())?; + println!("\n All TBS-720 proofs generated successfully."); + } + (false, true) => { + let inputs = generate_1300_inputs(&csca_priv, &csca_pub, &dsc_priv, &dsc_pub)?; + save_toml(&inputs, &output_dir)?; + print_1300_summary(&inputs); + } + (false, false) => { + let inputs = generate_1300_inputs(&csca_priv, &csca_pub, &dsc_priv, &dsc_pub)?; + print_1300_summary(&inputs); + prove_1300(&inputs, &benchmark_dir, &output_dir, log_dir.as_deref())?; + println!("\n All TBS-1300 proofs generated successfully."); + } + } + + println!("\n================================================================"); + println!(" Complete"); + println!("================================================================"); + Ok(()) +} diff --git a/playground/passport-input-gen/src/bin/passport_cli/profiling_alloc.rs b/playground/passport-input-gen/src/bin/passport_cli/profiling_alloc.rs new file mode 100644 index 00000000..92c62e0c --- /dev/null +++ b/playground/passport-input-gen/src/bin/passport_cli/profiling_alloc.rs @@ -0,0 +1,95 @@ +use std::{ + alloc::{GlobalAlloc, Layout, System as SystemAlloc}, + sync::atomic::{AtomicUsize, Ordering}, +}; + +/// Custom allocator that keeps track of statistics to see program memory +/// consumption. 
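+///
+/// Wraps the system allocator: `alloc`/`alloc_zeroed`/`realloc` atomically
+/// update the current byte count, the peak, and the allocation count, which
+/// the `SpanStats` tracing layer reads to report per-span memory figures.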
+pub struct ProfilingAllocator { + /// Allocated bytes + current: AtomicUsize, + + /// Maximum allocated bytes (reached so far) + max: AtomicUsize, + + /// Number of allocations done + count: AtomicUsize, +} + +impl ProfilingAllocator { + pub const fn new() -> Self { + Self { + current: AtomicUsize::new(0), + max: AtomicUsize::new(0), + count: AtomicUsize::new(0), + } + } + + pub fn current(&self) -> usize { + self.current.load(Ordering::SeqCst) + } + + pub fn max(&self) -> usize { + self.max.load(Ordering::SeqCst) + } + + pub fn reset_max(&self) -> usize { + let current = self.current(); + self.max.store(current, Ordering::SeqCst); + current + } + + pub fn count(&self) -> usize { + self.count.load(Ordering::SeqCst) + } +} + +#[allow(unsafe_code)] +unsafe impl GlobalAlloc for ProfilingAllocator { + unsafe fn alloc(&self, layout: Layout) -> *mut u8 { + let ptr = SystemAlloc.alloc(layout); + let size = layout.size(); + let current = self + .current + .fetch_add(size, Ordering::SeqCst) + .wrapping_add(size); + self.max.fetch_max(current, Ordering::SeqCst); + self.count.fetch_add(1, Ordering::SeqCst); + ptr + } + + unsafe fn dealloc(&self, ptr: *mut u8, layout: Layout) { + self.current.fetch_sub(layout.size(), Ordering::SeqCst); + SystemAlloc.dealloc(ptr, layout); + } + + unsafe fn alloc_zeroed(&self, layout: Layout) -> *mut u8 { + let ptr = SystemAlloc.alloc_zeroed(layout); + let size = layout.size(); + let current = self + .current + .fetch_add(size, Ordering::SeqCst) + .wrapping_add(size); + self.max.fetch_max(current, Ordering::SeqCst); + self.count.fetch_add(1, Ordering::SeqCst); + ptr + } + + unsafe fn realloc(&self, ptr: *mut u8, old_layout: Layout, new_size: usize) -> *mut u8 { + let ptr = SystemAlloc.realloc(ptr, old_layout, new_size); + let old_size = old_layout.size(); + if new_size > old_size { + let diff = new_size - old_size; + let current = self + .current + .fetch_add(diff, Ordering::SeqCst) + .wrapping_add(diff); + self.max.fetch_max(current, Ordering::SeqCst); + self.count.fetch_add(1, Ordering::SeqCst); + } else { + self.current + .fetch_sub(old_size - new_size, Ordering::SeqCst); + } + ptr + } +} diff --git a/playground/passport-input-gen/src/bin/passport_cli/span_stats.rs b/playground/passport-input-gen/src/bin/passport_cli/span_stats.rs new file mode 100644 index 00000000..352a7dae --- /dev/null +++ b/playground/passport-input-gen/src/bin/passport_cli/span_stats.rs @@ -0,0 +1,248 @@ +//! Using `tracing` spans to print performance statistics for the program. 
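+//!
+//! Spans are rendered as a box-drawing tree on stderr: one line when a span
+//! opens and one when it closes, the latter reporting duration plus
+//! peak/local/current memory and allocation counts read from the global
+//! `ProfilingAllocator`.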
+use {
+    crate::ALLOCATOR,
+    provekit_common::utils::human,
+    std::{
+        cmp::max,
+        fmt::{self, Write as _},
+        time::Instant,
+    },
+    tracing::{
+        field::{Field, Visit},
+        span::{Attributes, Id},
+        Level, Subscriber,
+    },
+    tracing_subscriber::{layer::Context, registry::LookupSpan, Layer},
+};
+
+const DIM: &str = "\x1b[2m";
+const UNDIM: &str = "\x1b[22m";
+
+// Span extension data
+pub struct Data {
+    depth: usize,
+    time: Instant,
+
+    memory: usize,
+    allocations: usize,
+
+    /// `peak_memory` will be updated as it is not monotonic
+    peak_memory: usize,
+
+    children: bool,
+    kvs: Vec<(&'static str, String)>,
+}
+
+impl Data {
+    pub fn new(attrs: &Attributes<'_>, depth: usize) -> Self {
+        let mut span = Self {
+            depth,
+            time: Instant::now(),
+
+            memory: ALLOCATOR.current(),
+            allocations: ALLOCATOR.count(),
+            peak_memory: ALLOCATOR.current(),
+
+            children: false,
+            kvs: Vec::new(),
+        };
+        attrs.record(&mut span);
+        span
+    }
+}
+
+impl Visit for Data {
+    fn record_debug(&mut self, field: &Field, value: &dyn fmt::Debug) {
+        self.kvs.push((field.name(), format!("{value:?}")));
+    }
+}
+
+pub struct FmtEvent<'a>(&'a mut String);
+
+impl Visit for FmtEvent<'_> {
+    fn record_debug(&mut self, field: &Field, value: &dyn fmt::Debug) {
+        match field.name() {
+            "message" => {
+                write!(self.0, " {value:?}").unwrap();
+            }
+            name => {
+                write!(self.0, " {name}={value:?}").unwrap();
+            }
+        }
+    }
+}
+
+/// Logging layer that keeps track of time and memory consumption of spans.
+pub struct SpanStats;
+
+impl<S> Layer<S> for SpanStats
+where
+    S: Subscriber + for<'span> LookupSpan<'span>,
+{
+    fn on_new_span(&self, attrs: &Attributes<'_>, id: &Id, ctx: Context<'_, S>) {
+        let span = ctx.span(id).expect("invalid span in on_new_span");
+
+        // Update parent
+        if let Some(parent) = span.parent() {
+            if let Some(data) = parent.extensions_mut().get_mut::<Data>() {
+                data.children = true;
+                data.peak_memory = max(data.peak_memory, ALLOCATOR.max());
+            }
+        }
+        ALLOCATOR.reset_max();
+
+        // Add Data if it hasn't already
+        if span.extensions().get::<Data>().is_none() {
+            let depth = span.parent().map_or(0, |s| {
+                s.extensions()
+                    .get::<Data>()
+                    .expect("parent span has no data")
+                    .depth
+                    + 1
+            });
+            let data = Data::new(attrs, depth);
+            span.extensions_mut().insert(data);
+        }
+
+        // Fetch data
+        let ext = span.extensions();
+        let data = ext.get::<Data>().expect("span does not have data");
+
+        let mut buffer = String::with_capacity(100);
+
+        // Box draw tree indentation
+        if data.depth >= 1 {
+            for _ in 0..(data.depth - 1) {
+                let _ = write!(&mut buffer, "│ ");
+            }
+            let _ = write!(&mut buffer, "├─");
+        }
+        let _ = write!(&mut buffer, "╮ ");
+
+        // Span name
+        let _ = write!(
+            &mut buffer,
+            "{DIM}{}::{UNDIM}{}",
+            span.metadata().target(),
+            span.metadata().name()
+        );
+
+        // KV args
+        for (key, val) in &data.kvs {
+            let _ = write!(&mut buffer, " {key}={val}");
+        }
+
+        // Start-of-span memory stats
+        let _ = write!(
+            &mut buffer,
+            " {DIM}start:{UNDIM} {}B{DIM} current, {UNDIM}{:#}{DIM} allocations{UNDIM}",
+            human(ALLOCATOR.current() as f64),
+            human(ALLOCATOR.count() as f64)
+        );
+
+        eprintln!("{buffer}");
+        crate::tee_write_log(&buffer);
+    }
+
+    fn on_event(&self, event: &tracing::Event<'_>, ctx: Context<'_, S>) {
+        let span = ctx.current_span().id().and_then(|id| ctx.span(id));
+
+        let mut buffer = String::with_capacity(100);
+
+        // Span indentation + time in span
+        if let Some(span) = &span {
+            // Flag child on parent
+            if let Some(parent) = span.parent() {
+                if let Some(data) = parent.extensions_mut().get_mut::<Data>() {
+                    data.children = true;
+                }
+            }
+
+            if let Some(data) = span.extensions().get::<Data>() {
+                // Box draw tree indentation
+                for _ in 0..=data.depth {
+                    let _ = write!(&mut buffer, "│ ");
+                }
+
+                // Time
+                let elapsed = data.time.elapsed();
+                let _ = write!(
+                    &mut buffer,
+                    "{DIM}{:6}s {UNDIM}",
+                    human(elapsed.as_secs_f64())
+                );
+            }
+        }
+
+        // Log level
+        match *event.metadata().level() {
+            Level::TRACE => write!(&mut buffer, "TRACE"),
+            Level::DEBUG => write!(&mut buffer, "DEBUG"),
+            Level::INFO => write!(&mut buffer, "\x1b[1;32mINFO\x1b[0m"),
+            Level::WARN => write!(&mut buffer, "\x1b[1;38;5;208mWARN\x1b[0m"),
+            Level::ERROR => write!(&mut buffer, "\x1b[1;31mERROR\x1b[0m"),
+        }
+        .unwrap();
+
+        let mut visitor = FmtEvent(&mut buffer);
+        event.record(&mut visitor);
+
+        eprintln!("{buffer}");
+        crate::tee_write_log(&buffer);
+    }
+
+    fn on_close(&self, id: Id, ctx: Context<'_, S>) {
+        let span = ctx.span(&id).expect("invalid span in on_close");
+        let ext = span.extensions();
+        let data = ext.get::<Data>().expect("span does not have data");
+        let duration = data.time.elapsed();
+
+        let mut buffer = String::with_capacity(100);
+
+        // Box draw tree indentation
+        if data.depth >= 1 {
+            for _ in 0..(data.depth - 1) {
+                let _ = write!(&mut buffer, "│ ");
+            }
+            let _ = write!(&mut buffer, "├─");
+        }
+        let _ = write!(&mut buffer, "╯ ");
+
+        // Short span name if not childless
+        if data.children {
+            let _ = write!(&mut buffer, "{DIM}{}: {UNDIM}", span.metadata().name());
+        }
+
+        // Print stats
+        let _ = write!(
+            &mut buffer,
+            "{}s{DIM} duration",
+            human(duration.as_secs_f64()),
+        );
+
+        let peak_memory: usize = std::cmp::max(ALLOCATOR.max(), data.peak_memory);
+        let allocations = ALLOCATOR.count() - data.allocations;
+        let own = peak_memory - data.memory;
+
+        // Update parent
+        if let Some(parent) = span.parent() {
+            if let Some(data) = parent.extensions_mut().get_mut::<Data>() {
+                data.peak_memory = max(data.peak_memory, peak_memory);
+            }
+        }
+
+        let current_now = ALLOCATOR.current();
+        let _ = write!(
+            &mut buffer,
+            ", {UNDIM}{}B{DIM} peak memory, {UNDIM}{}B{DIM} local, {UNDIM}{}B{DIM} current, \
+             {UNDIM}{:#}{DIM} allocations{UNDIM}",
+            human(peak_memory as f64),
+            human(own as f64),
+            human(current_now as f64),
+            human(allocations as f64)
+        );
+
+        eprintln!("{buffer}");
+        crate::tee_write_log(&buffer);
+    }
+}
diff --git a/playground/passport-input-gen/src/commitment.rs b/playground/passport-input-gen/src/commitment.rs
new file mode 100644
index 00000000..2ec2dd64
--- /dev/null
+++ b/playground/passport-input-gen/src/commitment.rs
@@ -0,0 +1,413 @@
+//! Noir-compatible commitment functions for passport circuit inputs.
+//!
+//! These functions replicate the commitment computations from the Noir
+//! circuits, allowing the Rust input generator to compute actual values instead
+//! of placeholders.
+
+use {
+    crate::{parser::types::PassportError, poseidon2::poseidon2_hash},
+    ark_bn254::Fr,
+    ark_ff::{BigInteger, PrimeField},
+};
+
+/// Parse a 0x-prefixed hex string (e.g. "0x2") into a BN254 field element.
+pub fn parse_hex_to_field(hex_str: &str) -> Result<Fr, PassportError> {
+    let stripped = hex_str.strip_prefix("0x").unwrap_or(hex_str);
+    let padded = format!("{:0>64}", stripped);
+    let bytes = hex::decode(&padded).map_err(|e| PassportError::InvalidHexField {
+        field: hex_str.to_string(),
+        source: e,
+    })?;
+    Ok(Fr::from_be_bytes_mod_order(&bytes))
+}
+
+/// Pack big-endian bytes into BN254 field elements, matching Noir's
+/// `pack_be_bytes_into_fields()`.
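+///
+/// For example, packing the 3 bytes `[0x41, 0x42, 0x43]` ("ABC") yields a
+/// single field equal to `0x414243` (see `test_pack_be_bytes_3_bytes` below).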
+///
+/// Packing scheme (31 bytes per field, reversed storage order):
+/// - N = (len + 30) / 31 field elements
+/// - First chunk (may be shorter): `bytes[0..first_chunk_size]` → `result[N-1]`
+/// - Remaining chunks (31 bytes each): stored in `result[N-2]`, `result[N-3]`,
+///   ..., `result[0]`
+///
+/// Each chunk is interpreted as a big-endian integer.
+pub fn pack_be_bytes_into_fields(bytes: &[u8]) -> Vec<Fr> {
+    let n_bytes = bytes.len();
+    if n_bytes == 0 {
+        return vec![];
+    }
+    let n = (n_bytes + 30) / 31;
+    let mut result = vec![Fr::from(0u64); n];
+
+    let mut k = 0usize;
+
+    // First chunk: may be shorter than 31 bytes
+    // first_chunk_size = 31 - (N*31 - NBytes) = NBytes - 31*(N-1)
+    let first_chunk_size = 31 - (n * 31 - n_bytes);
+    let mut limb = Fr::from(0u64);
+    for _ in 0..first_chunk_size {
+        limb *= Fr::from(256u64);
+        limb += Fr::from(bytes[k] as u64);
+        k += 1;
+    }
+    result[n - 1] = limb;
+
+    // Remaining chunks: each exactly 31 bytes
+    for i in 1..n {
+        let mut limb = Fr::from(0u64);
+        for _ in 0..31 {
+            limb *= Fr::from(256u64);
+            limb += Fr::from(bytes[k] as u64);
+            k += 1;
+        }
+        result[n - i - 1] = limb;
+    }
+
+    result
+}
+
+/// Compute SOD hash: Poseidon2(pack_be_bytes_into_fields(e_content)).
+///
+/// Matches Noir's `calculate_sod_hash(e_content)` from
+/// `utils/commitment/common/src/lib.nr:111-117`.
+pub fn calculate_sod_hash(e_content: &[u8]) -> Fr {
+    let packed = pack_be_bytes_into_fields(e_content);
+    poseidon2_hash(&packed)
+}
+
+/// Compute circuit 1 commitment: Poseidon2(salt, packed_country, packed_tbs).
+///
+/// Matches Noir's `hash_salt_country_tbs()` from
+/// `utils/commitment/common/src/lib.nr:46-65`.
+///
+/// Field layout (26 fields for TBS_MAX_SIZE=720):
+/// `[0]` = salt
+/// `[1]` = packed country (3 bytes → 1 field)
+/// `[2..26]` = packed TBS certificate (720 bytes → 24 fields)
+pub fn hash_salt_country_tbs(
+    salt: &str,
+    country: &[u8],
+    tbs_certificate: &[u8],
+) -> Result<Fr, PassportError> {
+    let mut fields = Vec::new();
+    fields.push(parse_hex_to_field(salt)?);
+    fields.extend(pack_be_bytes_into_fields(country));
+    fields.extend(pack_be_bytes_into_fields(tbs_certificate));
+    Ok(poseidon2_hash(&fields))
+}
+
+/// Compute private nullifier: Poseidon2(packed_dg1, packed_e_content,
+/// packed_sod_sig).
+///
+/// Matches Noir's `calculate_private_nullifier()` from
+/// `utils/commitment/common/src/lib.nr:81-109`.
+///
+/// Field layout (20 fields for DG1=95, ECONTENT=200, SIG=256):
+/// `[0..4]` = packed DG1 (95 bytes → 4 fields)
+/// `[4..11]` = packed eContent (200 bytes → 7 fields)
+/// `[11..20]` = packed SOD signature (256 bytes → 9 fields)
+pub fn calculate_private_nullifier(dg1: &[u8], e_content: &[u8], sod_signature: &[u8]) -> Fr {
+    let mut fields = Vec::new();
+    fields.extend(pack_be_bytes_into_fields(dg1));
+    fields.extend(pack_be_bytes_into_fields(e_content));
+    fields.extend(pack_be_bytes_into_fields(sod_signature));
+    poseidon2_hash(&fields)
+}
+
+/// Compute circuit 2 commitment: Poseidon2(salt, country, signed_attr, sa_size,
+/// dg1, e_content, nullifier).
+///
+/// Matches Noir's
+/// `hash_salt_country_signed_attr_dg1_e_content_private_nullifier<...>()` from
+/// `utils/commitment/common/src/lib.nr:119-161`.
+///
+/// Field layout (22 fields for SA=200, DG1=95, ECONTENT=200):
+/// `[0]` = salt
+/// `[1]` = packed country (3 bytes → 1 field)
+/// `[2..9]` = packed signed_attributes (200 bytes → 7 fields)
+/// `[9]` = signed_attr_size as field
+/// `[10..14]` = packed DG1 (95 bytes → 4 fields)
+/// `[14..21]` = packed eContent (200 bytes → 7 fields)
+/// `[21]` = private_nullifier
+pub fn hash_salt_country_sa_dg1_econtent_nullifier(
+    salt: &str,
+    country: &[u8],
+    signed_attr: &[u8],
+    signed_attr_size: u64,
+    dg1: &[u8],
+    e_content: &[u8],
+    private_nullifier: Fr,
+) -> Result<Fr, PassportError> {
+    let mut fields = Vec::new();
+    fields.push(parse_hex_to_field(salt)?);
+    fields.extend(pack_be_bytes_into_fields(country));
+    fields.extend(pack_be_bytes_into_fields(signed_attr));
+    fields.push(Fr::from(signed_attr_size));
+    fields.extend(pack_be_bytes_into_fields(dg1));
+    fields.extend(pack_be_bytes_into_fields(e_content));
+    fields.push(private_nullifier);
+    Ok(poseidon2_hash(&fields))
+}
+
+/// Commit to a data chunk: Poseidon2(salt, packed_data).
+///
+/// Matches Noir's `commit_to_data_chunk(salt, data)` from
+/// `partial_sha256/src/lib.nr`.
+///
+/// Field layout for CHUNK1_SIZE=640:
+/// `[0]` = salt
+/// `[1..22]` = pack_be_bytes_into_fields(data) (640 bytes → 21 fields)
+pub fn commit_to_data_chunk(salt: &str, data: &[u8]) -> Result<Fr, PassportError> {
+    let mut fields = Vec::new();
+    fields.push(parse_hex_to_field(salt)?);
+    fields.extend(pack_be_bytes_into_fields(data));
+    Ok(poseidon2_hash(&fields))
+}
+
+/// Commit to SHA256 state + data commitment: Poseidon2(salt, state[0..7],
+/// processed_bytes, data_commitment).
+///
+/// Matches Noir's `commit_to_sha256_state_and_data(salt, state,
+/// processed_bytes, data_commitment)` from `partial_sha256/src/lib.nr`.
+///
+/// Field layout (always 11 fields):
+/// `[0]` = salt
+/// `[1..9]` = `state[0]`, `state[1]`, ..., `state[7]` (each u32 → Field)
+/// `[9]` = processed_bytes as Field
+/// `[10]` = data_commitment
+pub fn commit_to_sha256_state_and_data(
+    salt: &str,
+    state: &[u32; 8],
+    processed_bytes: u32,
+    data_commitment: Fr,
+) -> Result<Fr, PassportError> {
+    let mut fields = Vec::with_capacity(11);
+    fields.push(parse_hex_to_field(salt)?);
+    for &s in state.iter() {
+        fields.push(Fr::from(s as u64));
+    }
+    fields.push(Fr::from(processed_bytes as u64));
+    fields.push(data_commitment);
+    Ok(poseidon2_hash(&fields))
+}
+
+/// Compute h_dg1: Poseidon2([r_dg1, packed_dg1[0..4]]).
+///
+/// Matches Noir's `Poseidon2::hash([r_dg1].concat(packed_dg1), 5)` from
+/// `t_attest/src/main.nr` and `t_add_integrity_commit/src/main.nr`.
+pub fn calculate_h_dg1(r_dg1: &str, dg1: &[u8]) -> Result<Fr, PassportError> {
+    let mut fields = Vec::with_capacity(5);
+    fields.push(parse_hex_to_field(r_dg1)?);
+    fields.extend(pack_be_bytes_into_fields(dg1));
+    Ok(poseidon2_hash(&fields))
+}
+
+/// Compute Merkle leaf: Poseidon2([h_dg1, sod_hash]).
+///
+/// Matches Noir's `Poseidon2::hash([h_dg1, sod_hash], 2)` from
+/// `t_attest/src/main.nr` and `t_add_integrity_commit/src/main.nr`.
+pub fn calculate_leaf(h_dg1: Fr, sod_hash: Fr) -> Fr {
+    poseidon2_hash(&[h_dg1, sod_hash])
+}
+
+/// Compute Merkle root from leaf, index, and sibling path.
+///
+/// Translates Noir's `compute_merkle_root(leaf, index, hash_path)` from
+/// `zkpassport_libs/commitment/common/src/lib.nr:315-328`.
+///
+/// Binary Merkle tree with Poseidon2 hashing. Bit `i` of `leaf_index` (LE)
+/// determines whether `current` is the left or right child at level `i`.
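+///
+/// For example, with `leaf_index = 5` (binary `101`), the leaf is hashed as
+/// the right child at level 0, the left child at level 1, and the right
+/// child at level 2.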
+pub fn compute_merkle_root(leaf: Fr, leaf_index: u64, merkle_path: &[Fr]) -> Fr { + let mut current = leaf; + for (i, sibling) in merkle_path.iter().enumerate() { + let bit = (leaf_index >> i) & 1; + let (left, right) = if bit == 0 { + (current, *sibling) + } else { + (*sibling, current) + }; + current = poseidon2_hash(&[left, right]); + } + current +} + +/// Convert a BN254 field element to a 0x-prefixed hex string (64 hex chars). +pub fn field_to_hex_string(f: &Fr) -> String { + let bytes = f.into_bigint().to_bytes_be(); + format!("0x{}", hex::encode(bytes)) +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_pack_be_bytes_3_bytes() { + // 3 bytes → 1 field element: N = (3+30)/31 = 1 + // first_chunk_size = 3 - 31*(1-1) = 3 + let bytes = [0x41u8, 0x42, 0x43]; // "ABC" + let packed = pack_be_bytes_into_fields(&bytes); + assert_eq!(packed.len(), 1); + // 0x41*256^2 + 0x42*256 + 0x43 = 65*65536 + 66*256 + 67 = 4276803 + assert_eq!(packed[0], Fr::from(4276803u64)); + } + + #[test] + fn test_pack_be_bytes_32_bytes() { + // 32 bytes → 2 field elements: N = (32+30)/31 = 2 + // first_chunk_size = 32 - 31*(2-1) = 1 + let mut bytes = [0u8; 32]; + bytes[0] = 0xff; // First chunk: 1 byte → result[1] + for i in 1..32 { + bytes[i] = i as u8; // Second chunk: 31 bytes → result[0] + } + let packed = pack_be_bytes_into_fields(&bytes); + assert_eq!(packed.len(), 2); + assert_eq!(packed[1], Fr::from(0xffu64)); // Short first chunk + } + + #[test] + fn test_pack_be_bytes_200_bytes() { + // 200 bytes → 7 field elements (matching e_content size) + let bytes = [0u8; 200]; + let packed = pack_be_bytes_into_fields(&bytes); + assert_eq!(packed.len(), 7); + } + + #[test] + fn test_calculate_sod_hash_known_good() { + // e_content from both known-good and test TOML files (identical) + let e_content: [u8; 200] = [ + 54, 197, 174, 86, 62, 194, 237, 211, 184, 91, 92, 169, 195, 149, 233, 156, 60, 80, 224, + 124, 161, 170, 204, 239, 154, 92, 165, 10, 81, 42, 90, 7, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + ]; + let hash = calculate_sod_hash(&e_content); + let hash_hex = field_to_hex_string(&hash); + assert_eq!( + hash_hex, "0x0f7f8bb032ad068e1c3b717ec1e7020d3537e20688af7bd7a7ae51df72f368bc", + "sod_hash mismatch with known-good value from t_attest.toml" + ); + } + + #[test] + fn test_parse_hex_to_field_small() { + // "0x2" should parse to Fr(2) + assert_eq!(parse_hex_to_field("0x2").unwrap(), Fr::from(2u64)); + assert_eq!(parse_hex_to_field("0x3").unwrap(), Fr::from(3u64)); + } + + #[test] + fn test_parse_hex_to_field_roundtrip() { + let hex = "0x0f7f8bb032ad068e1c3b717ec1e7020d3537e20688af7bd7a7ae51df72f368bc"; + let f = parse_hex_to_field(hex).unwrap(); + let back = field_to_hex_string(&f); + assert_eq!(back, hex); + } + + #[test] + fn test_field_count_sanity() { + // Verify field counts match Noir's expectations + assert_eq!(pack_be_bytes_into_fields(&[0u8; 3]).len(), 1); // country + assert_eq!(pack_be_bytes_into_fields(&[0u8; 720]).len(), 24); // tbs_certificate 720 + assert_eq!(pack_be_bytes_into_fields(&[0u8; 1300]).len(), 42); 
// tbs_certificate 1300 + assert_eq!(pack_be_bytes_into_fields(&[0u8; 640]).len(), 21); // chunk1 + assert_eq!(pack_be_bytes_into_fields(&[0u8; 95]).len(), 4); // dg1 + assert_eq!(pack_be_bytes_into_fields(&[0u8; 200]).len(), 7); // e_content/signed_attr + assert_eq!(pack_be_bytes_into_fields(&[0u8; 256]).len(), 9); // sod_signature + } + + #[test] + fn test_commit_to_sha256_state_and_data_matches_benchmark() { + // Use benchmark data from tbs_1300 to verify the commitment chain: + // t_add_dsc_hash_1300.toml provides: salt="0x1", chunk1 (640 bytes) + // t_add_dsc_verify_1300.toml provides: comm_in, state1 + // + // The dsc_hash circuit computes: + // data_comm1 = commit_to_data_chunk("0x1", chunk1) + // comm_out = commit_to_sha256_state_and_data("0x1", state1, 640, + // data_comm1) This comm_out must equal the comm_in in + // t_add_dsc_verify_1300.toml. + + let chunk1: [u8; 640] = [ + 48, 130, 1, 10, 2, 130, 1, 1, 0, 175, 129, 169, 48, 75, 201, 148, 9, 44, 101, 74, 102, + 208, 170, 80, 87, 167, 158, 254, 182, 81, 253, 14, 124, 113, 45, 48, 144, 36, 5, 248, + 31, 93, 49, 75, 149, 184, 114, 188, 161, 128, 33, 61, 152, 20, 57, 11, 226, 80, 82, 80, + 10, 209, 152, 144, 112, 231, 229, 31, 130, 146, 213, 195, 46, 163, 187, 24, 68, 79, 56, + 124, 205, 49, 44, 70, 146, 221, 223, 68, 147, 89, 27, 16, 80, 111, 178, 109, 166, 123, + 27, 29, 37, 120, 192, 202, 246, 6, 132, 249, 14, 254, 239, 204, 225, 127, 186, 207, + 215, 178, 142, 60, 232, 125, 83, 126, 240, 68, 243, 79, 119, 91, 83, 101, 115, 122, 64, + 30, 91, 221, 154, 108, 225, 93, 137, 17, 211, 26, 118, 192, 139, 66, 108, 134, 167, + 187, 106, 71, 227, 24, 98, 192, 198, 153, 49, 239, 67, 212, 101, 101, 4, 76, 153, 212, + 177, 159, 190, 78, 10, 224, 173, 157, 91, 210, 237, 178, 115, 123, 245, 116, 202, 34, + 222, 78, 153, 81, 155, 248, 151, 112, 213, 128, 252, 173, 11, 165, 189, 128, 245, 216, + 176, 34, 8, 89, 234, 4, 237, 161, 225, 16, 206, 84, 251, 235, 84, 100, 148, 53, 18, + 159, 134, 159, 65, 197, 221, 254, 23, 118, 144, 109, 54, 163, 163, 137, 13, 21, 182, + 72, 183, 104, 190, 89, 8, 248, 244, 38, 62, 248, 56, 97, 149, 68, 81, 218, 203, 203, + 183, 2, 3, 1, 0, 1, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, + 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, + 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, + 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, + 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, + 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, + 134, 135, 136, 137, 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, 148, 149, 150, + 151, 152, 153, 154, 155, 156, 157, 158, 159, 160, 161, 162, 163, 164, 165, 166, 167, + 168, 169, 170, 171, 172, 173, 174, 175, 176, 177, 178, 179, 180, 181, 182, 183, 184, + 185, 186, 187, 188, 189, 190, 191, 192, 193, 194, 195, 196, 197, 198, 199, 200, 201, + 202, 203, 204, 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, 217, 218, + 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 229, 230, 231, 232, 233, 234, 235, + 236, 237, 238, 239, 240, 241, 242, 243, 244, 245, 246, 247, 248, 249, 250, 251, 252, + 253, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, + 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, + 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, + 68, 69, 70, 
71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, + 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, + 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, + 126, 127, 128, 129, 130, 131, 132, 133, 134, + ]; + let state1: [u32; 8] = [ + 3828948639, 4073271942, 433182166, 3811311365, 3566743306, 1923568254, 3109579459, + 1110735471, + ]; + + let data_comm1 = commit_to_data_chunk("0x1", &chunk1).unwrap(); + let comm_out = commit_to_sha256_state_and_data("0x1", &state1, 640, data_comm1).unwrap(); + let comm_out_hex = field_to_hex_string(&comm_out); + + assert_eq!( + comm_out_hex, "0x045433920bc35680c37f22815da747e86bf7974625da04b1f015af21e42446b1", + "commit_to_sha256_state_and_data output mismatch with benchmark comm_in" + ); + } + + #[test] + fn test_compute_merkle_root_empty_tree() { + // Compute merkle root for leaf_index=0, merkle_path=all zeros (first leaf in + // empty tree). This exercises the full chain: h_dg1 -> leaf -> + // merkle_root. + let r_dg1 = "0x1234567890abcdef1234567890abcdef1234567890abcdef1234567890abcdef"; + let dg1 = [0u8; 95]; // placeholder DG1 + + let e_content: [u8; 200] = [0u8; 200]; // placeholder eContent + let sod_hash = calculate_sod_hash(&e_content); + + let h_dg1 = calculate_h_dg1(r_dg1, &dg1).unwrap(); + let leaf = calculate_leaf(h_dg1, sod_hash); + + // leaf_index=0, all-zero path (24 levels) + let merkle_path = vec![Fr::from(0u64); 24]; + let root = compute_merkle_root(leaf, 0, &merkle_path); + + // The root should be deterministic and non-zero + assert_ne!(root, Fr::from(0u64), "merkle root should not be zero"); + + // Verify consistency: computing the same root again gives the same value + let root2 = compute_merkle_root(leaf, 0, &merkle_path); + assert_eq!(root, root2, "merkle root should be deterministic"); + } +} diff --git a/playground/passport-input-gen/src/lib.rs b/playground/passport-input-gen/src/lib.rs index 4f78c8d9..8841866a 100644 --- a/playground/passport-input-gen/src/lib.rs +++ b/playground/passport-input-gen/src/lib.rs @@ -1,13 +1,16 @@ +pub mod commitment; pub mod mock_generator; pub mod mock_keys; mod parser; +pub mod partial_sha256; +pub mod poseidon2; pub use crate::parser::{binary::Binary, sod::SOD}; use { crate::parser::{ types::{ - PassportError, SignatureAlgorithmName, MAX_DG1_SIZE, MAX_ECONTENT_SIZE, - MAX_SIGNED_ATTRIBUTES_SIZE, MAX_TBS_SIZE, SIG_BYTES, + PassportError, SignatureAlgorithmName, CHUNK1_SIZE, MAX_DG1_SIZE, MAX_ECONTENT_SIZE, + MAX_SIGNED_ATTRIBUTES_SIZE, MAX_TBS_SIZE, MAX_TBS_SIZE_1300, SIG_BYTES, TREE_DEPTH, }, utils::{ find_offset, fit, load_csca_public_keys, to_fixed_array, to_u32, ASN1_HEADER_LEN, @@ -24,45 +27,421 @@ use { std::{fmt::Write as _, path::Path}, }; -/// Parsed passport data -pub struct PassportReader { - dg1: Binary, - sod: SOD, - /// Indicates whether this reader contains mock data or real passport data - mockdata: bool, - /// Optional CSCA public key when using mock data - csca_pubkey: Option, +// ============================================================================ +// Constants +// ============================================================================ + +/// Zero BN254 field element as a 0x-prefixed hex string (used as sentinel / +/// default for Merkle fields). 
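+///
+/// `MerkleAgeBaseConfig::build_attest` treats a `merkle_root` equal to this
+/// sentinel as "not provided" and recomputes the root locally from the leaf
+/// and `merkle_path`.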
+pub const ZERO_FIELD: &str = "0x0000000000000000000000000000000000000000000000000000000000000000";
+
+// ============================================================================
+// Configuration
+// ============================================================================
+
+/// Shared application-level parameters for all merkle_age_check circuit chains.
+/// Contains commitment salts, Merkle tree data, and attestation parameters.
+pub struct MerkleAgeBaseConfig {
+    /// Salt for the penultimate commitment stage
+    pub salt_1: String,
+    /// Salt for the final commitment stage
+    pub salt_2: String,
+    /// Blinding factor for DG1 Poseidon2 commitment
+    pub r_dg1: String,
+    /// Current date as unix timestamp
+    pub current_date: u64,
+    /// Minimum age to prove
+    pub min_age_required: u8,
+    /// Maximum age (0 = no upper bound)
+    pub max_age_required: u8,
+    /// Service scope hash (H(domain_name))
+    pub service_scope: String,
+    /// Service sub-scope hash (H(purpose))
+    pub service_subscope: String,
+    /// Optional nullifier secret for salting
+    pub nullifier_secret: String,
+    /// Merkle tree root (from sequencer)
+    pub merkle_root: String,
+    /// Leaf index in Merkle tree
+    pub leaf_index: String,
+    /// Merkle path sibling hashes (TREE_DEPTH elements)
+    pub merkle_path: Vec<String>,
+}
+
+impl Default for MerkleAgeBaseConfig {
+    fn default() -> Self {
+        Self {
+            salt_1: "0x2".to_string(),
+            salt_2: "0x3".to_string(),
+            r_dg1: "0x1234567890abcdef1234567890abcdef1234567890abcdef1234567890abcdef"
+                .to_string(),
+            current_date: 1735689600, // Jan 1, 2025
+            min_age_required: 18,
+            max_age_required: 0,
+            service_scope: ZERO_FIELD.to_string(),
+            service_subscope: ZERO_FIELD.to_string(),
+            nullifier_secret: ZERO_FIELD.to_string(),
+            merkle_root: ZERO_FIELD.to_string(),
+            leaf_index: "0".to_string(),
+            merkle_path: vec![ZERO_FIELD.to_string(); TREE_DEPTH],
+        }
+    }
+}
 
-/// Circuit inputs for Noir
-pub struct CircuitInputs {
+impl MerkleAgeBaseConfig {
+    /// Build AttestInputs from the shared config and passport data, computing
+    /// the merkle root if the sentinel zero value is present.
+    fn build_attest(
+        self,
+        dg1_padded: &[u8; MAX_DG1_SIZE],
+        computed_sod_hash: ark_bn254::Fr,
+        sod_hash_hex: String,
+    ) -> Result<AttestInputs, PassportError> {
+        let merkle_root = {
+            if self.merkle_root == ZERO_FIELD {
+                let h_dg1 = commitment::calculate_h_dg1(&self.r_dg1, dg1_padded)?;
+                let leaf = commitment::calculate_leaf(h_dg1, computed_sod_hash);
+                let leaf_idx: u64 =
+                    self.leaf_index
+                        .parse()
+                        .map_err(|_| PassportError::InvalidLeafIndex {
+                            value: self.leaf_index.clone(),
+                        })?;
+                let path_fields: Vec<ark_bn254::Fr> = self
+                    .merkle_path
+                    .iter()
+                    .map(|s| commitment::parse_hex_to_field(s))
+                    .collect::<Result<Vec<_>, _>>()?;
+                let root = commitment::compute_merkle_root(leaf, leaf_idx, &path_fields);
+                commitment::field_to_hex_string(&root)
+            } else {
+                self.merkle_root
+            }
+        };
+
+        Ok(AttestInputs {
+            root: merkle_root,
+            current_date: self.current_date,
+            service_scope: self.service_scope,
+            service_subscope: self.service_subscope,
+            dg1: *dg1_padded,
+            r_dg1: self.r_dg1,
+            sod_hash: sod_hash_hex,
+            leaf_index: self.leaf_index,
+            merkle_path: self.merkle_path,
+            min_age_required: self.min_age_required,
+            max_age_required: self.max_age_required,
+            nullifier_secret: self.nullifier_secret,
+        })
+    }
+}
+
+/// Application-level parameters for the 4-circuit merkle_age_check TBS-720
+/// chain.
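+///
+/// A minimal construction sketch (values are illustrative placeholders):
+///
+/// ```ignore
+/// let config = MerkleAge720Config {
+///     base: MerkleAgeBaseConfig {
+///         current_date: 1735689600, // Jan 1, 2025
+///         min_age_required: 18,
+///         ..Default::default()
+///     },
+/// };
+/// ```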
+pub struct MerkleAge720Config { + /// Shared configuration fields + pub base: MerkleAgeBaseConfig, +} + +impl Default for MerkleAge720Config { + fn default() -> Self { + Self { + base: MerkleAgeBaseConfig::default(), + } + } +} + +/// Application-level parameters for the 5-circuit merkle_age_check TBS-1300 +/// chain. +/// +/// The TBS-1300 chain has 3 salts (vs 2 for TBS-720) because DSC verification +/// is split into two circuits (dsc_hash + dsc_verify). +pub struct MerkleAge1300Config { + /// Salt for circuits 1+2 (dsc_hash & dsc_verify input): "0x1" + pub salt_0: String, + /// Shared configuration fields + pub base: MerkleAgeBaseConfig, +} + +impl Default for MerkleAge1300Config { + fn default() -> Self { + Self { + salt_0: "0x1".to_string(), + base: MerkleAgeBaseConfig::default(), + } + } +} + +// ============================================================================ +// Circuit input structs +// ============================================================================ + +/// Inputs for t_add_dsc_720: Verify CSCA signed DSC certificate (720-byte TBS) +#[derive(serde::Serialize)] +pub struct AddDsc720Inputs { + /// CSCA public key modulus (RSA-4096, 512 bytes) + #[serde(serialize_with = "byte_array::serialize")] + pub csc_pubkey: [u8; SIG_BYTES * 2], + /// Salt for commitment + pub salt: String, + /// 3-character country code from passport + pub country: String, + /// DSC TBS certificate padded to 720 bytes + #[serde(serialize_with = "byte_array::serialize")] + pub tbs_certificate: [u8; MAX_TBS_SIZE], + /// Barrett reduction parameter for CSCA modulus (513 bytes) + #[serde(serialize_with = "byte_array::serialize")] + pub csc_pubkey_redc_param: [u8; SIG_BYTES * 2 + 1], + /// CSCA signature over the DSC TBS certificate (512 bytes) + #[serde(serialize_with = "byte_array::serialize")] + pub dsc_signature: [u8; SIG_BYTES * 2], + /// RSA exponent (CSCA) + pub exponent: u32, + /// Actual TBS certificate length before padding + pub tbs_certificate_len: u32, +} + +/// Inputs for t_add_id_data_720: Verify DSC signed passport data (720-byte TBS) +#[derive(serde::Serialize)] +pub struct AddIdData720Inputs { + /// Commitment from circuit 1 (placeholder until circuit 1 runs) + pub comm_in: String, + /// Input salt (must match circuit 1's salt) + pub salt_in: String, + /// Output salt for this circuit's commitment + pub salt_out: String, + /// DG1 Machine Readable Zone data (95 bytes) + #[serde(serialize_with = "byte_array::serialize")] pub dg1: [u8; MAX_DG1_SIZE], - pub dg1_padded_length: usize, - pub current_date: u64, + /// DSC public key modulus (RSA-2048, 256 bytes) + #[serde(serialize_with = "byte_array::serialize")] + pub dsc_pubkey: [u8; SIG_BYTES], + /// Barrett reduction parameter for DSC modulus (257 bytes) + #[serde(serialize_with = "byte_array::serialize")] + pub dsc_pubkey_redc_param: [u8; SIG_BYTES + 1], + /// Byte offset of DSC pubkey within TBS certificate + pub dsc_pubkey_offset_in_dsc_cert: u32, + /// DSC signature over signed_attributes (256 bytes) + #[serde(serialize_with = "byte_array::serialize")] + pub sod_signature: [u8; SIG_BYTES], + /// DSC TBS certificate padded to 720 bytes + #[serde(serialize_with = "byte_array::serialize")] + pub tbs_certificate: [u8; MAX_TBS_SIZE], + /// Signed attributes from SOD (200 bytes) + #[serde(serialize_with = "byte_array::serialize")] + pub signed_attributes: [u8; MAX_SIGNED_ATTRIBUTES_SIZE], + /// Actual signed attributes size + pub signed_attributes_size: u64, + /// RSA exponent (DSC) + pub exponent: u32, + /// eContent hash 
values (200 bytes) + #[serde(serialize_with = "byte_array::serialize")] + pub e_content: [u8; MAX_ECONTENT_SIZE], +} + +/// Inputs for t_add_integrity_commit: Verify data integrity + generate Merkle +/// leaf +#[derive(serde::Serialize)] +pub struct AddIntegrityCommitInputs { + /// Commitment from circuit 2 (placeholder until circuit 2 runs) + pub comm_in: String, + /// Input salt (must match circuit 2's output salt) + pub salt_in: String, + /// DG1 Machine Readable Zone data (95 bytes) + #[serde(serialize_with = "byte_array::serialize")] + pub dg1: [u8; MAX_DG1_SIZE], + /// DG1 padded length for SHA256 + pub dg1_padded_length: u64, + /// Offset of DG1 hash within eContent + pub dg1_hash_offset: u32, + /// Signed attributes from SOD (200 bytes) + #[serde(serialize_with = "byte_array::serialize")] + pub signed_attributes: [u8; MAX_SIGNED_ATTRIBUTES_SIZE], + /// Actual signed attributes size + pub signed_attributes_size: u32, + /// eContent hash values (200 bytes) + #[serde(serialize_with = "byte_array::serialize")] + pub e_content: [u8; MAX_ECONTENT_SIZE], + /// Actual eContent length + pub e_content_len: u32, + /// Pre-computed private nullifier (Poseidon2 hash) + pub private_nullifier: String, + /// Blinding factor for DG1 commitment + pub r_dg1: String, +} + +/// Inputs for t_attest: Age attestation with Merkle tree membership proof +#[derive(serde::Serialize)] +pub struct AttestInputs { + /// Current Merkle tree root (from sequencer) + pub root: String, + /// Current date as unix timestamp + pub current_date: u64, + /// Service scope: H(domain_name) + pub service_scope: String, + /// Service sub-scope: H(purpose) + pub service_subscope: String, + /// DG1 Machine Readable Zone data (95 bytes) + #[serde(serialize_with = "byte_array::serialize")] + pub dg1: [u8; MAX_DG1_SIZE], + /// Blinding factor from registration + pub r_dg1: String, + /// SOD hash: Poseidon2(packed_e_content) + pub sod_hash: String, + /// Position in Merkle tree + pub leaf_index: String, + /// Sibling hashes for Merkle path (TREE_DEPTH elements) + pub merkle_path: Vec, + /// Minimum age to prove pub min_age_required: u8, + /// Maximum age (0 = no upper bound) pub max_age_required: u8, - pub passport_validity_contents: PassportValidityContent, + /// Optional secret for nullifier salting + pub nullifier_secret: String, } -/// Extracted validity contents from SOD -pub struct PassportValidityContent { - pub signed_attributes: [u8; MAX_SIGNED_ATTRIBUTES_SIZE], - pub signed_attributes_size: usize, - pub econtent: [u8; MAX_ECONTENT_SIZE], - pub econtent_len: usize, +/// Container for all 4 circuit inputs in the merkle_age_check TBS-720 chain +pub struct MerkleAge720Inputs { + pub add_dsc: AddDsc720Inputs, + pub add_id_data: AddIdData720Inputs, + pub add_integrity: AddIntegrityCommitInputs, + pub attest: AttestInputs, +} + +// --- TBS-1300 circuit input structs (5-circuit chain) --- + +/// Inputs for t_add_dsc_hash_1300: Process first 640 bytes of TBS, output +/// SHA256 state commitment +#[derive(serde::Serialize)] +pub struct AddDscHash1300Inputs { + /// Salt for commitment (shared with dsc_verify) + pub salt: String, + /// First 640 bytes of TBS certificate + #[serde(serialize_with = "byte_array::serialize")] + pub chunk1: [u8; CHUNK1_SIZE], +} + +/// Inputs for t_add_dsc_verify_1300: Continue SHA256, verify RSA, output +/// country+TBS commitment +#[derive(serde::Serialize)] +pub struct AddDscVerify1300Inputs { + /// Commitment from circuit 1 (SHA256 state + data commitment) + pub comm_in: String, + /// CSCA public key 
modulus (RSA-4096, 512 bytes) + #[serde(serialize_with = "byte_array::serialize")] + pub csc_pubkey: [u8; SIG_BYTES * 2], + /// Salt (same as dsc_hash's salt) + pub salt: String, + /// 3-character country code + pub country: String, + /// SHA256 intermediate state from processing chunk1 + pub state1: [u32; 8], + /// Full TBS certificate padded to 1300 bytes + #[serde(serialize_with = "byte_array::serialize")] + pub tbs_certificate: [u8; MAX_TBS_SIZE_1300], + /// Actual TBS certificate length before padding + pub tbs_certificate_len: u32, + /// Barrett reduction parameter for CSCA modulus (513 bytes) + #[serde(serialize_with = "byte_array::serialize")] + pub csc_pubkey_redc_param: [u8; SIG_BYTES * 2 + 1], + /// CSCA signature over the DSC TBS certificate (512 bytes) + #[serde(serialize_with = "byte_array::serialize")] + pub dsc_signature: [u8; SIG_BYTES * 2], + /// RSA exponent (CSCA) + pub exponent: u32, + /// Output salt for this circuit's commitment + pub salt_out: String, +} + +/// Inputs for t_add_id_data_1300: Verify DSC signed passport data (1300-byte +/// TBS) +#[derive(serde::Serialize)] +pub struct AddIdData1300Inputs { + /// Commitment from circuit 2 + pub comm_in: String, + /// Input salt (must match circuit 2's output salt) + pub salt_in: String, + /// Output salt for this circuit's commitment + pub salt_out: String, + /// DG1 Machine Readable Zone data (95 bytes) + #[serde(serialize_with = "byte_array::serialize")] + pub dg1: [u8; MAX_DG1_SIZE], + /// DSC public key modulus (RSA-2048, 256 bytes) + #[serde(serialize_with = "byte_array::serialize")] pub dsc_pubkey: [u8; SIG_BYTES], - pub dsc_barrett_mu: [u8; SIG_BYTES + 1], - pub dsc_signature: [u8; SIG_BYTES], - pub dsc_rsa_exponent: u32, - pub csc_pubkey: [u8; SIG_BYTES * 2], - pub csc_barrett_mu: [u8; (SIG_BYTES * 2) + 1], - pub dsc_cert_signature: [u8; SIG_BYTES * 2], - pub csc_rsa_exponent: u32, - pub dg1_hash_offset: usize, - pub econtent_hash_offset: usize, - pub dsc_pubkey_offset_in_dsc_cert: usize, - pub dsc_cert: [u8; MAX_TBS_SIZE], - pub dsc_cert_len: usize, + /// Barrett reduction parameter for DSC modulus (257 bytes) + #[serde(serialize_with = "byte_array::serialize")] + pub dsc_pubkey_redc_param: [u8; SIG_BYTES + 1], + /// Byte offset of DSC pubkey within TBS certificate + pub dsc_pubkey_offset_in_dsc_cert: u32, + /// DSC signature over signed_attributes (256 bytes) + #[serde(serialize_with = "byte_array::serialize")] + pub sod_signature: [u8; SIG_BYTES], + /// DSC TBS certificate padded to 1300 bytes + #[serde(serialize_with = "byte_array::serialize")] + pub tbs_certificate: [u8; MAX_TBS_SIZE_1300], + /// Signed attributes from SOD (200 bytes) + #[serde(serialize_with = "byte_array::serialize")] + pub signed_attributes: [u8; MAX_SIGNED_ATTRIBUTES_SIZE], + /// Actual signed attributes size + pub signed_attributes_size: u64, + /// RSA exponent (DSC) + pub exponent: u32, + /// eContent hash values (200 bytes) + #[serde(serialize_with = "byte_array::serialize")] + pub e_content: [u8; MAX_ECONTENT_SIZE], +} + +/// Container for all 5 circuit inputs in the merkle_age_check TBS-1300 chain +pub struct MerkleAge1300Inputs { + pub add_dsc_hash: AddDscHash1300Inputs, + pub add_dsc_verify: AddDscVerify1300Inputs, + pub add_id_data: AddIdData1300Inputs, + pub add_integrity: AddIntegrityCommitInputs, + pub attest: AttestInputs, +} + +// ============================================================================ +// Extracted passport data (shared between 720/1300 paths) +// 
============================================================================
+
+/// Common passport data extracted from DG1 + SOD, shared by both the 720 and
+/// 1300 circuit pipelines.
+struct PassportData {
+    dg1_padded: [u8; MAX_DG1_SIZE],
+    dg1_len: usize,
+    signed_attrs: [u8; MAX_SIGNED_ATTRIBUTES_SIZE],
+    signed_attributes_size: usize,
+    econtent: [u8; MAX_ECONTENT_SIZE],
+    econtent_len: usize,
+    dsc_modulus: [u8; SIG_BYTES],
+    dsc_exponent: u32,
+    dsc_barrett: [u8; SIG_BYTES + 1],
+    sod_signature: [u8; SIG_BYTES],
+    csca_modulus: [u8; SIG_BYTES * 2],
+    csca_exponent: u32,
+    csca_barrett: [u8; SIG_BYTES * 2 + 1],
+    csca_signature: [u8; SIG_BYTES * 2],
+    country: String,
+    dg1_hash_offset: usize,
+    private_nullifier: ark_bn254::Fr,
+    private_nullifier_hex: String,
+    computed_sod_hash: ark_bn254::Fr,
+    sod_hash_hex: String,
+}
+
+// ============================================================================
+// PassportReader
+// ============================================================================
+
+/// Parsed passport data
+pub struct PassportReader {
+    dg1: Binary,
+    sod: SOD,
+    /// Indicates whether this reader contains mock data or real passport data
+    mockdata: bool,
+    /// Optional CSCA public key when using mock data
+    csca_pubkey: Option<RsaPublicKey>,
 }
 
 impl PassportReader {
@@ -93,7 +472,7 @@ impl PassportReader {
         Ok((padded, len, econtent_bytes))
     }
 
-    /// Extract DSC public key, exponent, Barrett mu, and signature
+    /// Extract DSC public key, exponent, Barrett mu, and SOD signature
     fn extract_dsc(
         &self,
     ) -> Result<([u8; SIG_BYTES], u32, [u8; SIG_BYTES + 1], [u8; SIG_BYTES]), PassportError> {
@@ -121,10 +500,19 @@ impl PassportReader {
         Ok((modulus, exponent, barrett, signature))
     }
 
-    /// Extract CSCA public key, exponent, Barrett mu, and signature
-    fn extract_csca(
+    /// Decode a base64-encoded CSCA public key from DER format
+    fn decode_csca_pubkey(b64: &str) -> Result<RsaPublicKey, PassportError> {
+        let der = STANDARD
+            .decode(b64.as_bytes())
+            .map_err(|e| PassportError::Base64DecodingFailed(e.to_string()))?;
+        RsaPublicKey::from_public_key_der(&der).map_err(|_| PassportError::CscaPublicKeyInvalid)
+    }
+
+    /// Extract CSCA modulus, exponent, Barrett mu, and certificate signature
+    /// from a given public key
+    fn extract_csca_fields(
         &self,
-        idx: usize,
+        pubkey: &RsaPublicKey,
     ) -> Result<
         (
             [u8; SIG_BYTES * 2],
@@ -134,14 +522,6 @@ impl PassportReader {
         ),
         PassportError,
     > {
-        let csca_keys = load_csca_public_keys().map_err(|_| PassportError::FailedToLoadCscaKeys)?;
-        let usa_csca = csca_keys.get("USA").ok_or(PassportError::NoUsaCsca)?;
-        let der = STANDARD
-            .decode(usa_csca[idx].public_key.as_bytes())
-            .map_err(|e| PassportError::Base64DecodingFailed(e.to_string()))?;
-        let pubkey = RsaPublicKey::from_public_key_der(&der)
-            .map_err(|_| PassportError::CscaPublicKeyInvalid)?;
-
         let modulus =
             to_fixed_array::<{ SIG_BYTES * 2 }>(&pubkey.n().to_bytes_be(), "CSCA modulus")?;
         let exponent = to_u32(pubkey.e().to_bytes_be())?;
@@ -157,6 +537,25 @@ impl PassportReader {
         Ok((modulus, exponent, barrett, signature))
     }
 
+    /// Extract CSCA public key, exponent, Barrett mu, and certificate signature
+    fn extract_csca(
+        &self,
+        idx: usize,
+    ) -> Result<
+        (
+            [u8; SIG_BYTES * 2],
+            u32,
+            [u8; SIG_BYTES * 2 + 1],
+            [u8; SIG_BYTES * 2],
+        ),
+        PassportError,
+    > {
+        let csca_keys = load_csca_public_keys().map_err(|_| PassportError::FailedToLoadCscaKeys)?;
+        let usa_csca = csca_keys.get("USA").ok_or(PassportError::NoUsaCsca)?;
+        let pubkey = Self::decode_csca_pubkey(&usa_csca[idx].public_key)?;
+        self.extract_csca_fields(&pubkey)
+    }
+
     /// Extract CSCA data from an in-memory public key (used for mock data)
     fn extract_csca_from_pubkey(
         &self,
@@ -170,33 +569,97 @@ impl PassportReader {
             ),
             PassportError,
         > {
-        let modulus =
-            to_fixed_array::<{ SIG_BYTES * 2 }>(&pubkey.n().to_bytes_be(), "CSCA modulus")?;
-        let exponent = to_u32(pubkey.e().to_bytes_be())?;
-        let barrett = to_fixed_array::<{ SIG_BYTES * 2 + 1 }>(
-            &compute_barrett_reduction_parameter(&BigUint::from_bytes_be(&modulus)).to_bytes_be(),
-            "CSCA Barrett",
-        )?;
-        let signature = to_fixed_array::<{ SIG_BYTES * 2 }>(
-            self.sod.certificate.signature.as_bytes(),
-            "CSCA signature",
-        )?;
-
-        Ok((modulus, exponent, barrett, signature))
+        self.extract_csca_fields(pubkey)
     }
 
-    /// Extract DSC certificate (padded + len + offset of modulus inside cert)
+    /// Extract DSC certificate TBS (padded to 720 + actual len + pubkey offset)
     fn extract_dsc_cert(
         &self,
         dsc_modulus: &[u8; SIG_BYTES],
     ) -> Result<([u8; MAX_TBS_SIZE], usize, usize), PassportError> {
+        self.extract_dsc_cert_sized::<MAX_TBS_SIZE>(dsc_modulus)
+    }
+
+    /// Extract DSC certificate TBS padded to a generic size N, with actual
+    /// length and pubkey offset
+    fn extract_dsc_cert_sized<const N: usize>(
+        &self,
+        dsc_modulus: &[u8; SIG_BYTES],
+    ) -> Result<([u8; N], usize, usize), PassportError> {
         let tbs_bytes = self.sod.certificate.tbs.bytes.as_bytes();
         let cert_len = tbs_bytes.len();
-        let padded = fit::<MAX_TBS_SIZE>(tbs_bytes)?;
+        let padded = fit::<N>(tbs_bytes)?;
         let pubkey_offset = find_offset(tbs_bytes, dsc_modulus, "DSC modulus in cert")?;
         Ok((padded, cert_len, pubkey_offset))
     }
 
+    /// Extract country code from DG1 bytes [7..10]
+    fn extract_country(&self) -> String {
+        let dg1 = self.dg1.as_bytes();
+        if dg1.len() >= 10 {
+            String::from_utf8_lossy(&dg1[7..10]).to_string()
+        } else {
+            "<<<".to_string()
+        }
+    }
+
+    /// Extract all common passport data fields needed by both the 720 and 1300
+    /// circuit pipelines. This consolidates the repeated extraction preamble.
+    fn extract_passport_data(&self, csca_key_index: usize) -> Result<PassportData, PassportError> {
+        let dg1_padded = fit::<MAX_DG1_SIZE>(self.dg1.as_bytes())?;
+        let dg1_len = self.dg1.len();
+
+        let (signed_attrs, signed_attributes_size) = self.extract_signed_attrs()?;
+        let (econtent, econtent_len, econtent_bytes) = self.extract_econtent()?;
+
+        let (dsc_modulus, dsc_exponent, dsc_barrett, sod_signature) = self.extract_dsc()?;
+
+        let (csca_modulus, csca_exponent, csca_barrett, csca_signature) = if self.mockdata {
+            let key = self
+                .csca_pubkey
+                .as_ref()
+                .ok_or(PassportError::MissingCscaMockKey)?;
+            self.extract_csca_from_pubkey(key)?
+        } else {
+            self.extract_csca(csca_key_index)?
+        };
+
+        let dg1_hash = Sha256::digest(self.dg1.as_bytes());
+        let dg1_hash_offset = find_offset(econtent_bytes, dg1_hash.as_slice(), "DG1 hash")?;
+
+        let country = self.extract_country();
+
+        let private_nullifier =
+            commitment::calculate_private_nullifier(&dg1_padded, &econtent, &sod_signature);
+        let private_nullifier_hex = commitment::field_to_hex_string(&private_nullifier);
+
+        let computed_sod_hash = commitment::calculate_sod_hash(&econtent);
+        let sod_hash_hex = commitment::field_to_hex_string(&computed_sod_hash);
+
+        Ok(PassportData {
+            dg1_padded,
+            dg1_len,
+            signed_attrs,
+            signed_attributes_size,
+            econtent,
+            econtent_len,
+            dsc_modulus,
+            dsc_exponent,
+            dsc_barrett,
+            sod_signature,
+            csca_modulus,
+            csca_exponent,
+            csca_barrett,
+            csca_signature,
+            country,
+            dg1_hash_offset,
+            private_nullifier,
+            private_nullifier_hex,
+            computed_sod_hash,
+            sod_hash_hex,
+        })
+    }
+
     /// Validate DG1, eContent, and signatures against DSC + CSCA
     pub fn validate(&self) -> Result<usize, PassportError> {
         // 1. Check DG1 hash inside eContent
@@ -281,11 +744,7 @@ impl PassportReader {
         let usa_csca = all_csca.get("USA").ok_or(PassportError::NoUsaCsca)?;
 
         for (i, csca) in usa_csca.iter().enumerate() {
-            let der = STANDARD
-                .decode(csca.public_key.as_bytes())
-                .map_err(|e| PassportError::Base64DecodingFailed(e.to_string()))?;
-            let csca_pubkey = RsaPublicKey::from_public_key_der(&der)
-                .map_err(|_| PassportError::CscaPublicKeyInvalid)?;
+            let csca_pubkey = Self::decode_csca_pubkey(&csca.public_key)?;
             if csca_pubkey
                 .verify(
                     Pkcs1v15Sign::new::<Sha256>(),
@@ -300,119 +759,790 @@ impl PassportReader {
         Err(PassportError::CscaSignatureInvalid)
     }
 
-    /// Convert to circuit inputs for Noir Circuits
-    pub fn to_circuit_inputs(
+    /// Generate inputs for the 4-circuit merkle_age_check TBS-720 chain.
+    ///
+    /// Extracts passport data and distributes it across 4 circuit input
+    /// structs. Commitment values and Merkle data come from the config
+    /// (placeholders by default).
+    pub fn to_merkle_age_720_inputs(
         &self,
-        current_date: u64,
-        min_age_required: u8,
-        max_age_required: u8,
         csca_key_index: usize,
-    ) -> Result<CircuitInputs, PassportError> {
-        // === Step 1. DG1 ===
-        let dg1_padded = fit::<MAX_DG1_SIZE>(self.dg1.as_bytes())?;
-        let dg1_len = self.dg1.len();
+        config: MerkleAge720Config,
+    ) -> Result<MerkleAge720Inputs, PassportError> {
+        let pd = self.extract_passport_data(csca_key_index)?;
 
-        // === Step 2. SignedAttributes ===
-        let (signed_attrs, signed_attributes_size) = self.extract_signed_attrs()?;
+        // DSC certificate TBS (720-byte path)
+        let (tbs_cert, tbs_cert_len, dsc_pubkey_offset) = self.extract_dsc_cert(&pd.dsc_modulus)?;
 
-        // === Step 3. eContent ===
-        let (econtent, econtent_len, econtent_bytes) = self.extract_econtent()?;
+        // === Compute Poseidon2 commitments ===
 
-        // === Step 4. DSC ===
-        let (dsc_modulus, dsc_exponent, dsc_barrett, dsc_signature) = self.extract_dsc()?;
+        // Circuit 1 output: hash(salt_1, country, tbs_cert)
+        let comm_out_1 = commitment::hash_salt_country_tbs(
+            &config.base.salt_1,
+            pd.country.as_bytes(),
+            &tbs_cert,
+        )?;
+        let comm_out_1_hex = commitment::field_to_hex_string(&comm_out_1);
 
-        // === Step 5. CSCA ===
-        let (csca_modulus, csca_exponent, csca_barrett, csca_signature) = if self.mockdata {
-            let key = self
-                .csca_pubkey
-                .as_ref()
-                .ok_or(PassportError::MissingCscaMockKey)?;
-            self.extract_csca_from_pubkey(key)?
-        } else {
-            self.extract_csca(csca_key_index)?
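+        // Chaining rule: each circuit's comm_out becomes the next circuit's
+        // comm_in; comm_out_1 above feeds t_add_id_data_720 and comm_out_2
+        // (computed below) feeds t_add_integrity_commit.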
+ // Circuit 2 output: hash(salt_2, country, signed_attr, sa_size, dg1, e_content, + // nullifier) + let comm_out_2 = commitment::hash_salt_country_sa_dg1_econtent_nullifier( + &config.base.salt_2, + pd.country.as_bytes(), + &pd.signed_attrs, + pd.signed_attributes_size as u64, + &pd.dg1_padded, + &pd.econtent, + pd.private_nullifier, + )?; + let comm_out_2_hex = commitment::field_to_hex_string(&comm_out_2); + + // === Build circuit input structs === + + let add_dsc = AddDsc720Inputs { + csc_pubkey: pd.csca_modulus, + salt: config.base.salt_1.clone(), + country: pd.country, + tbs_certificate: tbs_cert, + csc_pubkey_redc_param: pd.csca_barrett, + dsc_signature: pd.csca_signature, + exponent: pd.csca_exponent, + tbs_certificate_len: tbs_cert_len as u32, }; - // === Step 6. Offsets === - let dg1_hash = Sha256::digest(self.dg1.as_bytes()); - let dg1_hash_offset = find_offset(econtent_bytes, dg1_hash.as_slice(), "DG1 hash")?; + let add_id_data = AddIdData720Inputs { + comm_in: comm_out_1_hex, + salt_in: config.base.salt_1.clone(), + salt_out: config.base.salt_2.clone(), + dg1: pd.dg1_padded, + dsc_pubkey: pd.dsc_modulus, + dsc_pubkey_redc_param: pd.dsc_barrett, + dsc_pubkey_offset_in_dsc_cert: dsc_pubkey_offset as u32, + sod_signature: pd.sod_signature, + tbs_certificate: tbs_cert, + signed_attributes: pd.signed_attrs, + signed_attributes_size: pd.signed_attributes_size as u64, + exponent: pd.dsc_exponent, + e_content: pd.econtent, + }; - let econtent_hash = Sha256::digest(econtent_bytes); - let econtent_hash_offset = - find_offset(&signed_attrs, econtent_hash.as_slice(), "eContent hash")?; - - // === Step 7. DSC Certificate === - let (dsc_cert, dsc_cert_len, dsc_pubkey_offset) = self.extract_dsc_cert(&dsc_modulus)?; - - // === Step 8. Build CircuitInputs === - Ok(CircuitInputs { - dg1: dg1_padded, - dg1_padded_length: dg1_len, - current_date, - min_age_required, - max_age_required, - passport_validity_contents: PassportValidityContent { - signed_attributes: signed_attrs, - signed_attributes_size, - econtent, - econtent_len, - dsc_pubkey: dsc_modulus, - dsc_barrett_mu: dsc_barrett, - dsc_signature, - dsc_rsa_exponent: dsc_exponent, - csc_pubkey: csca_modulus, - csc_barrett_mu: csca_barrett, - dsc_cert_signature: csca_signature, - csc_rsa_exponent: csca_exponent, - dg1_hash_offset, - econtent_hash_offset, - dsc_pubkey_offset_in_dsc_cert: dsc_pubkey_offset, - dsc_cert, - dsc_cert_len, - }, + let add_integrity = AddIntegrityCommitInputs { + comm_in: comm_out_2_hex, + salt_in: config.base.salt_2.clone(), + dg1: pd.dg1_padded, + dg1_padded_length: pd.dg1_len as u64, + dg1_hash_offset: pd.dg1_hash_offset as u32, + signed_attributes: pd.signed_attrs, + signed_attributes_size: pd.signed_attributes_size as u32, + e_content: pd.econtent, + e_content_len: pd.econtent_len as u32, + private_nullifier: pd.private_nullifier_hex, + r_dg1: config.base.r_dg1.clone(), + }; + + let attest = + config + .base + .build_attest(&pd.dg1_padded, pd.computed_sod_hash, pd.sod_hash_hex)?; + + Ok(MerkleAge720Inputs { + add_dsc, + add_id_data, + add_integrity, + attest, + }) + } + + /// Generate inputs for the 5-circuit merkle_age_check TBS-1300 chain. + /// + /// Circuit chain: dsc_hash_1300 -> dsc_verify_1300 -> id_data_1300 -> + /// integrity -> attest + /// + /// The key difference from TBS-720 is that DSC signature verification + /// is split into two circuits using partial SHA256. Circuit 1 processes the + /// first 640 bytes (CHUNK1_SIZE) and outputs an intermediate SHA256 state + /// commitment. 
+    /// Circuit 2 continues the hash, verifies the RSA signature, and outputs
+    /// the standard country+TBS commitment.
+    pub fn to_merkle_age_1300_inputs(
+        &self,
+        csca_key_index: usize,
+        config: MerkleAge1300Config,
+    ) -> Result<MerkleAge1300Inputs, PassportError> {
+        let pd = self.extract_passport_data(csca_key_index)?;
+
+        // DSC certificate TBS at 1300-byte size
+        let (tbs_cert_1300, tbs_cert_len, dsc_pubkey_offset) =
+            self.extract_dsc_cert_sized::<MAX_TBS_SIZE_1300>(&pd.dsc_modulus)?;
+
+        // chunk1: first 640 bytes of TBS
+        let mut chunk1 = [0u8; CHUNK1_SIZE];
+        chunk1.copy_from_slice(&tbs_cert_1300[..CHUNK1_SIZE]);
+
+        // Partial SHA256: compute intermediate state
+        let state1 = partial_sha256::sha256_start(&chunk1);
+
+        // === Compute Poseidon2 commitments for 5-circuit chain ===
+
+        // Circuit 1 (dsc_hash) output:
+        //   data_comm1 = commit_to_data_chunk(salt_0, chunk1)
+        //   comm_out_hash = commit_to_sha256_state_and_data(salt_0, state1,
+        //       640, data_comm1)
+        let data_comm1 = commitment::commit_to_data_chunk(&config.salt_0, &chunk1)?;
+        let comm_out_hash = commitment::commit_to_sha256_state_and_data(
+            &config.salt_0,
+            &state1,
+            CHUNK1_SIZE as u32,
+            data_comm1,
+        )?;
+        let comm_out_hash_hex = commitment::field_to_hex_string(&comm_out_hash);
+
+        // Circuit 2 (dsc_verify) output:
+        //   comm_out_verify = hash_salt_country_tbs(salt_1, country, tbs_cert_1300)
+        let comm_out_verify = commitment::hash_salt_country_tbs(
+            &config.base.salt_1,
+            pd.country.as_bytes(),
+            &tbs_cert_1300,
+        )?;
+        let comm_out_verify_hex = commitment::field_to_hex_string(&comm_out_verify);
+
+        // Circuit 3 (id_data) output:
+        //   comm_out_id = hash_salt_country_sa_dg1_econtent_nullifier(salt_2, ...)
+        let comm_out_id = commitment::hash_salt_country_sa_dg1_econtent_nullifier(
+            &config.base.salt_2,
+            pd.country.as_bytes(),
+            &pd.signed_attrs,
+            pd.signed_attributes_size as u64,
+            &pd.dg1_padded,
+            &pd.econtent,
+            pd.private_nullifier,
+        )?;
+        let comm_out_id_hex = commitment::field_to_hex_string(&comm_out_id);
+
+        // === Build 5 circuit input structs ===
+
+        let add_dsc_hash = AddDscHash1300Inputs {
+            salt: config.salt_0.clone(),
+            chunk1,
+        };
+
+        let add_dsc_verify = AddDscVerify1300Inputs {
+            comm_in: comm_out_hash_hex,
+            csc_pubkey: pd.csca_modulus,
+            salt: config.salt_0,
+            country: pd.country,
+            state1,
+            tbs_certificate: tbs_cert_1300,
+            tbs_certificate_len: tbs_cert_len as u32,
+            csc_pubkey_redc_param: pd.csca_barrett,
+            dsc_signature: pd.csca_signature,
+            exponent: pd.csca_exponent,
+            salt_out: config.base.salt_1.clone(),
+        };
+
+        let add_id_data = AddIdData1300Inputs {
+            comm_in: comm_out_verify_hex,
+            salt_in: config.base.salt_1.clone(),
+            salt_out: config.base.salt_2.clone(),
+            dg1: pd.dg1_padded,
+            dsc_pubkey: pd.dsc_modulus,
+            dsc_pubkey_redc_param: pd.dsc_barrett,
+            dsc_pubkey_offset_in_dsc_cert: dsc_pubkey_offset as u32,
+            sod_signature: pd.sod_signature,
+            tbs_certificate: tbs_cert_1300,
+            signed_attributes: pd.signed_attrs,
+            signed_attributes_size: pd.signed_attributes_size as u64,
+            exponent: pd.dsc_exponent,
+            e_content: pd.econtent,
+        };
+
+        let add_integrity = AddIntegrityCommitInputs {
+            comm_in: comm_out_id_hex,
+            salt_in: config.base.salt_2.clone(),
+            dg1: pd.dg1_padded,
+            dg1_padded_length: pd.dg1_len as u64,
+            dg1_hash_offset: pd.dg1_hash_offset as u32,
+            signed_attributes: pd.signed_attrs,
+            signed_attributes_size: pd.signed_attributes_size as u32,
+            e_content: pd.econtent,
+            e_content_len: pd.econtent_len as u32,
+            private_nullifier: pd.private_nullifier_hex,
+            r_dg1: config.base.r_dg1.clone(),
+        };
+
+        let attest =
+            config
+                .base
+                .build_attest(&pd.dg1_padded, pd.computed_sod_hash, pd.sod_hash_hex)?;
+
+        Ok(MerkleAge1300Inputs {
+            add_dsc_hash,
+            add_dsc_verify,
+            add_id_data,
+            add_integrity,
+            attest,
         })
     }
 }
 
-impl CircuitInputs {
-    pub fn to_toml_string(&self) -> String {
+// ============================================================================
+// Serde helper for large fixed-size arrays (serde only supports [T; N] for
+// N <= 32)
+// ============================================================================
+
+mod byte_array {
+    use serde::{Serialize, Serializer};
+
+    pub fn serialize<S: Serializer, const N: usize>(
+        arr: &[u8; N],
+        s: S,
+    ) -> Result<S::Ok, S::Error> {
+        arr.as_slice().serialize(s)
+    }
+}
+
+// ============================================================================
+// TOML serialization trait and helpers
+// ============================================================================
+
+/// Trait for circuit input types that can be serialized to TOML format.
+pub trait SaveToml {
+    /// Serialize this circuit input to a Noir-compatible TOML string.
+    fn to_toml_string(&self) -> String;
+
+    /// Write the TOML serialization to a file.
+    fn save_to_toml_file<P: AsRef<Path>>(&self, path: P) -> std::io::Result<()> {
+        std::fs::write(path, self.to_toml_string())
+    }
+}
+
+/// Trait for circuit input container types (720/1300) that hold all circuit
+/// inputs for a proving pipeline.
+pub trait CircuitInputSet {
+    /// Return the list of circuit file names (without extension).
+    fn circuit_names(&self) -> Vec<&str>;
+
+    /// Save all circuit input TOML files to the given directory.
+    fn save_all(&self, base_dir: &Path) -> std::io::Result<()>;
+}
+
+/// Format a numeric slice as a TOML array: [1, 2, 3, ...]
+fn fmt_array<T: std::fmt::Display>(arr: &[T]) -> String {
+    format!(
+        "[{}]",
+        arr.iter()
+            .map(|v| v.to_string())
+            .collect::<Vec<_>>()
+            .join(", ")
+    )
+}
+
+// ============================================================================
+// TOML serialization for each circuit
+// ============================================================================
+
+impl SaveToml for AddDsc720Inputs {
+    fn to_toml_string(&self) -> String {
         let mut out = String::new();
-        let _ = writeln!(out, "dg1 = {:?}", self.dg1);
-        let _ = writeln!(out, "dg1_padded_length = {}", self.dg1_padded_length);
-        let _ = writeln!(out, "current_date = {}", self.current_date);
-        let _ = writeln!(out, "min_age_required = {}", self.min_age_required);
-        let _ = writeln!(out, "max_age_required = {}", self.max_age_required);
-        let _ = writeln!(out, "\n[passport_validity_contents]");
+        let _ = writeln!(out, "csc_pubkey = {}", fmt_array(&self.csc_pubkey));
+        let _ = writeln!(out, "salt = \"{}\"", self.salt);
+        let _ = writeln!(out, "country = \"{}\"", self.country);
+        let _ = writeln!(
+            out,
+            "tbs_certificate = {}",
+            fmt_array(&self.tbs_certificate)
+        );
+        let _ = writeln!(
+            out,
+            "csc_pubkey_redc_param = {}",
+            fmt_array(&self.csc_pubkey_redc_param)
+        );
+        let _ = writeln!(out, "dsc_signature = {}", fmt_array(&self.dsc_signature));
+        let _ = writeln!(out, "exponent = {}", self.exponent);
+        let _ = writeln!(out, "tbs_certificate_len = {}", self.tbs_certificate_len);
+        out
+    }
+}
 
-        let pvc = &self.passport_validity_contents;
-        let _ = writeln!(out, "signed_attributes = {:?}", pvc.signed_attributes);
+impl SaveToml for AddIdData720Inputs {
+    fn to_toml_string(&self) -> String {
+        let mut out = String::new();
+        let _ = writeln!(out, "comm_in = \"{}\"", self.comm_in);
+        let _ = writeln!(out, "salt_in = \"{}\"", self.salt_in);
+        let _ = writeln!(out, "salt_out = \"{}\"", self.salt_out);
+        let _ =
writeln!(out, "dg1 = {}", fmt_array(&self.dg1)); + let _ = writeln!(out, "dsc_pubkey = {}", fmt_array(&self.dsc_pubkey)); + let _ = writeln!( + out, + "dsc_pubkey_redc_param = {}", + fmt_array(&self.dsc_pubkey_redc_param) + ); + let _ = writeln!( + out, + "dsc_pubkey_offset_in_dsc_cert = {}", + self.dsc_pubkey_offset_in_dsc_cert + ); + let _ = writeln!(out, "sod_signature = {}", fmt_array(&self.sod_signature)); + let _ = writeln!( + out, + "tbs_certificate = {}", + fmt_array(&self.tbs_certificate) + ); + let _ = writeln!( + out, + "signed_attributes = {}", + fmt_array(&self.signed_attributes) + ); let _ = writeln!( out, "signed_attributes_size = {}", - pvc.signed_attributes_size - ); - let _ = writeln!(out, "econtent = {:?}", pvc.econtent); - let _ = writeln!(out, "econtent_len = {}", pvc.econtent_len); - let _ = writeln!(out, "dsc_signature = {:?}", pvc.dsc_signature); - let _ = writeln!(out, "dsc_rsa_exponent = {}", pvc.dsc_rsa_exponent); - let _ = writeln!(out, "dsc_pubkey = {:?}", pvc.dsc_pubkey); - let _ = writeln!(out, "dsc_barrett_mu = {:?}", pvc.dsc_barrett_mu); - let _ = writeln!(out, "csc_pubkey = {:?}", pvc.csc_pubkey); - let _ = writeln!(out, "csc_barrett_mu = {:?}", pvc.csc_barrett_mu); - let _ = writeln!(out, "dsc_cert_signature = {:?}", pvc.dsc_cert_signature); - let _ = writeln!(out, "csc_rsa_exponent = {}", pvc.csc_rsa_exponent); - let _ = writeln!(out, "dg1_hash_offset = {}", pvc.dg1_hash_offset); - let _ = writeln!(out, "econtent_hash_offset = {}", pvc.econtent_hash_offset); + self.signed_attributes_size + ); + let _ = writeln!(out, "exponent = {}", self.exponent); + let _ = writeln!(out, "e_content = {}", fmt_array(&self.e_content)); + out + } +} + +impl SaveToml for AddIntegrityCommitInputs { + fn to_toml_string(&self) -> String { + let mut out = String::new(); + let _ = writeln!(out, "comm_in = \"{}\"", self.comm_in); + let _ = writeln!(out, "salt_in = \"{}\"", self.salt_in); + let _ = writeln!(out, "dg1 = {}", fmt_array(&self.dg1)); + let _ = writeln!(out, "dg1_padded_length = {}", self.dg1_padded_length); + let _ = writeln!(out, "dg1_hash_offset = {}", self.dg1_hash_offset); + let _ = writeln!( + out, + "signed_attributes = {}", + fmt_array(&self.signed_attributes) + ); + let _ = writeln!( + out, + "signed_attributes_size = {}", + self.signed_attributes_size + ); + let _ = writeln!(out, "e_content = {}", fmt_array(&self.e_content)); + let _ = writeln!(out, "e_content_len = {}", self.e_content_len); + let _ = writeln!(out, "private_nullifier = \"{}\"", self.private_nullifier); + let _ = writeln!(out, "r_dg1 = \"{}\"", self.r_dg1); + out + } +} + +impl SaveToml for AttestInputs { + fn to_toml_string(&self) -> String { + let mut out = String::new(); + let _ = writeln!(out, "root = \"{}\"", self.root); + let _ = writeln!(out, "current_date = \"{}\"", self.current_date); + let _ = writeln!(out, "service_scope = \"{}\"", self.service_scope); + let _ = writeln!(out, "service_subscope = \"{}\"", self.service_subscope); + let _ = writeln!(out, "dg1 = {}", fmt_array(&self.dg1)); + let _ = writeln!(out, "r_dg1 = \"{}\"", self.r_dg1); + let _ = writeln!(out, "sod_hash = \"{}\"", self.sod_hash); + let _ = writeln!(out, "leaf_index = \"{}\"", self.leaf_index); + // Merkle path as TOML array of quoted strings + let _ = writeln!(out, "merkle_path = ["); + for (i, h) in self.merkle_path.iter().enumerate() { + let comma = if i < self.merkle_path.len() - 1 { + "," + } else { + "" + }; + let _ = writeln!(out, " \"{}\"{}", h, comma); + } + let _ = writeln!(out, "]"); + let _ = 
writeln!(out, "min_age_required = \"{}\"", self.min_age_required); + let _ = writeln!(out, "max_age_required = \"{}\"", self.max_age_required); + let _ = writeln!(out, "nullifier_secret = \"{}\"", self.nullifier_secret); + out + } +} + +impl CircuitInputSet for MerkleAge720Inputs { + fn circuit_names(&self) -> Vec<&str> { + vec![ + "t_add_dsc_720", + "t_add_id_data_720", + "t_add_integrity_commit", + "t_attest", + ] + } + + fn save_all(&self, base_dir: &Path) -> std::io::Result<()> { + let base = base_dir; + std::fs::create_dir_all(base)?; + self.add_dsc + .save_to_toml_file(base.join("t_add_dsc_720.toml"))?; + self.add_id_data + .save_to_toml_file(base.join("t_add_id_data_720.toml"))?; + self.add_integrity + .save_to_toml_file(base.join("t_add_integrity_commit.toml"))?; + self.attest.save_to_toml_file(base.join("t_attest.toml"))?; + Ok(()) + } +} + +// --- TBS-1300 TOML serialization --- + +impl SaveToml for AddDscHash1300Inputs { + fn to_toml_string(&self) -> String { + let mut out = String::new(); + let _ = writeln!(out, "salt = \"{}\"", self.salt); + let _ = writeln!(out, "chunk1 = {}", fmt_array(&self.chunk1)); + out + } +} + +impl SaveToml for AddDscVerify1300Inputs { + fn to_toml_string(&self) -> String { + let mut out = String::new(); + let _ = writeln!(out, "comm_in = \"{}\"", self.comm_in); + let _ = writeln!(out, "csc_pubkey = {}", fmt_array(&self.csc_pubkey)); + let _ = writeln!(out, "salt = \"{}\"", self.salt); + let _ = writeln!(out, "country = \"{}\"", self.country); + let _ = writeln!(out, "state1 = {}", fmt_array(&self.state1)); + let _ = writeln!( + out, + "tbs_certificate = {}", + fmt_array(&self.tbs_certificate) + ); + let _ = writeln!(out, "tbs_certificate_len = {}", self.tbs_certificate_len); + let _ = writeln!( + out, + "csc_pubkey_redc_param = {}", + fmt_array(&self.csc_pubkey_redc_param) + ); + let _ = writeln!(out, "dsc_signature = {}", fmt_array(&self.dsc_signature)); + let _ = writeln!(out, "exponent = {}", self.exponent); + let _ = writeln!(out, "salt_out = \"{}\"", self.salt_out); + out + } +} + +impl SaveToml for AddIdData1300Inputs { + fn to_toml_string(&self) -> String { + let mut out = String::new(); + let _ = writeln!(out, "comm_in = \"{}\"", self.comm_in); + let _ = writeln!(out, "salt_in = \"{}\"", self.salt_in); + let _ = writeln!(out, "salt_out = \"{}\"", self.salt_out); + let _ = writeln!(out, "dg1 = {}", fmt_array(&self.dg1)); + let _ = writeln!(out, "dsc_pubkey = {}", fmt_array(&self.dsc_pubkey)); + let _ = writeln!( + out, + "dsc_pubkey_redc_param = {}", + fmt_array(&self.dsc_pubkey_redc_param) + ); let _ = writeln!( out, "dsc_pubkey_offset_in_dsc_cert = {}", - pvc.dsc_pubkey_offset_in_dsc_cert + self.dsc_pubkey_offset_in_dsc_cert + ); + let _ = writeln!(out, "sod_signature = {}", fmt_array(&self.sod_signature)); + let _ = writeln!( + out, + "tbs_certificate = {}", + fmt_array(&self.tbs_certificate) ); - let _ = writeln!(out, "dsc_cert = {:?}", pvc.dsc_cert); - let _ = writeln!(out, "dsc_cert_len = {}", pvc.dsc_cert_len); + let _ = writeln!( + out, + "signed_attributes = {}", + fmt_array(&self.signed_attributes) + ); + let _ = writeln!( + out, + "signed_attributes_size = {}", + self.signed_attributes_size + ); + let _ = writeln!(out, "exponent = {}", self.exponent); + let _ = writeln!(out, "e_content = {}", fmt_array(&self.e_content)); out } +} - pub fn save_to_toml_file>(&self, path: P) -> std::io::Result<()> { - std::fs::write(path, self.to_toml_string()) +impl CircuitInputSet for MerkleAge1300Inputs { + fn circuit_names(&self) -> Vec<&str> { + 
vec![ + "t_add_dsc_hash_1300", + "t_add_dsc_verify_1300", + "t_add_id_data_1300", + "t_add_integrity_commit", + "t_attest", + ] + } + + fn save_all(&self, base_dir: &Path) -> std::io::Result<()> { + let base = base_dir; + std::fs::create_dir_all(base)?; + self.add_dsc_hash + .save_to_toml_file(base.join("t_add_dsc_hash_1300.toml"))?; + self.add_dsc_verify + .save_to_toml_file(base.join("t_add_dsc_verify_1300.toml"))?; + self.add_id_data + .save_to_toml_file(base.join("t_add_id_data_1300.toml"))?; + self.add_integrity + .save_to_toml_file(base.join("t_add_integrity_commit.toml"))?; + self.attest.save_to_toml_file(base.join("t_attest.toml"))?; + Ok(()) + } +} + +#[cfg(test)] +mod tests { + use { + super::*, + crate::{ + mock_generator::{ + dg1_bytes_with_birthdate_expiry_date, generate_sod, generate_sod_with_padded_tbs, + }, + mock_keys::{MOCK_CSCA_PRIV_KEY_B64, MOCK_DSC_PRIV_KEY_B64}, + }, + rsa::{pkcs8::DecodePrivateKey, RsaPrivateKey}, + }; + + /// End-to-end test: generate mock passport data and verify all + /// computed commitments match the known-good values from the + /// verified TOML files in noir-examples/.../tbs_720/. + #[test] + fn test_commitment_chain_matches_known_good() { + // Generate the same mock data the binary uses + let csca_der = STANDARD + .decode(MOCK_CSCA_PRIV_KEY_B64) + .expect("decode CSCA key"); + let csca_priv = RsaPrivateKey::from_pkcs8_der(&csca_der).expect("parse CSCA key"); + let csca_pub = csca_priv.to_public_key(); + + let dsc_der = STANDARD + .decode(MOCK_DSC_PRIV_KEY_B64) + .expect("decode DSC key"); + let dsc_priv = RsaPrivateKey::from_pkcs8_der(&dsc_der).expect("parse DSC key"); + let dsc_pub = dsc_priv.to_public_key(); + + let dg1 = dg1_bytes_with_birthdate_expiry_date(b"070101", b"320101"); + let sod = generate_sod(&dg1, &dsc_priv, &dsc_pub, &csca_priv, &csca_pub); + + let reader = PassportReader::new(Binary::from_slice(&dg1), sod, true, Some(csca_pub)); + let csca_idx = reader.validate().expect("validation failed"); + + let config = MerkleAge720Config { + base: MerkleAgeBaseConfig { + current_date: 1735689600, + min_age_required: 18, + max_age_required: 0, + ..Default::default() + }, + }; + + let inputs = reader + .to_merkle_age_720_inputs(csca_idx, config) + .expect("generate inputs"); + + // === Verify commitment chain self-consistency === + // Re-compute each commitment independently and verify it matches + // the value produced by `to_merkle_age_720_inputs`. 
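+        //
+        // Chain under test (each circuit's comm_out feeds the next circuit's
+        // comm_in): t_add_dsc_720 -> t_add_id_data_720 -> t_add_integrity_commit
+        // -> t_attest.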
+ + // Circuit 1 output: hash(salt_1, country, tbs_cert) → add_id_data.comm_in + let country_bytes = inputs.add_dsc.country.as_bytes(); + let comm_out_1 = commitment::hash_salt_country_tbs( + &inputs.add_dsc.salt, + country_bytes, + &inputs.add_id_data.tbs_certificate, + ) + .unwrap(); + assert_eq!( + commitment::field_to_hex_string(&comm_out_1), + inputs.add_id_data.comm_in, + "comm_out_1 mismatch: hash_salt_country_tbs" + ); + + // Private nullifier: hash(dg1, e_content, sod_signature) + let private_nullifier = commitment::calculate_private_nullifier( + &inputs.add_id_data.dg1, + &inputs.add_id_data.e_content, + &inputs.add_id_data.sod_signature, + ); + assert_eq!( + commitment::field_to_hex_string(&private_nullifier), + inputs.add_integrity.private_nullifier, + "private_nullifier mismatch: calculate_private_nullifier" + ); + + // Circuit 2 output: hash(salt_2, country, signed_attrs, dg1, econtent, + // nullifier) → add_integrity.comm_in + let comm_out_2 = commitment::hash_salt_country_sa_dg1_econtent_nullifier( + &inputs.add_id_data.salt_out, + country_bytes, + &inputs.add_id_data.signed_attributes, + inputs.add_id_data.signed_attributes_size, + &inputs.add_id_data.dg1, + &inputs.add_id_data.e_content, + private_nullifier, + ) + .unwrap(); + assert_eq!( + commitment::field_to_hex_string(&comm_out_2), + inputs.add_integrity.comm_in, + "comm_out_2 mismatch: hash_salt_country_sa_dg1_econtent_nullifier" + ); + + // sod_hash: consistent across circuits + let sod_hash = commitment::calculate_sod_hash(&inputs.add_id_data.e_content); + assert_eq!( + commitment::field_to_hex_string(&sod_hash), + inputs.attest.sod_hash, + "sod_hash mismatch" + ); + + // Verify shared fields between circuits are consistent + assert_eq!( + inputs.add_integrity.dg1, inputs.add_id_data.dg1, + "dg1 should be the same in id_data and integrity" + ); + assert_eq!( + inputs.attest.dg1, inputs.add_integrity.dg1, + "dg1 should be the same in integrity and attest" + ); + } + + /// End-to-end test for tbs_1300: generate mock passport data with padded + /// TBS, produce all 5 circuit inputs, and verify the commitment chain + /// is self-consistent. + #[test] + fn test_1300_commitment_chain_self_consistent() { + // Generate mock keys + let csca_der = STANDARD + .decode(MOCK_CSCA_PRIV_KEY_B64) + .expect("decode CSCA key"); + let csca_priv = RsaPrivateKey::from_pkcs8_der(&csca_der).expect("parse CSCA key"); + let csca_pub = csca_priv.to_public_key(); + + let dsc_der = STANDARD + .decode(MOCK_DSC_PRIV_KEY_B64) + .expect("decode DSC key"); + let dsc_priv = RsaPrivateKey::from_pkcs8_der(&dsc_der).expect("parse DSC key"); + let dsc_pub = dsc_priv.to_public_key(); + + let dg1 = dg1_bytes_with_birthdate_expiry_date(b"070101", b"320101"); + let sod = + generate_sod_with_padded_tbs(&dg1, &dsc_priv, &dsc_pub, &csca_priv, &csca_pub, 850); + + let reader = PassportReader::new(Binary::from_slice(&dg1), sod, true, Some(csca_pub)); + let csca_idx = reader.validate().expect("validation failed"); + + let config = MerkleAge1300Config { + base: MerkleAgeBaseConfig { + current_date: 1735689600, + min_age_required: 17, + max_age_required: 0, + ..Default::default() + }, + ..Default::default() + }; + + let inputs = reader + .to_merkle_age_1300_inputs(csca_idx, config) + .expect("generate 1300 inputs"); + + // === Verify commitment chain consistency === + // Re-compute each commitment independently and verify it matches. 
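+        //
+        // Chain under test: t_add_dsc_hash_1300 -> t_add_dsc_verify_1300 ->
+        // t_add_id_data_1300 -> t_add_integrity_commit -> t_attest.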
+ + // Circuit 1 output: dsc_hash → dsc_verify.comm_in + let data_comm1 = commitment::commit_to_data_chunk( + &inputs.add_dsc_hash.salt, + &inputs.add_dsc_hash.chunk1, + ) + .unwrap(); + let state1 = partial_sha256::sha256_start(&inputs.add_dsc_hash.chunk1); + let comm_out_hash = commitment::commit_to_sha256_state_and_data( + &inputs.add_dsc_hash.salt, + &state1, + CHUNK1_SIZE as u32, + data_comm1, + ) + .unwrap(); + assert_eq!( + commitment::field_to_hex_string(&comm_out_hash), + inputs.add_dsc_verify.comm_in, + "dsc_hash output != dsc_verify.comm_in" + ); + + // Circuit 2 output: dsc_verify → id_data.comm_in + let country_bytes = inputs.add_dsc_verify.country.as_bytes(); + let comm_out_verify = commitment::hash_salt_country_tbs( + &inputs.add_dsc_verify.salt_out, + country_bytes, + &inputs.add_dsc_verify.tbs_certificate, + ) + .unwrap(); + assert_eq!( + commitment::field_to_hex_string(&comm_out_verify), + inputs.add_id_data.comm_in, + "dsc_verify output != id_data.comm_in" + ); + + // Circuit 3 output: id_data → integrity.comm_in + let private_nullifier = commitment::calculate_private_nullifier( + &inputs.add_id_data.dg1, + &inputs.add_id_data.e_content, + &inputs.add_id_data.sod_signature, + ); + assert_eq!( + commitment::field_to_hex_string(&private_nullifier), + inputs.add_integrity.private_nullifier, + "private_nullifier mismatch" + ); + + let comm_out_id = commitment::hash_salt_country_sa_dg1_econtent_nullifier( + &inputs.add_id_data.salt_out, + country_bytes, + &inputs.add_id_data.signed_attributes, + inputs.add_id_data.signed_attributes_size, + &inputs.add_id_data.dg1, + &inputs.add_id_data.e_content, + private_nullifier, + ) + .unwrap(); + assert_eq!( + commitment::field_to_hex_string(&comm_out_id), + inputs.add_integrity.comm_in, + "id_data output != integrity.comm_in" + ); + + // sod_hash: consistent across circuits + let sod_hash = commitment::calculate_sod_hash(&inputs.add_id_data.e_content); + assert_eq!( + commitment::field_to_hex_string(&sod_hash), + inputs.attest.sod_hash, + "sod_hash mismatch" + ); + + // Verify shared fields between circuits are consistent + assert_eq!( + inputs.add_dsc_verify.state1, state1, + "state1 stored in dsc_verify should match computed state1" + ); + assert_eq!( + inputs.add_id_data.tbs_certificate, inputs.add_dsc_verify.tbs_certificate, + "tbs_certificate should be the same in dsc_verify and id_data" + ); + assert_eq!( + inputs.add_integrity.dg1, inputs.add_id_data.dg1, + "dg1 should be the same in id_data and integrity" + ); + assert_eq!( + inputs.attest.dg1, inputs.add_integrity.dg1, + "dg1 should be the same in integrity and attest" + ); + + // Verify sod_hash and nullifier are non-trivial (non-zero) + assert!( + inputs.attest.sod_hash + != "0x0000000000000000000000000000000000000000000000000000000000000000", + "sod_hash should be non-trivial" + ); + assert!( + inputs.add_integrity.private_nullifier + != "0x0000000000000000000000000000000000000000000000000000000000000000", + "nullifier should be non-trivial" + ); } } diff --git a/playground/passport-input-gen/src/mock_generator.rs b/playground/passport-input-gen/src/mock_generator.rs index a5ca0baa..f34161b9 100644 --- a/playground/passport-input-gen/src/mock_generator.rs +++ b/playground/passport-input-gen/src/mock_generator.rs @@ -4,10 +4,18 @@ use { dsc::{SubjectPublicKeyInfo, TbsCertificate, DSC}, sod::SOD, types::{ - DataGroupHashValues, DigestAlgorithm, EContent, EncapContentInfo, SignatureAlgorithm, - SignatureAlgorithmName, SignedAttrs, SignerIdentifier, SignerInfo, MAX_DG1_SIZE, + 
DataGroupHash, DataGroupHashValues, DigestAlgorithm, EContent, EncapContentInfo, + LDSSecurityObject, SignatureAlgorithm, SignatureAlgorithmName, SignedAttrs, + SignerIdentifier, SignerInfo, MAX_DG1_SIZE, MAX_ECONTENT_SIZE, + MAX_SIGNED_ATTRIBUTES_SIZE, }, }, + rasn::{ + der, + types::{Any, BitString, Integer, ObjectIdentifier, OctetString}, + }, + rasn_cms::Attribute, + rasn_pkix::AlgorithmIdentifier, rsa::{ pkcs1::EncodeRsaPublicKey, pkcs1v15::SigningKey, @@ -15,36 +23,406 @@ use { RsaPrivateKey, RsaPublicKey, }, sha2::{Digest, Sha256}, - std::collections::HashMap, + std::collections::{BTreeSet, HashMap}, }; -/// Build a fake DG1 (MRZ) with given birthdate and expiry dates. -/// Birthdate and expiry are encoded as YYMMDD and inserted into the MRZ -/// positions. The rest of the bytes are filled with `<` characters and the -/// final two bytes are zeroed. +// ============================================================================ +// Well-known OIDs +// ============================================================================ + +/// SHA-256: 2.16.840.1.101.3.4.2.1 +fn oid_sha256() -> ObjectIdentifier { + ObjectIdentifier::new_unchecked(vec![2, 16, 840, 1, 101, 3, 4, 2, 1].into()) +} + +/// sha256WithRSAEncryption: 1.2.840.113549.1.1.11 +fn oid_sha256_with_rsa() -> ObjectIdentifier { + ObjectIdentifier::new_unchecked(vec![1, 2, 840, 113549, 1, 1, 11].into()) +} + +/// rsaEncryption: 1.2.840.113549.1.1.1 +fn oid_rsa_encryption() -> ObjectIdentifier { + ObjectIdentifier::new_unchecked(vec![1, 2, 840, 113549, 1, 1, 1].into()) +} + +/// mRTDSignatureData (id-ldsSecurityObject): 2.23.136.1.1.1 +fn oid_mrtd_signature_data() -> ObjectIdentifier { + ObjectIdentifier::new_unchecked(vec![2, 23, 136, 1, 1, 1].into()) +} + +/// id-contentType: 1.2.840.113549.1.9.3 +fn oid_content_type() -> ObjectIdentifier { + ObjectIdentifier::new_unchecked(vec![1, 2, 840, 113549, 1, 9, 3].into()) +} + +/// id-messageDigest: 1.2.840.113549.1.9.4 +fn oid_message_digest() -> ObjectIdentifier { + ObjectIdentifier::new_unchecked(vec![1, 2, 840, 113549, 1, 9, 4].into()) +} + +/// id-ce-basicConstraints: 2.5.29.19 +fn oid_basic_constraints() -> ObjectIdentifier { + ObjectIdentifier::new_unchecked(vec![2, 5, 29, 19].into()) +} + +/// id-ce-keyUsage: 2.5.29.15 +fn oid_key_usage() -> ObjectIdentifier { + ObjectIdentifier::new_unchecked(vec![2, 5, 29, 15].into()) +} + +/// id-ce-subjectKeyIdentifier: 2.5.29.14 +fn oid_subject_key_identifier() -> ObjectIdentifier { + ObjectIdentifier::new_unchecked(vec![2, 5, 29, 14].into()) +} + +/// id-at-commonName: 2.5.4.3 +fn oid_common_name() -> ObjectIdentifier { + ObjectIdentifier::new_unchecked(vec![2, 5, 4, 3].into()) +} + +/// id-at-countryName: 2.5.4.6 +fn oid_country_name() -> ObjectIdentifier { + ObjectIdentifier::new_unchecked(vec![2, 5, 4, 6].into()) +} + +/// id-at-organizationName: 2.5.4.10 +fn oid_organization_name() -> ObjectIdentifier { + ObjectIdentifier::new_unchecked(vec![2, 5, 4, 10].into()) +} + +// ============================================================================ +// ICAO check digit computation +// ============================================================================ + +/// Compute an ICAO 9303 check digit over a byte slice. +/// Characters are mapped to values: 0-9 -> 0-9, A-Z -> 10-35, '<' -> 0. +/// The weighted sum (weights cycling 7, 3, 1) modulo 10 gives the digit. 
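+///
+/// Worked example with the ICAO 9303 sample date "520727":
+/// 5*7 + 2*3 + 0*1 + 7*7 + 2*3 + 7*1 = 103, and 103 % 10 = 3, so the check
+/// digit is b'3'.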
+fn compute_check_digit(data: &[u8]) -> u8 {
+    let weights = [7u32, 3, 1];
+    let sum: u32 = data
+        .iter()
+        .enumerate()
+        .map(|(i, &b)| {
+            let val = match b {
+                b'0'..=b'9' => (b - b'0') as u32,
+                b'A'..=b'Z' => (b - b'A' + 10) as u32,
+                _ => 0,
+            };
+            val * weights[i % 3]
+        })
+        .sum();
+    b'0' + (sum % 10) as u8
+}
+
+// ============================================================================
+// DG1 builder
+// ============================================================================
+
+/// Build a realistic DG1 (MRZ) with given birthdate and expiry dates.
+///
+/// The result is a 95-byte structure:
+/// - bytes 0..5: ASN.1 tag prefix (0x61 0x5B 0x5F 0x1F 0x58)
+/// - bytes 5..95: 90-byte TD3 (passport) MRZ with realistic fields
+///
+/// Birthdate and expiry are encoded as YYMMDD (6 ASCII digit bytes).
+/// MRZ line 1 (positions 0..44): document type, country, name.
+/// MRZ line 2 (positions 44..88): doc number, nationality, DOB, gender,
+/// expiry, optional data, and the composite check digit at position 87.
+/// The final two bytes (positions 88, 89) are zero fillers.
 pub fn dg1_bytes_with_birthdate_expiry_date(birthdate: &[u8; 6], expiry: &[u8; 6]) -> Vec<u8> {
-    let mut dg1 = vec![b'<'; MAX_DG1_SIZE];
-    let mrz_offset = 5;
-    dg1[mrz_offset + 57..mrz_offset + 57 + 6].copy_from_slice(birthdate);
-    dg1[mrz_offset + 65..mrz_offset + 65 + 6].copy_from_slice(expiry);
-    dg1[93] = 0;
-    dg1[94] = 0;
+    // ASN.1 tag prefix for DG1: Tag 0x61, Length 0x5B, then 0x5F 0x1F 0x58
+    let header: [u8; 5] = [0x61, 0x5b, 0x5f, 0x1f, 0x58];
+
+    let mut mrz = [b'<'; 90];
+
+    // --- MRZ Line 1 (44 chars) ---
+    // Document type
+    mrz[0] = b'P';
+    mrz[1] = b'<';
+    // Issuing country (Utopia – ICAO test code)
+    mrz[2..5].copy_from_slice(b"UTO");
+    // Name: DOE<<JOHN (surname<<given names), '<'-padded to position 43
+    mrz[5..14].copy_from_slice(b"DOE<<JOHN");
+
+    // --- MRZ Line 2 (44 chars, offset 44) ---
+    // Document number (9 chars, mock value) + check digit
+    mrz[44..53].copy_from_slice(b"P01234567");
+    mrz[53] = compute_check_digit(&mrz[44..53]);
+    // Nationality
+    mrz[54..57].copy_from_slice(b"UTO");
+    // Date of birth (YYMMDD) + check digit
+    mrz[57..63].copy_from_slice(birthdate);
+    mrz[63] = compute_check_digit(&mrz[57..63]);
+    // Gender
+    mrz[64] = b'M';
+    // Expiry date (YYMMDD) + check digit
+    mrz[65..71].copy_from_slice(expiry);
+    mrz[71] = compute_check_digit(&mrz[65..71]);
+    // Optional data (positions 72..86 stay '<') + check digit
+    mrz[86] = compute_check_digit(&mrz[72..86]);
+
+    // Composite check digit over the line-2 fields below
+    let composite_data: Vec<u8> = [
+        &mrz[44..54], // doc number + check
+        &mrz[57..64], // DOB + check
+        &mrz[65..72], // expiry + check
+        &mrz[72..87], // optional + check
+    ]
+    .concat();
+    mrz[87] = compute_check_digit(&composite_data);
+    // Positions 88, 89 are filler (null terminators per convention)
+    mrz[88] = 0;
+    mrz[89] = 0;
+
+    let mut dg1 = Vec::with_capacity(MAX_DG1_SIZE);
+    dg1.extend_from_slice(&header);
+    dg1.extend_from_slice(&mrz);
+    assert_eq!(dg1.len(), MAX_DG1_SIZE);
     dg1
 }
 
-/// Generate a synthetic SOD structure for the given DG1 and key pairs.
-pub fn generate_fake_sod(
+// ============================================================================
+// eContent builder (DER-encoded LDSSecurityObject)
+// ============================================================================
+
+/// Build a DER-encoded LDSSecurityObject containing the SHA-256 hash of DG1
+/// and a dummy DG2 hash, and return the encoded bytes.
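+///
+/// Rough shape of the encoded structure (illustrative, not byte-exact):
+///
+/// ```text
+/// LDSSecurityObject ::= SEQUENCE {
+///     version              0,
+///     hashAlgorithm        sha-256,
+///     dataGroupHashValues  SEQUENCE OF {
+///         { dataGroupNumber 1, dataGroupHashValue SHA-256(DG1) },
+///         { dataGroupNumber 2, dataGroupHashValue 0x01 * 32 }  -- dummy
+///     }
+/// }
+/// ```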
+fn build_econtent_bytes(dg1: &[u8]) -> Vec<u8> {
+    let dg1_hash = Sha256::digest(dg1);
+
+    let lds_security_object = LDSSecurityObject {
+        version: Integer::from(0),
+        hash_algorithm: AlgorithmIdentifier {
+            algorithm: oid_sha256(),
+            parameters: None,
+        },
+        data_group_hash_values: vec![
+            DataGroupHash {
+                data_group_number: Integer::from(1),
+                data_group_hash_value: OctetString::from(dg1_hash.to_vec()),
+            },
+            DataGroupHash {
+                data_group_number: Integer::from(2),
+                data_group_hash_value: OctetString::from(vec![0x01u8; 32]),
+            },
+        ]
+        .into(),
+        lds_version_info: None,
+    };
+
+    let econtent_bytes =
+        der::encode(&lds_security_object).expect("Failed to encode LDSSecurityObject");
+    assert!(
+        econtent_bytes.len() <= MAX_ECONTENT_SIZE,
+        "eContent DER ({} bytes) exceeds MAX_ECONTENT_SIZE ({})",
+        econtent_bytes.len(),
+        MAX_ECONTENT_SIZE
+    );
+    econtent_bytes
+}
+
+// ============================================================================
+// SignedAttributes builder (DER-encoded CMS Attribute SET)
+// ============================================================================
+
+/// Build a DER-encoded SET OF Attribute containing contentType and
+/// messageDigest, matching the reconstruction logic in
+/// `SOD::parse_signed_attrs`.
+fn build_signed_attrs_bytes(econtent_bytes: &[u8]) -> Vec<u8> {
+    let econtent_hash = Sha256::digest(econtent_bytes);
+
+    // contentType attribute: OID -> mRTDSignatureData
+    let content_type_value = der::encode(&oid_mrtd_signature_data()).expect("encode mRTD OID");
+    let content_type_attr = Attribute {
+        r#type: oid_content_type(),
+        values: [Any::new(content_type_value)].into(),
+    };
+
+    // messageDigest attribute: OCTET STRING of eContent hash
+    let digest_value =
+        der::encode(&OctetString::from(econtent_hash.to_vec())).expect("encode digest");
+    let message_digest_attr = Attribute {
+        r#type: oid_message_digest(),
+        values: [Any::new(digest_value)].into(),
+    };
+
+    // Encode as SET OF Attribute (BTreeSet ensures DER SET-OF ordering)
+    let signed_attrs_set: BTreeSet<Attribute> = [content_type_attr, message_digest_attr]
+        .into_iter()
+        .collect();
+
+    let signed_attrs_bytes =
+        der::encode(&signed_attrs_set).expect("Failed to encode SignedAttributes");
+    assert!(
+        signed_attrs_bytes.len() <= MAX_SIGNED_ATTRIBUTES_SIZE,
+        "SignedAttributes DER ({} bytes) exceeds MAX_SIGNED_ATTRIBUTES_SIZE ({})",
+        signed_attrs_bytes.len(),
+        MAX_SIGNED_ATTRIBUTES_SIZE
+    );
+    signed_attrs_bytes
+}
+
+// ============================================================================
+// TBS Certificate builder (DER-encoded X.509 TBSCertificate)
+// ============================================================================
+
+/// Build a Distinguished Name containing CN + C + O attributes.
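+///
+/// For example, `build_dn("Mock DSC", "UT", "Mock Passport Authority")`
+/// produces the DN `C=UT, O=Mock Passport Authority, CN=Mock DSC`, with one
+/// attribute per RDN in that order.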
+fn build_dn(cn: &str, country: &str, org: &str) -> rasn_pkix::Name { + use rasn_pkix::{AttributeTypeAndValue, Name, RdnSequence, RelativeDistinguishedName}; + + let cn_attr = AttributeTypeAndValue { + r#type: oid_common_name(), + value: Any::new( + der::encode(&rasn::types::Utf8String::from(cn.to_string())).expect("encode CN"), + ), + }; + let c_attr = AttributeTypeAndValue { + r#type: oid_country_name(), + value: Any::new( + der::encode( + &rasn::types::PrintableString::try_from(country.to_string()) + .expect("valid country"), + ) + .expect("encode C"), + ), + }; + let o_attr = AttributeTypeAndValue { + r#type: oid_organization_name(), + value: Any::new( + der::encode(&rasn::types::Utf8String::from(org.to_string())).expect("encode O"), + ), + }; + + // Each attribute in its own RDN (standard multi-RDN approach) + Name::RdnSequence(RdnSequence::from(vec![ + RelativeDistinguishedName::from(BTreeSet::from([c_attr])), + RelativeDistinguishedName::from(BTreeSet::from([o_attr])), + RelativeDistinguishedName::from(BTreeSet::from([cn_attr])), + ])) +} + +/// Build X.509 extensions for a DSC certificate: +/// - basicConstraints (critical, cA=false) +/// - keyUsage (critical, digitalSignature) +/// - subjectKeyIdentifier (non-critical, SHA-256 of public key) +fn build_dsc_extensions(dsc_pub: &RsaPublicKey) -> rasn_pkix::Extensions { + use rasn_pkix::Extension; + + // basicConstraints: SEQUENCE { BOOLEAN FALSE } + // DER: 30 03 01 01 00 (SEQUENCE { BOOLEAN false }) + let basic_constraints_value = vec![0x30, 0x03, 0x01, 0x01, 0x00]; + let basic_constraints = Extension { + extn_id: oid_basic_constraints(), + critical: true, + extn_value: OctetString::from(basic_constraints_value), + }; + + // keyUsage: BIT STRING with digitalSignature (bit 0) + // DER: 03 02 07 80 (BIT STRING, 7 unused bits, byte 0x80 = bit 0 set) + let key_usage_value = vec![0x03, 0x02, 0x07, 0x80]; + let key_usage = Extension { + extn_id: oid_key_usage(), + critical: true, + extn_value: OctetString::from(key_usage_value), + }; + + // subjectKeyIdentifier: OCTET STRING wrapping SHA-256 of DER public key + let pub_der = dsc_pub.to_pkcs1_der().expect("pkcs1 der").to_vec(); + let ski_hash = Sha256::digest(&pub_der); + // Wrap in OCTET STRING: 04 20 <32 bytes> + let mut ski_value = vec![0x04, 0x20]; + ski_value.extend_from_slice(&ski_hash); + let ski = Extension { + extn_id: oid_subject_key_identifier(), + critical: false, + extn_value: OctetString::from(ski_value), + }; + + vec![basic_constraints, key_usage, ski].into() +} + +/// Build a DER-encoded rasn_pkix::TbsCertificate for a DSC signed by CSCA. 
+fn build_tbs_certificate_bytes(dsc_pub: &RsaPublicKey) -> Vec<u8> {
+    use rasn_pkix::{
+        SubjectPublicKeyInfo as RasnSpki, TbsCertificate as RasnTbs, Validity, Version,
+    };
+
+    // NULL parameters for RSA algorithms
+    let null_params = der::encode(&()).expect("encode NULL");
+
+    let spki = RasnSpki {
+        algorithm: AlgorithmIdentifier {
+            algorithm: oid_rsa_encryption(),
+            parameters: Some(Any::new(null_params.clone())),
+        },
+        subject_public_key: BitString::from_vec(
+            dsc_pub.to_pkcs1_der().expect("pkcs1 der").to_vec(),
+        ),
+    };
+
+    // Validity: from 5 years ago to 5 years from now
+    let now = chrono::Utc::now();
+    let five_years_secs: i64 = 5 * 365 * 24 * 60 * 60;
+    let not_before_ts = now.timestamp() - five_years_secs;
+    let not_after_ts = now.timestamp() + five_years_secs;
+
+    let not_before_dt =
+        chrono::DateTime::from_timestamp(not_before_ts, 0).expect("valid timestamp");
+    let not_after_dt = chrono::DateTime::from_timestamp(not_after_ts, 0).expect("valid timestamp");
+
+    let validity = Validity {
+        not_before: rasn_pkix::Time::Utc(not_before_dt),
+        not_after: rasn_pkix::Time::Utc(not_after_dt),
+    };
+
+    let tbs = RasnTbs {
+        version: Version::V3,
+        serial_number: Integer::from(2),
+        signature: AlgorithmIdentifier {
+            algorithm: oid_sha256_with_rsa(),
+            parameters: Some(Any::new(null_params)),
+        },
+        issuer: build_dn("Mock CSCA", "UT", "Mock Passport Authority"),
+        validity,
+        subject: build_dn("Mock DSC", "UT", "Mock Passport Authority"),
+        subject_public_key_info: spki,
+        issuer_unique_id: None,
+        subject_unique_id: None,
+        extensions: Some(build_dsc_extensions(dsc_pub)),
+    };
+
+    der::encode(&tbs).expect("Failed to encode TbsCertificate")
+}
+
+// ============================================================================
+// SOD assembly
+// ============================================================================
+
+/// Core SOD builder: given DG1 data and a pre-built TBS byte vector, construct
+/// the full SOD with all cryptographic signatures and proper DER-encoded
+/// internal structures.
+fn build_sod_from_tbs(
     dg1: &[u8],
     dsc_priv: &RsaPrivateKey,
     dsc_pub: &RsaPublicKey,
     csca_priv: &RsaPrivateKey,
-    _csca_pub: &RsaPublicKey,
+    tbs_bytes: Vec<u8>,
 ) -> SOD {
-    // Hash DG1 and build eContent
+    // --- eContent: DER-encoded LDSSecurityObject ---
+    let econtent_bytes = build_econtent_bytes(dg1);
+
     let dg1_hash = Sha256::digest(dg1);
-    let econtent_bytes = dg1_hash.to_vec();
     let mut dg_map = HashMap::new();
     dg_map.insert(1u32, Binary::from_slice(&dg1_hash));
+    dg_map.insert(2u32, Binary::from_slice(&vec![0x01u8; 32]));
     let data_group_hashes = DataGroupHashValues { values: dg_map };
     let econtent = EContent {
         version: 0,
@@ -57,19 +435,24 @@ pub fn generate_fake_sod(
         e_content: econtent,
     };
 
-    // Hash eContent and build SignedAttributes
+    // --- SignedAttributes: DER-encoded SET OF Attribute ---
+    let signed_attr_bytes = build_signed_attrs_bytes(&econtent_bytes);
+
     let econtent_hash = Sha256::digest(&econtent_bytes);
-    let signed_attr_bytes = econtent_hash.to_vec();
     let signed_attrs = SignedAttrs {
-        content_type: "data".to_string(),
-        message_digest: Binary::from_slice(&econtent_hash),
+        content_type: "mRTDSignatureData".to_string(),
+        message_digest: Binary::from_slice(
+            // Store the raw messageDigest value (OCTET STRING DER of the hash)
+            &der::encode(&OctetString::from(econtent_hash.to_vec())).expect("encode digest"),
+        ),
         signing_time: None,
         bytes: Binary::from_slice(&signed_attr_bytes),
     };
 
-    // Sign SignedAttributes with DSC private key
+    // --- Sign SignedAttributes with DSC private key ---
     let dsc_signer = SigningKey::<Sha256>::new(dsc_priv.clone());
     let dsc_signature = dsc_signer.sign(&signed_attr_bytes).to_bytes();
+
     let signer_info = SignerInfo {
         version: 1,
         signed_attrs,
@@ -85,33 +468,37 @@ pub fn generate_fake_sod(
         },
     };
 
-    // Build fake DSC certificate (TBS = DER of DSC public key)
+    // --- Build DSC certificate with real TBS ---
     let dsc_pub_der = dsc_pub.to_pkcs1_der().expect("pkcs1 der").to_vec();
-    let tbs_bytes = dsc_pub_der.clone();
+    // CSCA signs the DER-encoded TBS bytes
     let csca_signer = SigningKey::<Sha256>::new(csca_priv.clone());
     let csca_signature = csca_signer.sign(&tbs_bytes).to_bytes();
 
     let dsc_cert = DSC {
         tbs: TbsCertificate {
-            version: 1,
-            serial_number: Binary::from_slice(&[1]),
+            version: 2, // v3
+            serial_number: Binary::from_slice(&[2]),
             signature_algorithm: SignatureAlgorithm {
                 name: SignatureAlgorithmName::Sha256WithRsaEncryption,
                 parameters: None,
             },
-            issuer: "CSCA".to_string(),
+            issuer: "countryName=UT, organizationName=Mock Passport Authority, \
+                     commonName=Mock CSCA"
+                .to_string(),
             validity_not_before: chrono::Utc::now()
                 - chrono::Duration::from_std(std::time::Duration::from_secs(
                     5 * 365 * 24 * 60 * 60,
                 ))
-                .expect("valid duration before 5 years"), // before 5 year date
+                .expect("valid duration"),
             validity_not_after: chrono::Utc::now()
                 + chrono::Duration::from_std(std::time::Duration::from_secs(
                     5 * 365 * 24 * 60 * 60,
                 ))
-                .expect("valid duration after 5 years"), // after 5 years
-            subject: "DSC".to_string(),
+                .expect("valid duration"),
+            subject: "countryName=UT, organizationName=Mock Passport Authority, \
+                      commonName=Mock DSC"
+                .to_string(),
             subject_public_key_info: SubjectPublicKeyInfo {
                 signature_algorithm: SignatureAlgorithm {
                     name: SignatureAlgorithmName::RsaEncryption,
@@ -132,7 +519,7 @@ pub fn generate_fake_sod(
     };
 
     SOD {
-        version: 1,
+        version: 3,
         digest_algorithms: vec![DigestAlgorithm::SHA256],
         encap_content_info,
         signer_info,
@@ -141,16 +528,182 @@ pub fn generate_fake_sod(
     }
 }
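Both signatures produced above are RSA PKCS#1 v1.5 over SHA-256 digests: the DSC key signs the DER-encoded SignedAttributes, and the CSCA key signs the DER-encoded TBS certificate. A minimal round-trip sketch using the same `rsa` crate calls as this builder and its tests (the helper name `sign_and_verify` is ours, not part of the crate):

```rust
use rsa::{
    pkcs1v15::SigningKey,
    signature::{SignatureEncoding, Signer},
    Pkcs1v15Sign, RsaPrivateKey,
};
use sha2::{Digest, Sha256};

/// Sign `msg` with `priv_key` and verify against the derived public key,
/// mirroring the DSC-over-SignedAttributes and CSCA-over-TBS legs.
fn sign_and_verify(priv_key: &RsaPrivateKey, msg: &[u8]) -> bool {
    // Signing hashes internally (SHA-256) and applies PKCS#1 v1.5 padding.
    let signer = SigningKey::<Sha256>::new(priv_key.clone());
    let signature = signer.sign(msg).to_bytes();

    // Verification takes the digest explicitly, as in the tests below.
    let digest = Sha256::digest(msg);
    priv_key
        .to_public_key()
        .verify(Pkcs1v15Sign::new::<Sha256>(), &digest, &signature)
        .is_ok()
}
```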
+/// Generate a synthetic SOD with proper DER-encoded internal structures.
+///
+/// The SOD contains:
+/// - eContent: DER-encoded LDSSecurityObject with DG1 + DG2 hashes
+/// - SignedAttributes: DER-encoded CMS Attribute SET (contentType +
+///   messageDigest)
+/// - TBS Certificate: DER-encoded X.509 TBSCertificate with extensions
+/// - All proper RSA signatures (DSC signs signedAttrs, CSCA signs TBS)
+pub fn generate_sod(
+    dg1: &[u8],
+    dsc_priv: &RsaPrivateKey,
+    dsc_pub: &RsaPublicKey,
+    csca_priv: &RsaPrivateKey,
+    _csca_pub: &RsaPublicKey,
+) -> SOD {
+    let tbs_bytes = build_tbs_certificate_bytes(dsc_pub);
+    build_sod_from_tbs(dg1, dsc_priv, dsc_pub, csca_priv, tbs_bytes)
+}
+
+/// Generate a synthetic SOD with a TBS certificate padded to
+/// `tbs_actual_len` bytes.
+///
+/// First builds a realistic DER-encoded TBSCertificate, then extends it
+/// with a large opaque X.509 extension to reach the target length.
+/// If the base TBS already exceeds `tbs_actual_len`, it is used as-is.
+pub fn generate_sod_with_padded_tbs(
+    dg1: &[u8],
+    dsc_priv: &RsaPrivateKey,
+    dsc_pub: &RsaPublicKey,
+    csca_priv: &RsaPrivateKey,
+    _csca_pub: &RsaPublicKey,
+    tbs_actual_len: usize,
+) -> SOD {
+    let base_tbs = build_tbs_certificate_bytes(dsc_pub);
+
+    let tbs_bytes = if base_tbs.len() >= tbs_actual_len {
+        base_tbs
+    } else {
+        // Rebuild with a padding extension to hit the target length.
+        // We compute how many extra bytes we need and add a dummy extension.
+        build_padded_tbs_certificate_bytes(dsc_pub, tbs_actual_len)
+    };
+
+    build_sod_from_tbs(dg1, dsc_priv, dsc_pub, csca_priv, tbs_bytes)
+}
+
+/// Build a TBS certificate with an extra padding extension to reach the target
+/// size.
+fn build_padded_tbs_certificate_bytes(dsc_pub: &RsaPublicKey, target_len: usize) -> Vec<u8> {
+    use rasn_pkix::{
+        Extension, SubjectPublicKeyInfo as RasnSpki, TbsCertificate as RasnTbs, Validity, Version,
+    };
+
+    let null_params = der::encode(&()).expect("encode NULL");
+
+    let spki = RasnSpki {
+        algorithm: AlgorithmIdentifier {
+            algorithm: oid_rsa_encryption(),
+            parameters: Some(Any::new(null_params.clone())),
+        },
+        subject_public_key: BitString::from_vec(
+            dsc_pub.to_pkcs1_der().expect("pkcs1 der").to_vec(),
+        ),
+    };
+
+    let now = chrono::Utc::now();
+    let five_years_secs: i64 = 5 * 365 * 24 * 60 * 60;
+    let not_before_dt =
+        chrono::DateTime::from_timestamp(now.timestamp() - five_years_secs, 0).expect("valid");
+    let not_after_dt =
+        chrono::DateTime::from_timestamp(now.timestamp() + five_years_secs, 0).expect("valid");
+
+    // Start with base extensions
+    let extensions = build_dsc_extensions(dsc_pub);
+
+    // Build once without padding to measure base size
+    let base_tbs = RasnTbs {
+        version: Version::V3,
+        serial_number: Integer::from(2),
+        signature: AlgorithmIdentifier {
+            algorithm: oid_sha256_with_rsa(),
+            parameters: Some(Any::new(null_params.clone())),
+        },
+        issuer: build_dn("Mock CSCA", "UT", "Mock Passport Authority"),
+        validity: Validity {
+            not_before: rasn_pkix::Time::Utc(not_before_dt),
+            not_after: rasn_pkix::Time::Utc(not_after_dt),
+        },
+        subject: build_dn("Mock DSC", "UT", "Mock Passport Authority"),
+        subject_public_key_info: spki.clone(),
+        issuer_unique_id: None,
+        subject_unique_id: None,
+        extensions: Some(extensions.clone()),
+    };
+    let base_encoded = der::encode(&base_tbs).expect("encode base TBS");
+    let base_len = base_encoded.len();
+
+    if base_len >= target_len {
+        return base_encoded;
+    }
+
+    // Helper closure to build a TBS with given extensions
+    let build_tbs = |exts: rasn_pkix::Extensions| -> Vec<u8> {
+        let tbs = RasnTbs {
+            version: Version::V3,
+            serial_number: Integer::from(2),
+            signature: AlgorithmIdentifier {
+                algorithm: oid_sha256_with_rsa(),
+                parameters: Some(Any::new(null_params.clone())),
+            },
+            issuer: build_dn("Mock CSCA", "UT", "Mock Passport Authority"),
+            validity: Validity {
+                not_before: rasn_pkix::Time::Utc(not_before_dt),
+                not_after: rasn_pkix::Time::Utc(not_after_dt),
+            },
+            subject: build_dn("Mock DSC", "UT", "Mock Passport Authority"),
+            subject_public_key_info: spki.clone(),
+            issuer_unique_id: None,
+            subject_unique_id: None,
+            extensions: Some(exts),
+        };
+        der::encode(&tbs).expect("encode padded TBS")
+    };
+
+    // Use a private-use OID for the padding extension: 1.3.6.1.4.1.99999.1
+    let padding_oid = ObjectIdentifier::new_unchecked(vec![1, 3, 6, 1, 4, 1, 99999, 1].into());
+
+    // Iterative approach: add padding, encode, check length, adjust
+    let mut padding_size = target_len.saturating_sub(base_len);
+    for _ in 0..5 {
+        let padding_data = vec![0x42u8; padding_size];
+        let padding_ext = Extension {
+            extn_id: padding_oid.clone(),
+            critical: false,
+            extn_value: OctetString::from(padding_data),
+        };
+
+        let mut exts: Vec<Extension> = extensions.to_vec();
+        exts.push(padding_ext);
+
+        let encoded = build_tbs(exts.into());
+        if encoded.len() == target_len {
+            return encoded;
+        } else if encoded.len() > target_len {
+            padding_size -= encoded.len() - target_len;
+        } else {
+            padding_size += target_len - encoded.len();
+        }
+    }
+
+    // Final attempt with current padding_size
+    let padding_data = vec![0x42u8; padding_size];
+    let padding_ext = Extension {
+        extn_id: padding_oid,
+        critical: false,
+        extn_value: OctetString::from(padding_data),
+    };
+    let mut final_exts: Vec<Extension> = extensions.to_vec();
+    final_exts.push(padding_ext);
+
+    build_tbs(final_exts.into())
+}
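The loop converges in a few iterations because DER lengths are not linear in the padding size: once a TLV's content crosses a length-form boundary, its header grows too. A small standalone sketch of the effect (`der_overhead` is a hypothetical helper, not part of this crate):

```rust
/// Bytes a DER TLV adds on top of its content: one identifier byte plus a
/// short- or long-form length field.
fn der_overhead(content_len: usize) -> usize {
    let length_field = match content_len {
        0..=0x7f => 1,       // short form: single length byte
        0x80..=0xff => 2,    // 0x81 + one length byte
        0x100..=0xffff => 3, // 0x82 + two length bytes
        _ => 4,              // 0x83 + three length bytes
    };
    1 + length_field
}

fn main() {
    // Growing the content from 127 to 128 bytes grows the whole TLV by 2:
    // the length field switches from short form to long form.
    assert_eq!(127 + der_overhead(127), 129);
    assert_eq!(128 + der_overhead(128), 131);
}
```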
+// ============================================================================
+// Tests
+// ============================================================================
+
 #[cfg(test)]
 mod tests {
     use {
         super::*,
         crate::{
             mock_keys::{MOCK_CSCA_PRIV_KEY_B64, MOCK_DSC_PRIV_KEY_B64},
-            PassportReader,
+            PassportReader, SaveToml,
         },
         base64::{engine::general_purpose::STANDARD, Engine as _},
-        chrono::Utc,
         rsa::pkcs8::DecodePrivateKey,
     };
 
@@ -170,13 +723,158 @@ mod tests {
     #[test]
     fn test_generate_and_validate_sod() {
+        use crate::{MerkleAge720Config, MerkleAgeBaseConfig};
+
         let csca_priv = load_csca_mock_private_key();
         let csca_pub = csca_priv.to_public_key();
         let dsc_priv = load_dsc_mock_private_key();
         let dsc_pub = dsc_priv.to_public_key();
 
         let dg1 = dg1_bytes_with_birthdate_expiry_date(b"070101", b"320101");
-        let sod = generate_fake_sod(&dg1, &dsc_priv, &dsc_pub, &csca_priv, &csca_pub);
+        let sod = generate_sod(&dg1, &dsc_priv, &dsc_pub, &csca_priv, &csca_pub);
+
+        let reader = PassportReader::new(Binary::from_slice(&dg1), sod, true, Some(csca_pub));
+        let csca_idx = reader.validate().expect("validation failed");
+
+        let config = MerkleAge720Config {
+            base: MerkleAgeBaseConfig {
+                current_date: 1735689600,
+                min_age_required: 18,
+                max_age_required: 0,
+                ..Default::default()
+            },
+        };
+
+        let inputs = reader
+            .to_merkle_age_720_inputs(csca_idx, config)
+            .expect("to merkle age 720 inputs");
+
+        // Verify the inputs can be saved (exercises TOML serialization)
+        inputs
+            .add_dsc
+            .save_to_toml_file("/dev/null")
+            .expect("save add_dsc toml");
+    }
+
+    #[test]
+    fn test_dg1_has_proper_asn1_header() {
+        let dg1 = dg1_bytes_with_birthdate_expiry_date(b"070101", b"320101");
+        assert_eq!(dg1.len(), MAX_DG1_SIZE);
+        // ASN.1 tag prefix
+        assert_eq!(&dg1[0..5], &[0x61, 0x5b, 0x5f, 0x1f, 0x58]);
+        // Document type
+        assert_eq!(&dg1[5..7], b"P<");
+        // Issuing country
+        assert_eq!(&dg1[7..10], b"UTO");
+        // Birthdate at correct offset (5 + 57 = 62)
+        assert_eq!(&dg1[62..68], b"070101");
+        // Expiry at correct offset (5 + 65 = 70)
+        assert_eq!(&dg1[70..76], b"320101");
+    }
+
+    #[test]
+    fn test_econtent_is_valid_der() {
+        let dg1 = dg1_bytes_with_birthdate_expiry_date(b"070101", b"320101");
+        let econtent_bytes = build_econtent_bytes(&dg1);
+
+        // Should be parseable as LDSSecurityObject
+        let parsed: LDSSecurityObject =
+            rasn::der::decode(&econtent_bytes).expect("should parse as LDSSecurityObject");
+        assert_eq!(parsed.version, Integer::from(0));
+        assert_eq!(parsed.data_group_hash_values.len(), 2);
+
+        // DG1 hash should match
+        let expected_hash = Sha256::digest(&dg1);
+        let dg1_entry = parsed
+            .data_group_hash_values
+            .iter()
+            .find(|dg| dg.data_group_number == Integer::from(1))
+            .expect("DG1 entry");
+        assert_eq!(
+            dg1_entry.data_group_hash_value.as_ref(),
+            expected_hash.as_slice()
+        );
+    }
+
+    #[test]
+    fn test_signed_attrs_is_valid_der() {
+        use crate::parser::utils::oid_to_string;
+
+        let dg1 = dg1_bytes_with_birthdate_expiry_date(b"070101", b"320101");
+        let econtent_bytes = build_econtent_bytes(&dg1);
+        let signed_attrs_bytes = build_signed_attrs_bytes(&econtent_bytes);
+
+        // Should be parseable as BTreeSet<Attribute>
+        let parsed: BTreeSet<Attribute> =
+            rasn::der::decode(&signed_attrs_bytes).expect("should parse as SET OF Attribute");
+        assert_eq!(parsed.len(), 2);
+
+        // Should contain contentType and messageDigest
+        let oids: Vec<String> = parsed.iter().map(|a| oid_to_string(&a.r#type)).collect();
+        assert!(
+            oids.iter().any(|o| o == "1.2.840.113549.1.9.3"),
+            "contentType OID not found in {:?}",
+            oids
+        );
+        assert!(
+            oids.iter().any(|o| o == "1.2.840.113549.1.9.4"),
+            "messageDigest OID not found in {:?}",
+            oids
+        );
+    }
+
+    #[test]
+    fn test_tbs_certificate_is_valid_der() {
+        let dsc_priv = load_dsc_mock_private_key();
+        let dsc_pub = dsc_priv.to_public_key();
+        let tbs_bytes = build_tbs_certificate_bytes(&dsc_pub);
+
+        // Should be parseable by x509-parser (needs to be wrapped in a
+        // Certificate for full parsing, but the raw bytes should at least
+        // be valid DER SEQUENCE)
+        assert!(
+            tbs_bytes.len() <= 720,
+            "TBS must fit in 720 bytes, got {}",
+            tbs_bytes.len()
+        );
+        assert!(
+            tbs_bytes.len() > 200,
+            "TBS should be >200 bytes for RSA-2048, got {}",
+            tbs_bytes.len()
+        );
+
+        // The DSC modulus should be findable inside the TBS
+        use rsa::traits::PublicKeyParts;
+        let modulus_bytes = dsc_pub.n().to_bytes_be();
+        let offset = tbs_bytes
+            .windows(modulus_bytes.len())
+            .position(|w| w == modulus_bytes.as_slice());
+        assert!(
+            offset.is_some(),
+            "DSC modulus should be findable in TBS bytes"
+        );
+    }
+
+    #[test]
+    fn test_padded_tbs_reaches_target_length() {
+        let csca_priv = load_csca_mock_private_key();
+        let csca_pub = csca_priv.to_public_key();
+        let dsc_priv = load_dsc_mock_private_key();
+        let dsc_pub = dsc_priv.to_public_key();
+
+        let dg1 = dg1_bytes_with_birthdate_expiry_date(b"070101", b"320101");
+        let sod =
+            generate_sod_with_padded_tbs(&dg1, &dsc_priv, &dsc_pub, &csca_priv, &csca_pub, 850);
+
+        let tbs_len = sod.certificate.tbs.bytes.len();
+        // Should be close to 850 (within a few bytes due to DER length encoding)
+        assert!(
+            tbs_len >= 845 && tbs_len <= 855,
+            "Padded TBS should be ~850 bytes, got {}",
+            tbs_len
+        );
+
+        // Should still validate
         let reader = PassportReader {
             dg1: Binary::from_slice(&dg1),
             sod,
@@ -184,15 +882,139 @@ mod tests {
             csca_pubkey: Some(csca_pub),
         };
         assert!(reader.validate().is_ok());
+    }
 
-        let current_date = Utc::now();
-        let current_timestamp = current_date.timestamp() as u64;
+    #[test]
+    fn test_check_digit_icao() {
+        // Known ICAO example: "L898902C3" -> check digit 6
+        assert_eq!(compute_check_digit(b"L898902C3"), b'6');
+        // Numeric-only input: the result must still be an ASCII digit
+        let cd = compute_check_digit(b"881112");
+        assert!(cd >= b'0' && cd <= b'9');
+    }
 
-        let inputs = reader
-            .to_circuit_inputs(current_timestamp, 18, 70, 0)
-            .expect("to circuit inputs");
-        let _toml_output = inputs.to_toml_string();
+    #[test]
+    fn test_size_constraints() {
+        let dg1 = dg1_bytes_with_birthdate_expiry_date(b"070101", b"320101");
+
+        let econtent_bytes = build_econtent_bytes(&dg1);
+        assert!(econtent_bytes.len() <= MAX_ECONTENT_SIZE);
+
+        let signed_attrs_bytes = build_signed_attrs_bytes(&econtent_bytes);
+        assert!(signed_attrs_bytes.len() <= MAX_SIGNED_ATTRIBUTES_SIZE);
+
+        let dsc_priv = load_dsc_mock_private_key();
+        let dsc_pub = dsc_priv.to_public_key();
+        let tbs_bytes = build_tbs_certificate_bytes(&dsc_pub);
+        assert!(tbs_bytes.len() <= 720);
+    }
+
+    #[test]
+    fn test_roundtrip_hash_chain_and_components() {
+        use rsa::{pkcs1::DecodeRsaPublicKey, Pkcs1v15Sign, RsaPublicKey as RsaPub};
+
+        let csca_priv = load_csca_mock_private_key();
+        let csca_pub = csca_priv.to_public_key();
+        let dsc_priv = load_dsc_mock_private_key();
+        let dsc_pub = dsc_priv.to_public_key();
+
+        let dg1 = dg1_bytes_with_birthdate_expiry_date(b"070101", b"320101");
+        let sod = generate_sod(&dg1, &dsc_priv, &dsc_pub, &csca_priv, &csca_pub);
+
+        // 1. eContent bytes are valid DER-encoded LDSSecurityObject
+        let econtent_bytes = sod.encap_content_info.e_content.bytes.as_bytes();
+        let parsed_lds: LDSSecurityObject =
+            rasn::der::decode(econtent_bytes).expect("eContent should parse as LDSSecurityObject");
+
+        // 2. DG1 hash in LDSSecurityObject matches SHA-256(dg1)
+        let dg1_hash = Sha256::digest(&dg1);
+        let dg1_entry = parsed_lds
+            .data_group_hash_values
+            .iter()
+            .find(|dg| dg.data_group_number == Integer::from(1))
+            .expect("DG1 hash entry");
+        assert_eq!(
+            dg1_entry.data_group_hash_value.as_ref(),
+            dg1_hash.as_slice(),
+            "DG1 hash mismatch in eContent"
+        );
+
+        // 3. SignedAttributes bytes are valid DER-encoded SET OF Attribute
+        let signed_attrs_bytes = sod.signer_info.signed_attrs.bytes.as_bytes();
+        let parsed_attrs: BTreeSet<Attribute> = rasn::der::decode(signed_attrs_bytes)
+            .expect("SignedAttributes should parse as SET OF Attribute");
+        assert_eq!(parsed_attrs.len(), 2);
+
+        // 4. messageDigest in SignedAttributes == SHA-256(eContent bytes)
+        let econtent_hash = Sha256::digest(econtent_bytes);
+        let msg_digest_attr = parsed_attrs
+            .iter()
+            .find(|a| {
+                let oid = crate::parser::utils::oid_to_string(&a.r#type);
+                oid == "1.2.840.113549.1.9.4"
+            })
+            .expect("messageDigest attribute");
+        let msg_digest_value = msg_digest_attr.values.first().expect("digest value");
+        let parsed_digest: OctetString =
+            rasn::der::decode(msg_digest_value.as_bytes()).expect("parse OCTET STRING");
+        assert_eq!(
+            parsed_digest.as_ref(),
+            econtent_hash.as_slice(),
+            "eContent hash mismatch in SignedAttributes"
+        );
+
+        // 5. DSC signature over SignedAttributes verifies with DSC public key
+        let dsc_pub_der = sod
+            .certificate
+            .tbs
+            .subject_public_key_info
+            .subject_public_key
+            .as_bytes();
+        let recovered_dsc_pub = RsaPub::from_pkcs1_der(dsc_pub_der).expect("parse DSC public key");
+        let signed_attr_hash = Sha256::digest(signed_attrs_bytes);
+        recovered_dsc_pub
+            .verify(
+                Pkcs1v15Sign::new::<Sha256>(),
+                signed_attr_hash.as_slice(),
+                sod.signer_info.signature.as_bytes(),
+            )
+            .expect("DSC signature over SignedAttributes should verify");
+
+        // 6. CSCA signature over TBS certificate verifies with CSCA public key
+        let tbs_bytes = sod.certificate.tbs.bytes.as_bytes();
+        let tbs_hash = Sha256::digest(tbs_bytes);
+        csca_pub
+            .verify(
+                Pkcs1v15Sign::new::<Sha256>(),
+                tbs_hash.as_slice(),
+                sod.certificate.signature.as_bytes(),
+            )
+            .expect("CSCA signature over TBS should verify");
+
+        // 7. TBS bytes contain the DSC modulus at a findable offset
+        use rsa::traits::PublicKeyParts;
+        let modulus = dsc_pub.n().to_bytes_be();
+        let offset = tbs_bytes
+            .windows(modulus.len())
+            .position(|w| w == modulus.as_slice());
+        assert!(offset.is_some(), "DSC modulus should be findable in TBS");
+
+        // 8. DG1 hash is findable in eContent bytes
+        let dg1_hash_offset = econtent_bytes
+            .windows(dg1_hash.len())
+            .position(|w| w == dg1_hash.as_slice());
+        assert!(
+            dg1_hash_offset.is_some(),
+            "DG1 hash should be findable in eContent bytes"
+        );
-        println!("{}", _toml_output);
+        // 9. eContent hash is findable in SignedAttributes bytes
+        let econtent_hash_offset = signed_attrs_bytes
+            .windows(econtent_hash.len())
+            .position(|w| w == econtent_hash.as_slice());
+        assert!(
+            econtent_hash_offset.is_some(),
+            "eContent hash should be findable in SignedAttributes bytes"
+        );
     }
 }
diff --git a/playground/passport-input-gen/src/parser/binary.rs b/playground/passport-input-gen/src/parser/binary.rs
index 2755ac7d..c4f528f6 100644
--- a/playground/passport-input-gen/src/parser/binary.rs
+++ b/playground/passport-input-gen/src/parser/binary.rs
@@ -1,6 +1,6 @@
 use base64::{engine::general_purpose, Engine as _};
 
-#[derive(Debug, Clone)]
+#[derive(Debug, Clone, PartialEq)]
 pub struct Binary {
     pub data: Vec<u8>,
 }
@@ -45,18 +45,28 @@ impl Binary {
         format!("0x{}", hex::encode(&self.data))
     }
 
-    pub fn equals(&self, other: &Binary) -> bool {
-        self.data.eq(&other.data)
-    }
-
     pub fn from_hex(hex_str: &str) -> Result<Binary, hex::FromHexError> {
         let data = hex::decode(hex_str)?;
         Ok(Binary::new(data))
     }
 }
 
-impl PartialEq for Binary {
-    fn eq(&self, other: &Self) -> bool {
-        self.data == other.data
+impl From<Vec<u8>> for Binary {
+    fn from(data: Vec<u8>) -> Self {
+        Binary { data }
+    }
+}
+
+impl From<&[u8]> for Binary {
+    fn from(data: &[u8]) -> Self {
+        Binary {
+            data: data.to_vec(),
+        }
     }
 }
+
+impl AsRef<[u8]> for Binary {
+    fn as_ref(&self) -> &[u8] {
+        &self.data
+    }
+}
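The new conversion impls make call sites terser. A short usage sketch (`Binary` is re-exported by the crate per the README; the path is assumed here):

```rust
use passport_input_gen::Binary;

fn main() {
    // From<Vec<u8>> and From<&[u8]> replace explicit constructors...
    let a: Binary = vec![0xdeu8, 0xad].into();
    let b: Binary = [0xdeu8, 0xad].as_slice().into();
    // ...and the derived PartialEq replaces the removed `equals` method.
    assert_eq!(a, b);
    // AsRef<[u8]> gives cheap read-only access to the underlying bytes.
    assert_eq!(a.as_ref(), &[0xdeu8, 0xad][..]);
}
```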
diff --git a/playground/passport-input-gen/src/parser/types.rs b/playground/passport-input-gen/src/parser/types.rs
index a129f804..7ca7fb90 100644
--- a/playground/passport-input-gen/src/parser/types.rs
+++ b/playground/passport-input-gen/src/parser/types.rs
@@ -14,7 +14,10 @@ pub const MAX_SIGNED_ATTRIBUTES_SIZE: usize = 200;
 pub const MAX_DG1_SIZE: usize = 95;
 pub const SIG_BYTES: usize = 256;
 pub const MAX_ECONTENT_SIZE: usize = 200;
-pub const MAX_TBS_SIZE: usize = 1300;
+pub const MAX_TBS_SIZE: usize = 720;
+pub const MAX_TBS_SIZE_1300: usize = 1300;
+pub const CHUNK1_SIZE: usize = 640;
+pub const TREE_DEPTH: usize = 24;
 
 #[derive(Debug, Clone)]
 pub enum DigestAlgorithm {
@@ -214,4 +217,11 @@ pub enum PassportError {
     InvalidDate(String),
     #[error("Unsupported digest algorithm: {0}")]
     UnsupportedDigestAlgorithm(String),
+    #[error("Invalid hex field '{field}': {source}")]
+    InvalidHexField {
+        field: String,
+        source: hex::FromHexError,
+    },
+    #[error("Invalid leaf_index '{value}': must be a non-negative integer")]
+    InvalidLeafIndex { value: String },
 }
diff --git a/playground/passport-input-gen/src/parser/utils.rs b/playground/passport-input-gen/src/parser/utils.rs
index 65b54f70..fc9bec21 100644
--- a/playground/passport-input-gen/src/parser/utils.rs
+++ b/playground/passport-input-gen/src/parser/utils.rs
@@ -35,7 +35,7 @@ pub fn oid_to_string(oid: &rasn::types::ObjectIdentifier) -> String {
 }
 
 pub fn strip_length_prefix(binary: &Binary) -> Binary {
-    if binary.slice(0, 2).equals(&Binary::new(vec![119, 130])) {
+    if binary.slice(0, 2) == Binary::new(vec![119, 130]) {
         binary.slice(4, binary.len())
     } else {
         binary.clone()
diff --git a/playground/passport-input-gen/src/partial_sha256.rs b/playground/passport-input-gen/src/partial_sha256.rs
new file mode 100644
index 00000000..4c4b3d71
--- /dev/null
+++ b/playground/passport-input-gen/src/partial_sha256.rs
@@ -0,0 +1,106 @@
+//! Partial SHA256 computation for the tbs_1300 circuit chain.
+//!
+//! Provides `sha256_start` which processes complete 64-byte blocks and returns
+//! the intermediate SHA256 state, matching Noir's `sha256_start()` exactly.
+
+use sha2::digest::{generic_array::GenericArray, typenum::U64};
+
+/// Standard SHA256 initialization vector.
+pub const SHA256_IV: [u32; 8] = [
+    0x6a09e667, 0xbb67ae85, 0x3c6ef372, 0xa54ff53a, 0x510e527f, 0x9b05688c, 0x1f83d9ab, 0x5be0cd19,
+];
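Stopping at a 64-byte boundary is what lets `t_add_dsc_hash_1300` hand its state to `t_add_dsc_verify_1300`: the second circuit resumes compression from the saved `[u32; 8]` words. A sketch of the resuming half under the same block-alignment assumption (`sha256_resume` is a hypothetical name; only `sha256_start` below is part of this module):

```rust
use sha2::digest::{generic_array::GenericArray, typenum::U64};

/// Resume SHA256 compression from a saved intermediate state over further
/// complete 64-byte blocks (still no padding or finalization).
fn sha256_resume(mut state: [u32; 8], rest: &[u8]) -> [u32; 8] {
    assert!(rest.len() % 64 == 0, "blocks must stay 64-byte aligned");
    for block_bytes in rest.chunks_exact(64) {
        let block: &GenericArray<u8, U64> = GenericArray::from_slice(block_bytes);
        sha2::compress256(&mut state, &[*block]);
    }
    state
}

// For any 64-byte-aligned split point k:
// sha256_resume(sha256_start(&msg[..k]), &msg[k..]) == sha256_start(msg)
```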
+/// Process `msg` (must be a multiple of 64 bytes) and return intermediate
+/// SHA256 state.
+///
+/// This matches the Noir `sha256_start(msg)` function from
+/// `partial_sha256/src/lib.nr`. It runs the SHA256 compression function over
+/// each 64-byte block without finalization (no padding or length encoding),
+/// returning the raw `[u32; 8]` state.
+pub fn sha256_start(msg: &[u8]) -> [u32; 8] {
+    assert!(
+        msg.len() % 64 == 0,
+        "Message size must be a multiple of 64 bytes, got {}",
+        msg.len()
+    );
+
+    let mut state = SHA256_IV;
+
+    for block_bytes in msg.chunks_exact(64) {
+        let block: &GenericArray<u8, U64> = GenericArray::from_slice(block_bytes);
+        sha2::compress256(&mut state, &[*block]);
+    }
+
+    state
+}
+
+#[cfg(test)]
+mod tests {
+    use super::*;
+
+    #[test]
+    fn test_sha256_start_single_block() {
+        // 64 bytes of 0x61 ('a')
+        let msg = [0x61u8; 64];
+        let state = sha256_start(&msg);
+        // Should differ from IV after processing
+        assert_ne!(state, SHA256_IV);
+    }
+
+    #[test]
+    #[should_panic(expected = "multiple of 64")]
+    fn test_sha256_start_invalid_size() {
+        let msg = [0u8; 63];
+        sha256_start(&msg);
+    }
+
+    #[test]
+    fn test_sha256_start_matches_benchmark() {
+        // chunk1 from benchmark-inputs/tbs_1300/t_add_dsc_hash_1300.toml
+        let chunk1: [u8; 640] = [
+            48, 130, 1, 10, 2, 130, 1, 1, 0, 175, 129, 169, 48, 75, 201, 148, 9, 44, 101, 74, 102,
+            208, 170, 80, 87, 167, 158, 254, 182, 81, 253, 14, 124, 113, 45, 48, 144, 36, 5, 248,
+            31, 93, 49, 75, 149, 184, 114, 188, 161, 128, 33, 61, 152, 20, 57, 11, 226, 80, 82, 80,
+            10, 209, 152, 144, 112, 231, 229, 31, 130, 146, 213, 195, 46, 163, 187, 24, 68, 79, 56,
+            124, 205, 49, 44, 70, 146, 221, 223, 68, 147, 89, 27, 16, 80, 111, 178, 109, 166, 123,
+            27, 29, 37, 120, 192, 202, 246, 6, 132, 249, 14, 254, 239, 204, 225, 127, 186, 207,
+            215, 178, 142, 60, 232, 125, 83, 126, 240, 68, 243, 79, 119, 91, 83, 101, 115, 122, 64,
+            30, 91, 221, 154, 108, 225, 93, 137, 17, 211, 26, 118, 192, 139, 66, 108, 134, 167,
+            187, 106, 71, 227, 24, 98, 192, 198, 153, 49, 239, 67, 212, 101, 101, 4, 76, 153, 212,
+            177, 159, 190, 78, 10, 224, 173, 157, 91, 210, 237, 178, 115, 123, 245, 116, 202, 34,
+            222, 78, 153, 81, 155, 248, 151, 112, 213, 128, 252, 173, 11, 165, 189, 128, 245, 216,
+            176, 34, 8, 89, 234, 4, 237, 161, 225, 16, 206, 84, 251, 235, 84, 100, 148, 53, 18,
+            159, 134, 159, 65, 197, 221, 254, 23, 118, 144, 109, 54, 163, 163, 137, 13, 21, 182,
+            72, 183, 104, 190, 89, 8, 248, 244, 38, 62, 248, 56, 97, 149, 68, 81, 218, 203, 203,
+            183, 2, 3, 1, 0, 1, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34,
+            35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56,
+            57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78,
+            79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99,
+            100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116,
+            117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133,
+            134, 135, 136, 137, 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, 148, 149, 150,
+            151, 152, 153, 154, 155, 156, 157, 158, 159, 160, 161, 162, 163, 164, 165, 166, 167,
+            168, 169, 170, 171, 172, 173, 174, 175, 176, 177, 178, 179, 180, 181, 182, 183, 184,
+            185, 186, 187, 188, 189, 190, 191, 192, 193, 194, 195, 196, 197, 198, 199, 200, 201,
+            202, 203, 204, 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, 217, 218,
+            219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 229, 230, 231, 232, 233, 234, 235,
+            236, 237, 238, 239, 240, 241, 242, 243, 244, 245, 246, 247, 248, 249, 250, 251, 252,
+            253, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23,
+            24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45,
+            46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63,
64, 65, 66, 67, + 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, + 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, + 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, + 126, 127, 128, 129, 130, 131, 132, 133, 134, + ]; + let state = sha256_start(&chunk1); + assert_eq!( + state, + [ + 3828948639, 4073271942, 433182166, 3811311365, 3566743306, 1923568254, 3109579459, + 1110735471 + ], + "sha256_start state mismatch with benchmark" + ); + } +} diff --git a/playground/passport-input-gen/src/poseidon2.rs b/playground/passport-input-gen/src/poseidon2.rs new file mode 100644 index 00000000..d952bceb --- /dev/null +++ b/playground/passport-input-gen/src/poseidon2.rs @@ -0,0 +1,320 @@ +//! Poseidon2 hash function for BN254, matching Noir's stdlib implementation. +//! +//! This is a self-contained implementation using the same round constants and +//! MDS matrices as Noir's Poseidon2. It implements a sponge construction with +//! width=4, rate=3, capacity=1 over BN254's scalar field. + +use { + ark_bn254::Fr, + ark_ff::{Field, PrimeField}, +}; + +fn fe(s: &str) -> Fr { + crate::commitment::parse_hex_to_field(s).expect("invalid Poseidon2 constant") +} + +// ============================================================================ +// Constants for t=4 (width-4 permutation) +// ============================================================================ +// Source: provekit/r1cs-compiler/src/poseidon2/constants.rs + +const NUM_PARTIAL_ROUNDS: usize = 56; +const RATE: usize = 3; + +fn load_diag() -> [Fr; 4] { + [ + fe("0x10dc6e9c006ea38b04b1e03b4bd9490c0d03f98929ca1d7fb56821fd19d3b6e7"), + fe("0x0c28145b6a44df3e0149b3d0a30b3bb599df9756d4dd9b84a86b38cfb45a740b"), + fe("0x00544b8338791518b2c7645a50392798b21f75bb60e3596170067d00141cac15"), + fe("0x222c01175718386f2e2e82eb122789e352e105a3b8fa852613bc534433ee428b"), + ] +} + +fn load_rc_full1() -> [[Fr; 4]; 4] { + [ + [ + fe("0x19b849f69450b06848da1d39bd5e4a4302bb86744edc26238b0878e269ed23e5"), + fe("0x265ddfe127dd51bd7239347b758f0a1320eb2cc7450acc1dad47f80c8dcf34d6"), + fe("0x199750ec472f1809e0f66a545e1e51624108ac845015c2aa3dfc36bab497d8aa"), + fe("0x157ff3fe65ac7208110f06a5f74302b14d743ea25067f0ffd032f787c7f1cdf8"), + ], + [ + fe("0x2e49c43c4569dd9c5fd35ac45fca33f10b15c590692f8beefe18f4896ac94902"), + fe("0x0e35fb89981890520d4aef2b6d6506c3cb2f0b6973c24fa82731345ffa2d1f1e"), + fe("0x251ad47cb15c4f1105f109ae5e944f1ba9d9e7806d667ffec6fe723002e0b996"), + fe("0x13da07dc64d428369873e97160234641f8beb56fdd05e5f3563fa39d9c22df4e"), + ], + [ + fe("0x0c009b84e650e6d23dc00c7dccef7483a553939689d350cd46e7b89055fd4738"), + fe("0x011f16b1c63a854f01992e3956f42d8b04eb650c6d535eb0203dec74befdca06"), + fe("0x0ed69e5e383a688f209d9a561daa79612f3f78d0467ad45485df07093f367549"), + fe("0x04dba94a7b0ce9e221acad41472b6bbe3aec507f5eb3d33f463672264c9f789b"), + ], + [ + fe("0x0a3f2637d840f3a16eb094271c9d237b6036757d4bb50bf7ce732ff1d4fa28e8"), + fe("0x259a666f129eea198f8a1c502fdb38fa39b1f075569564b6e54a485d1182323f"), + fe("0x28bf7459c9b2f4c6d8e7d06a4ee3a47f7745d4271038e5157a32fdf7ede0d6a1"), + fe("0x0a1ca941f057037526ea200f489be8d4c37c85bbcce6a2aeec91bd6941432447"), + ], + ] +} + +fn load_rc_full2() -> [[Fr; 4]; 4] { + [ + [ + fe("0x1797130f4b7a3e1777eb757bc6f287f6ab0fb85f6be63b09f3b16ef2b1405d38"), + fe("0x0a76225dc04170ae3306c85abab59e608c7f497c20156d4d36c668555decc6e5"), + fe("0x1fffb9ec1992d66ba1e77a7b93209af6f8fa76d48acb664796174b5326a31a5c"), + 
fe("0x25721c4fc15a3f2853b57c338fa538d85f8fbba6c6b9c6090611889b797b9c5f"), + ], + [ + fe("0x0c817fd42d5f7a41215e3d07ba197216adb4c3790705da95eb63b982bfcaf75a"), + fe("0x13abe3f5239915d39f7e13c2c24970b6df8cf86ce00a22002bc15866e52b5a96"), + fe("0x2106feea546224ea12ef7f39987a46c85c1bc3dc29bdbd7a92cd60acb4d391ce"), + fe("0x21ca859468a746b6aaa79474a37dab49f1ca5a28c748bc7157e1b3345bb0f959"), + ], + [ + fe("0x05ccd6255c1e6f0c5cf1f0df934194c62911d14d0321662a8f1a48999e34185b"), + fe("0x0f0e34a64b70a626e464d846674c4c8816c4fb267fe44fe6ea28678cb09490a4"), + fe("0x0558531a4e25470c6157794ca36d0e9647dbfcfe350d64838f5b1a8a2de0d4bf"), + fe("0x09d3dca9173ed2faceea125157683d18924cadad3f655a60b72f5864961f1455"), + ], + [ + fe("0x0328cbd54e8c0913493f866ed03d218bf23f92d68aaec48617d4c722e5bd4335"), + fe("0x2bf07216e2aff0a223a487b1a7094e07e79e7bcc9798c648ee3347dd5329d34b"), + fe("0x1daf345a58006b736499c583cb76c316d6f78ed6a6dffc82111e11a63fe412df"), + fe("0x176563472456aaa746b694c60e1823611ef39039b2edc7ff391e6f2293d2c404"), + ], + ] +} + +fn load_rc_partial() -> [Fr; NUM_PARTIAL_ROUNDS] { + [ + fe("0x0c6f8f958be0e93053d7fd4fc54512855535ed1539f051dcb43a26fd926361cf"), + fe("0x123106a93cd17578d426e8128ac9d90aa9e8a00708e296e084dd57e69caaf811"), + fe("0x26e1ba52ad9285d97dd3ab52f8e840085e8fa83ff1e8f1877b074867cd2dee75"), + fe("0x1cb55cad7bd133de18a64c5c47b9c97cbe4d8b7bf9e095864471537e6a4ae2c5"), + fe("0x1dcd73e46acd8f8e0e2c7ce04bde7f6d2a53043d5060a41c7143f08e6e9055d0"), + fe("0x011003e32f6d9c66f5852f05474a4def0cda294a0eb4e9b9b12b9bb4512e5574"), + fe("0x2b1e809ac1d10ab29ad5f20d03a57dfebadfe5903f58bafed7c508dd2287ae8c"), + fe("0x2539de1785b735999fb4dac35ee17ed0ef995d05ab2fc5faeaa69ae87bcec0a5"), + fe("0x0c246c5a2ef8ee0126497f222b3e0a0ef4e1c3d41c86d46e43982cb11d77951d"), + fe("0x192089c4974f68e95408148f7c0632edbb09e6a6ad1a1c2f3f0305f5d03b527b"), + fe("0x1eae0ad8ab68b2f06a0ee36eeb0d0c058529097d91096b756d8fdc2fb5a60d85"), + fe("0x179190e5d0e22179e46f8282872abc88db6e2fdc0dee99e69768bd98c5d06bfb"), + fe("0x29bb9e2c9076732576e9a81c7ac4b83214528f7db00f31bf6cafe794a9b3cd1c"), + fe("0x225d394e42207599403efd0c2464a90d52652645882aac35b10e590e6e691e08"), + fe("0x064760623c25c8cf753d238055b444532be13557451c087de09efd454b23fd59"), + fe("0x10ba3a0e01df92e87f301c4b716d8a394d67f4bf42a75c10922910a78f6b5b87"), + fe("0x0e070bf53f8451b24f9c6e96b0c2a801cb511bc0c242eb9d361b77693f21471c"), + fe("0x1b94cd61b051b04dd39755ff93821a73ccd6cb11d2491d8aa7f921014de252fb"), + fe("0x1d7cb39bafb8c744e148787a2e70230f9d4e917d5713bb050487b5aa7d74070b"), + fe("0x2ec93189bd1ab4f69117d0fe980c80ff8785c2961829f701bb74ac1f303b17db"), + fe("0x2db366bfdd36d277a692bb825b86275beac404a19ae07a9082ea46bd83517926"), + fe("0x062100eb485db06269655cf186a68532985275428450359adc99cec6960711b8"), + fe("0x0761d33c66614aaa570e7f1e8244ca1120243f92fa59e4f900c567bf41f5a59b"), + fe("0x20fc411a114d13992c2705aa034e3f315d78608a0f7de4ccf7a72e494855ad0d"), + fe("0x25b5c004a4bdfcb5add9ec4e9ab219ba102c67e8b3effb5fc3a30f317250bc5a"), + fe("0x23b1822d278ed632a494e58f6df6f5ed038b186d8474155ad87e7dff62b37f4b"), + fe("0x22734b4c5c3f9493606c4ba9012499bf0f14d13bfcfcccaa16102a29cc2f69e0"), + fe("0x26c0c8fe09eb30b7e27a74dc33492347e5bdff409aa3610254413d3fad795ce5"), + fe("0x070dd0ccb6bd7bbae88eac03fa1fbb26196be3083a809829bbd626df348ccad9"), + fe("0x12b6595bdb329b6fb043ba78bb28c3bec2c0a6de46d8c5ad6067c4ebfd4250da"), + fe("0x248d97d7f76283d63bec30e7a5876c11c06fca9b275c671c5e33d95bb7e8d729"), + fe("0x1a306d439d463b0816fc6fd64cc939318b45eb759ddde4aa106d15d9bd9baaaa"), + 
fe("0x28a8f8372e3c38daced7c00421cb4621f4f1b54ddc27821b0d62d3d6ec7c56cf"), + fe("0x0094975717f9a8a8bb35152f24d43294071ce320c829f388bc852183e1e2ce7e"), + fe("0x04d5ee4c3aa78f7d80fde60d716480d3593f74d4f653ae83f4103246db2e8d65"), + fe("0x2a6cf5e9aa03d4336349ad6fb8ed2269c7bef54b8822cc76d08495c12efde187"), + fe("0x2304d31eaab960ba9274da43e19ddeb7f792180808fd6e43baae48d7efcba3f3"), + fe("0x03fd9ac865a4b2a6d5e7009785817249bff08a7e0726fcb4e1c11d39d199f0b0"), + fe("0x00b7258ded52bbda2248404d55ee5044798afc3a209193073f7954d4d63b0b64"), + fe("0x159f81ada0771799ec38fca2d4bf65ebb13d3a74f3298db36272c5ca65e92d9a"), + fe("0x1ef90e67437fbc8550237a75bc28e3bb9000130ea25f0c5471e144cf4264431f"), + fe("0x1e65f838515e5ff0196b49aa41a2d2568df739bc176b08ec95a79ed82932e30d"), + fe("0x2b1b045def3a166cec6ce768d079ba74b18c844e570e1f826575c1068c94c33f"), + fe("0x0832e5753ceb0ff6402543b1109229c165dc2d73bef715e3f1c6e07c168bb173"), + fe("0x02f614e9cedfb3dc6b762ae0a37d41bab1b841c2e8b6451bc5a8e3c390b6ad16"), + fe("0x0e2427d38bd46a60dd640b8e362cad967370ebb777bedff40f6a0be27e7ed705"), + fe("0x0493630b7c670b6deb7c84d414e7ce79049f0ec098c3c7c50768bbe29214a53a"), + fe("0x22ead100e8e482674decdab17066c5a26bb1515355d5461a3dc06cc85327cea9"), + fe("0x25b3e56e655b42cdaae2626ed2554d48583f1ae35626d04de5084e0b6d2a6f16"), + fe("0x1e32752ada8836ef5837a6cde8ff13dbb599c336349e4c584b4fdc0a0cf6f9d0"), + fe("0x2fa2a871c15a387cc50f68f6f3c3455b23c00995f05078f672a9864074d412e5"), + fe("0x2f569b8a9a4424c9278e1db7311e889f54ccbf10661bab7fcd18e7c7a7d83505"), + fe("0x044cb455110a8fdd531ade530234c518a7df93f7332ffd2144165374b246b43d"), + fe("0x227808de93906d5d420246157f2e42b191fe8c90adfe118178ddc723a5319025"), + fe("0x02fcca2934e046bc623adead873579865d03781ae090ad4a8579d2e7a6800355"), + fe("0x0ef915f0ac120b876abccceb344a1d36bad3f3c5ab91a8ddcbec2e060d8befac"), + ] +} + +// ============================================================================ +// MDS matrices +// ============================================================================ + +/// External MDS for t=4: M_E * state +/// Matrix: `[[5,7,1,3],[4,6,1,1],[1,3,5,7],[1,1,4,6]]` +fn external_mds(state: &mut [Fr; 4]) { + let [s0, s1, s2, s3] = *state; + let f1 = Fr::from(1u64); + let f3 = Fr::from(3u64); + let f4 = Fr::from(4u64); + let f5 = Fr::from(5u64); + let f6 = Fr::from(6u64); + let f7 = Fr::from(7u64); + + state[0] = f5 * s0 + f7 * s1 + f1 * s2 + f3 * s3; + state[1] = f4 * s0 + f6 * s1 + f1 * s2 + f1 * s3; + state[2] = f1 * s0 + f3 * s1 + f5 * s2 + f7 * s3; + state[3] = f1 * s0 + f1 * s1 + f4 * s2 + f6 * s3; +} + +/// Internal MDS for t=4: `out[i] = diag[i] * x[i] + sum(x)` +fn internal_mds(state: &mut [Fr; 4], diag: &[Fr; 4]) { + let sum: Fr = state.iter().sum(); + for i in 0..4 { + state[i] = diag[i] * state[i] + sum; + } +} + +/// S-box: x -> x^5 +#[inline] +fn sbox(x: &mut Fr) { + let x2 = x.square(); + let x4 = x2.square(); + *x = x4 * *x; +} + +// ============================================================================ +// Lazily initialized constants (parsed once, reused across all permutations) +// ============================================================================ + +use std::sync::LazyLock; + +static RC_FULL1: LazyLock<[[Fr; 4]; 4]> = LazyLock::new(load_rc_full1); +static RC_FULL2: LazyLock<[[Fr; 4]; 4]> = LazyLock::new(load_rc_full2); +static RC_PARTIAL: LazyLock<[Fr; 56]> = LazyLock::new(load_rc_partial); +static DIAG: LazyLock<[Fr; 4]> = LazyLock::new(load_diag); + +// ============================================================================ +// Poseidon2 
permutation (t=4) +// ============================================================================ + +/// Apply 4 full rounds: add round constants, S-box all lanes, external MDS. +fn full_rounds(state: &mut [Fr; 4], rc: &[[Fr; 4]; 4]) { + for r in 0..4 { + for i in 0..4 { + state[i] += rc[r][i]; + } + for i in 0..4 { + sbox(&mut state[i]); + } + external_mds(state); + } +} + +/// Poseidon2 permutation for t=4 (BN254). +/// +/// Round schedule: ext_MDS → [RC + S-box → ext_MDS] × 4 full +/// → [RC(lane0) + S-box(lane0) → int_MDS] × 56 partial +/// → [RC + S-box → ext_MDS] × 4 full +pub fn poseidon2_permutation(state: &mut [Fr; 4]) { + // Initial external MDS + external_mds(state); + + // First 4 full rounds + full_rounds(state, &RC_FULL1); + + // 56 partial rounds + for r in 0..NUM_PARTIAL_ROUNDS { + state[0] += RC_PARTIAL[r]; + sbox(&mut state[0]); + internal_mds(state, &DIAG); + } + + // Final 4 full rounds + full_rounds(state, &RC_FULL2); +} + +// ============================================================================ +// Sponge construction (matching Noir's stdlib) +// ============================================================================ + +/// Poseidon2 sponge hash matching Noir's `Poseidon2::hash(inputs, len)`. +/// +/// Sponge parameters: width=4, rate=3, capacity=1. +/// IV = message_length * 2^64, placed in `state[3]` (capacity lane). +pub fn poseidon2_hash(inputs: &[Fr]) -> Fr { + let msg_len = inputs.len(); + let two_pow_64 = Fr::from(1u64 << 32) * Fr::from(1u64 << 32); + let iv = Fr::from(msg_len as u64) * two_pow_64; + + let mut state: [Fr; 4] = [Fr::from(0u64), Fr::from(0u64), Fr::from(0u64), iv]; + let mut cache: [Fr; RATE] = [Fr::from(0u64); RATE]; + let mut cache_size: usize = 0; + + for &input in inputs { + if cache_size == RATE { + // Perform duplex: add cache into state, permute + for i in 0..RATE { + state[i] += cache[i]; + } + cache = [Fr::from(0u64); RATE]; + cache_size = 0; + poseidon2_permutation(&mut state); + } + cache[cache_size] = input; + cache_size += 1; + } + + // Final squeeze: add remaining cache into state, permute + for i in 0..cache_size { + state[i] += cache[i]; + } + poseidon2_permutation(&mut state); + + state[0] +} + +// ============================================================================ +// Tests +// ============================================================================ + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_poseidon2_hash_single_element() { + // From Noir test: Poseidon2::hash([1], 1) + let inputs = [Fr::from(1u64)]; + let result = poseidon2_hash(&inputs); + let expected = fe("0x168758332d5b3e2d13be8048c8011b454590e06c44bce7f702f09103eef5a373"); + assert_eq!(result, expected, "Poseidon2::hash([1], 1) mismatch"); + } + + #[test] + fn test_poseidon2_hash_two_elements() { + // From Noir test: Poseidon2::hash([e, e], 2) where e = hash([1], 1) + let e = fe("0x168758332d5b3e2d13be8048c8011b454590e06c44bce7f702f09103eef5a373"); + let inputs = [e, e]; + let result = poseidon2_hash(&inputs); + let expected = fe("0x113d8ff59c2e15d711241797c380264e39dc1b9e00f2713e707d8d7773b6d912"); + assert_eq!(result, expected, "Poseidon2::hash([e, e], 2) mismatch"); + } + + #[test] + fn test_poseidon2_hash_four_elements() { + // Verify 4-element hash (fills the rate exactly once) + let inputs = [ + Fr::from(1u64), + Fr::from(2u64), + Fr::from(3u64), + Fr::from(4u64), + ]; + let result = poseidon2_hash(&inputs); + // Just verify it doesn't panic and produces a non-zero result + assert_ne!(result, Fr::from(0u64)); + } +} 
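Because the sponge matches Noir's stdlib, hashes computed here line up with in-circuit `Poseidon2::hash` calls, which is what makes the commitment chaining across circuits work. A quick usage sketch using this module's `poseidon2_hash` and `fe` helpers, checked against the single-element vector from the tests above (the three-element inputs are placeholders):

```rust
use ark_bn254::Fr;

fn main() {
    // Matches Noir's Poseidon2::hash([1], 1); expected value taken from the
    // test vector above.
    let h = poseidon2_hash(&[Fr::from(1u64)]);
    let expected = fe("0x168758332d5b3e2d13be8048c8011b454590e06c44bce7f702f09103eef5a373");
    assert_eq!(h, expected);

    // A salted commitment in the style of the chain outputs
    // comm = Poseidon2(salt, a, b); the values here are arbitrary.
    let comm = poseidon2_hash(&[Fr::from(42u64), Fr::from(7u64), Fr::from(9u64)]);
    assert_ne!(comm, Fr::from(0u64));
}
```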
diff --git a/provekit/prover/src/lib.rs b/provekit/prover/src/lib.rs
index be86c8f3..b3026169 100644
--- a/provekit/prover/src/lib.rs
+++ b/provekit/prover/src/lib.rs
@@ -21,7 +21,9 @@ mod witness;
 pub trait Prove {
     fn generate_witness(&mut self, input_map: InputMap) -> Result<WitnessMap<FieldElement>>;
 
-    fn prove(self, prover_toml: impl AsRef<Path>) -> Result<NoirProof>;
+    fn prove(self, input_map: InputMap) -> Result<NoirProof>;
+
+    fn prove_with_toml(self, prover_toml: impl AsRef<Path>) -> Result<NoirProof>;
 }
 
 impl Prove for Prover {
@@ -54,12 +56,9 @@ impl Prove for Prover {
     }
 
     #[instrument(skip_all)]
-    fn prove(mut self, prover_toml: impl AsRef<Path>) -> Result<NoirProof> {
+    fn prove(mut self, input_map: InputMap) -> Result<NoirProof> {
         provekit_common::register_ntt();
 
-        let (input_map, _expected_return) =
-            read_inputs_from_file(prover_toml.as_ref(), self.witness_generator.abi())?;
-
         let acir_witness_idx_to_value_map = self.generate_witness(input_map)?;
 
         let acir_public_inputs = self.program.functions[0].public_inputs().indices();
@@ -145,6 +144,13 @@ impl Prove for Prover {
             whir_r1cs_proof,
         })
     }
+
+    #[instrument(skip_all)]
+    fn prove_with_toml(self, prover_toml: impl AsRef<Path>) -> Result<NoirProof> {
+        let (input_map, _expected_return) =
+            read_inputs_from_file(prover_toml.as_ref(), self.witness_generator.abi())?;
+        self.prove(input_map)
+    }
 }
 
 #[cfg(test)]
diff --git a/tooling/cli/src/cmd/prove.rs b/tooling/cli/src/cmd/prove.rs
index 32da1d13..68eb2a7e 100644
--- a/tooling/cli/src/cmd/prove.rs
+++ b/tooling/cli/src/cmd/prove.rs
@@ -50,7 +50,7 @@ impl Command for Args {
 
         // Generate the proof
         let proof = prover
-            .prove(&self.input_path)
+            .prove_with_toml(&self.input_path)
             .context("While proving Noir program statement")?;
 
         // Verify the proof (not in release build)
diff --git a/tooling/provekit-bench/benches/bench.rs b/tooling/provekit-bench/benches/bench.rs
index ce703453..5cf7c0f7 100644
--- a/tooling/provekit-bench/benches/bench.rs
+++ b/tooling/provekit-bench/benches/bench.rs
@@ -30,7 +30,7 @@ fn prove_poseidon_1000(bencher: Bencher) {
     bencher.bench_local(|| {
         let prover = black_box(prover.clone());
         let witness_path = black_box(&witness_path);
-        prover.prove(witness_path)
+        prover.prove_with_toml(witness_path)
     });
 }
 
@@ -52,7 +52,7 @@ fn prove_poseidon_1000_with_io(bencher: Bencher) {
             })
             .expect("Reading prover failed");
         let prover = black_box(prover);
-        prover.prove(black_box(&witness_path))
+        prover.prove_with_toml(black_box(&witness_path))
     });
 }
 
diff --git a/tooling/provekit-bench/tests/compiler.rs b/tooling/provekit-bench/tests/compiler.rs
index 8643bcfb..dce69bee 100644
--- a/tooling/provekit-bench/tests/compiler.rs
+++ b/tooling/provekit-bench/tests/compiler.rs
@@ -43,7 +43,7 @@ fn test_compiler(test_case_path: impl AsRef) {
     let mut verifier = Verifier::from_noir_proof_scheme(schema.clone());
 
     let proof = prover
-        .prove(&witness_file_path)
+        .prove_with_toml(&witness_file_path)
         .expect("While proving Noir program statement");
 
     verifier.verify(&proof).expect("Verifying proof");
diff --git a/tooling/provekit-ffi/src/ffi.rs b/tooling/provekit-ffi/src/ffi.rs
index 9cd8c27e..68666280 100644
--- a/tooling/provekit-ffi/src/ffi.rs
+++ b/tooling/provekit-ffi/src/ffi.rs
@@ -57,7 +57,9 @@ pub unsafe extern "C" fn pk_prove_to_file(
     let prover: Prover = read(Path::new(&prover_path)).map_err(|_| PKError::SchemeReadError)?;
 
-    let proof = prover.prove(&input_path).map_err(|_| PKError::ProofError)?;
+    let proof = prover
+        .prove_with_toml(&input_path)
+        .map_err(|_| PKError::ProofError)?;
 
     provekit_common::file::write(&proof, Path::new(&out_path))
         .map_err(|_| PKError::FileWriteError)?;
@@ -116,7 +118,9 
@@ pub unsafe extern "C" fn pk_prove_to_json( let prover: Prover = read(Path::new(&prover_path)).map_err(|_| PKError::SchemeReadError)?; - let proof = prover.prove(&input_path).map_err(|_| PKError::ProofError)?; + let proof = prover + .prove_with_toml(&input_path) + .map_err(|_| PKError::ProofError)?; let json_string = serde_json::to_string(&proof).map_err(|_| PKError::SerializationError)?;
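With this split, callers choose between the two entry points: file-driven tooling keeps the old behavior through `prove_with_toml`, while the passport pipeline hands an `InputMap` straight to `prove` and skips the TOML round trip. A sketch of both call sites (the import paths for `read`, `Prover`, and `InputMap` are assumed from the diffs above; `build_input_map` is a hypothetical helper):

```rust
use std::path::Path;

use anyhow::Result;
use noirc_abi::InputMap;
use provekit_common::file::read;
use provekit_prover::{Prove, Prover};

fn build_input_map() -> Result<InputMap> {
    // Placeholder: real code fills the circuit's inputs by ABI name.
    Ok(InputMap::new())
}

fn prove_both_ways(prover_path: &Path, toml_path: &Path) -> Result<()> {
    // TOML path (CLI, benches, FFI): parse Prover.toml against the ABI,
    // then prove.
    let prover: Prover = read(prover_path)?;
    let _proof = prover.prove_with_toml(toml_path)?;

    // Direct path (passport input generation): build the InputMap in
    // memory, no file I/O.
    let prover: Prover = read(prover_path)?;
    let _proof = prover.prove(build_input_map()?)?;
    Ok(())
}
```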