diff --git a/lightning/src/ln/offers_tests.rs b/lightning/src/ln/offers_tests.rs index de08af5d276..31b98a969d2 100644 --- a/lightning/src/ln/offers_tests.rs +++ b/lightning/src/ln/offers_tests.rs @@ -61,6 +61,8 @@ use crate::offers::invoice_error::InvoiceError; use crate::offers::invoice_request::{InvoiceRequest, InvoiceRequestFields, InvoiceRequestVerifiedFromOffer}; use crate::offers::nonce::Nonce; use crate::offers::parse::Bolt12SemanticError; +use crate::offers::payer_proof::{PayerProof, PayerProofBuilder, PayerProofError}; +use crate::types::payment::PaymentPreimage; use crate::onion_message::messenger::{DefaultMessageRouter, Destination, MessageSendInstructions, NodeIdMessageRouter, NullMessageRouter, PeeledOnion, DUMMY_HOPS_PATH_LENGTH, QR_CODED_DUMMY_HOPS_PATH_LENGTH}; use crate::onion_message::offers::OffersMessage; use crate::routing::gossip::{NodeAlias, NodeId}; @@ -264,6 +266,21 @@ fn extract_offer_nonce<'a, 'b, 'c>(node: &Node<'a, 'b, 'c>, message: &OnionMessa } } +/// Extract the payer's nonce from an invoice onion message received by the payer. +/// +/// When the payer receives an invoice through their reply path, the blinded path context +/// contains the nonce originally used for deriving their payer signing key. This nonce is +/// needed to build a [`PayerProof`] using [`PayerProofBuilder::build_and_sign_with_derived_key`]. +fn extract_payer_context<'a, 'b, 'c>(node: &Node<'a, 'b, 'c>, message: &OnionMessage) -> (PaymentId, Nonce) { + match node.onion_messenger.peel_onion_message(message) { + Ok(PeeledOnion::Offers(_, Some(OffersContext::OutboundPaymentForOffer { payment_id, nonce, .. 
}), _)) => (payment_id, nonce), + Ok(PeeledOnion::Offers(_, context, _)) => panic!("Expected OutboundPaymentForOffer context, got: {:?}", context), + Ok(PeeledOnion::Forward(_, _)) => panic!("Unexpected onion message forward"), + Ok(_) => panic!("Unexpected onion message"), + Err(e) => panic!("Failed to process onion message {:?}", e), + } +} + pub(super) fn extract_invoice_request<'a, 'b, 'c>( node: &Node<'a, 'b, 'c>, message: &OnionMessage ) -> (InvoiceRequest, BlindedMessagePath) { @@ -2667,3 +2684,236 @@ fn creates_and_pays_for_phantom_offer() { assert!(nodes[0].onion_messenger.next_onion_message_for_peer(node_c_id).is_none()); } } + +/// Tests the full payer proof lifecycle: offer -> invoice_request -> invoice -> payment -> +/// proof creation with derived key signing -> verification -> bech32 round-trip. +/// +/// This exercises the primary API path where a wallet pays a BOLT 12 offer and then creates +/// a payer proof using the derived signing key (same key derivation as the invoice request). 
+#[test] +fn creates_and_verifies_payer_proof_after_offer_payment() { + let chanmon_cfgs = create_chanmon_cfgs(2); + let node_cfgs = create_node_cfgs(2, &chanmon_cfgs); + let node_chanmgrs = create_node_chanmgrs(2, &node_cfgs, &[None, None]); + let nodes = create_network(2, &node_cfgs, &node_chanmgrs); + + create_announced_chan_between_nodes_with_value(&nodes, 0, 1, 10_000_000, 1_000_000_000); + + let alice = &nodes[0]; // recipient (offer creator) + let alice_id = alice.node.get_our_node_id(); + let bob = &nodes[1]; // payer + let bob_id = bob.node.get_our_node_id(); + + // Alice creates an offer + let offer = alice.node + .create_offer_builder().unwrap() + .amount_msats(10_000_000) + .build().unwrap(); + + // Bob initiates payment + let payment_id = PaymentId([1; 32]); + bob.node.pay_for_offer(&offer, None, payment_id, Default::default()).unwrap(); + expect_recent_payment!(bob, RecentPaymentDetails::AwaitingInvoice, payment_id); + + // Bob sends invoice request to Alice + let onion_message = bob.onion_messenger.next_onion_message_for_peer(alice_id).unwrap(); + alice.onion_messenger.handle_onion_message(bob_id, &onion_message); + + let (invoice_request, _) = extract_invoice_request(alice, &onion_message); + + // Alice sends invoice back to Bob + let onion_message = alice.onion_messenger.next_onion_message_for_peer(bob_id).unwrap(); + bob.onion_messenger.handle_onion_message(alice_id, &onion_message); + + let (invoice, _) = extract_invoice(bob, &onion_message); + assert_eq!(invoice.amount_msats(), 10_000_000); + + // Extract the payer nonce and payment_id from Bob's reply path context. In a real wallet, + // these would be persisted alongside the payment for later payer proof creation. 
+ let (context_payment_id, payer_nonce) = extract_payer_context(bob, &onion_message); + assert_eq!(context_payment_id, payment_id); + + // Route the payment + route_bolt12_payment(bob, &[alice], &invoice); + expect_recent_payment!(bob, RecentPaymentDetails::Pending, payment_id); + + // Get the payment preimage from Alice's PaymentClaimable event and claim it. + // In a real wallet, the payer receives the preimage via Event::PaymentSent after the + // recipient claims. For the test, we extract it from the recipient's claimable event. + let payment_preimage = match get_event!(alice, Event::PaymentClaimable) { + Event::PaymentClaimable { purpose, .. } => { + match &purpose { + PaymentPurpose::Bolt12OfferPayment { payment_context, .. } => { + assert_eq!(payment_context.offer_id, offer.id()); + assert_eq!( + payment_context.invoice_request.payer_signing_pubkey, + invoice_request.payer_signing_pubkey(), + ); + }, + _ => panic!("Expected Bolt12OfferPayment purpose"), + } + purpose.preimage().unwrap() + }, + _ => panic!("Expected Event::PaymentClaimable"), + }; + + claim_payment(bob, &[alice], payment_preimage); + expect_recent_payment!(bob, RecentPaymentDetails::Fulfilled, payment_id); + + // --- Payer Proof Creation --- + // Bob (the payer) creates a proof-of-payment with selective disclosure. + // He includes the offer description and invoice amount, but omits other fields for privacy. + let expanded_key = bob.keys_manager.get_expanded_key(); + let proof = PayerProofBuilder::new(&invoice, payment_preimage).unwrap() + .include_offer_description() + .include_invoice_amount() + .include_invoice_created_at() + .build_and_sign_with_derived_key(&expanded_key, payer_nonce, payment_id, None) + .unwrap(); + + // --- Verification --- + // Anyone with the proof can verify it without needing the full invoice. 
+ proof.verify().unwrap(); + + // Check proof contents match the original payment + assert_eq!(proof.preimage(), payment_preimage); + assert_eq!(proof.payment_hash(), invoice.payment_hash()); + assert_eq!(proof.payer_id(), invoice.payer_signing_pubkey()); + assert_eq!(proof.issuer_signing_pubkey(), invoice.signing_pubkey()); + assert!(proof.payer_note().is_none()); + + // --- Serialization Round-Trip --- + // The proof can be serialized to a bech32 string (lnp...) for sharing. + let encoded = proof.to_string(); + assert!(encoded.starts_with("lnp1")); + + // Round-trip through TLV bytes: re-parse the raw bytes and verify. + let decoded = PayerProof::try_from(proof.bytes().to_vec()).unwrap(); + decoded.verify().unwrap(); + assert_eq!(decoded.preimage(), proof.preimage()); + assert_eq!(decoded.payment_hash(), proof.payment_hash()); + assert_eq!(decoded.payer_id(), proof.payer_id()); + assert_eq!(decoded.issuer_signing_pubkey(), proof.issuer_signing_pubkey()); + assert_eq!(decoded.merkle_root(), proof.merkle_root()); +} + +/// Tests payer proof creation with a payer note, selective disclosure of specific invoice +/// fields, and error cases. 
Verifies that: +/// - A wrong preimage is rejected +/// - A minimal proof (required fields only) works +/// - Selective disclosure with a payer note works +/// - The proof survives a bech32 round-trip with the note intact +#[test] +fn creates_payer_proof_with_note_and_selective_disclosure() { + let chanmon_cfgs = create_chanmon_cfgs(2); + let node_cfgs = create_node_cfgs(2, &chanmon_cfgs); + let node_chanmgrs = create_node_chanmgrs(2, &node_cfgs, &[None, None]); + let nodes = create_network(2, &node_cfgs, &node_chanmgrs); + + create_announced_chan_between_nodes_with_value(&nodes, 0, 1, 10_000_000, 1_000_000_000); + + let alice = &nodes[0]; + let alice_id = alice.node.get_our_node_id(); + let bob = &nodes[1]; + let bob_id = bob.node.get_our_node_id(); + + // Alice creates an offer with a description + let offer = alice.node + .create_offer_builder().unwrap() + .amount_msats(5_000_000) + .description("Coffee beans - 1kg".into()) + .build().unwrap(); + + // Bob pays for the offer + let payment_id = PaymentId([2; 32]); + bob.node.pay_for_offer(&offer, None, payment_id, Default::default()).unwrap(); + expect_recent_payment!(bob, RecentPaymentDetails::AwaitingInvoice, payment_id); + + // Exchange messages + let onion_message = bob.onion_messenger.next_onion_message_for_peer(alice_id).unwrap(); + alice.onion_messenger.handle_onion_message(bob_id, &onion_message); + let (invoice_request, _) = extract_invoice_request(alice, &onion_message); + + let onion_message = alice.onion_messenger.next_onion_message_for_peer(bob_id).unwrap(); + bob.onion_messenger.handle_onion_message(alice_id, &onion_message); + + let (invoice, _) = extract_invoice(bob, &onion_message); + let (context_payment_id, payer_nonce) = extract_payer_context(bob, &onion_message); + assert_eq!(context_payment_id, payment_id); + + // Route and claim the payment, extracting the preimage + route_bolt12_payment(bob, &[alice], &invoice); + expect_recent_payment!(bob, RecentPaymentDetails::Pending, payment_id); + + 
let payment_preimage = match get_event!(alice, Event::PaymentClaimable) { + Event::PaymentClaimable { purpose, .. } => { + match &purpose { + PaymentPurpose::Bolt12OfferPayment { payment_context, .. } => { + assert_eq!(payment_context.offer_id, offer.id()); + assert_eq!( + payment_context.invoice_request.payer_signing_pubkey, + invoice_request.payer_signing_pubkey(), + ); + }, + _ => panic!("Expected Bolt12OfferPayment purpose"), + } + purpose.preimage().unwrap() + }, + _ => panic!("Expected Event::PaymentClaimable"), + }; + + claim_payment(bob, &[alice], payment_preimage); + expect_recent_payment!(bob, RecentPaymentDetails::Fulfilled, payment_id); + + // --- Test 1: Wrong preimage is rejected --- + let wrong_preimage = PaymentPreimage([0xDE; 32]); + assert!(PayerProofBuilder::new(&invoice, wrong_preimage).is_err()); + + // --- Test 2: Wrong payment_id causes key derivation failure --- + let expanded_key = bob.keys_manager.get_expanded_key(); + let wrong_payment_id = PaymentId([0xFF; 32]); + let result = PayerProofBuilder::new(&invoice, payment_preimage).unwrap() + .build_and_sign_with_derived_key(&expanded_key, payer_nonce, wrong_payment_id, None); + assert!(matches!(result, Err(PayerProofError::KeyDerivationFailed))); + + // --- Test 3: Wrong nonce causes key derivation failure --- + let wrong_nonce = Nonce::from_entropy_source(&chanmon_cfgs[0].keys_manager); + let result = PayerProofBuilder::new(&invoice, payment_preimage).unwrap() + .build_and_sign_with_derived_key(&expanded_key, wrong_nonce, payment_id, None); + assert!(matches!(result, Err(PayerProofError::KeyDerivationFailed))); + + // --- Test 4: Minimal proof (only required fields) --- + let minimal_proof = PayerProofBuilder::new(&invoice, payment_preimage).unwrap() + .build_and_sign_with_derived_key(&expanded_key, payer_nonce, payment_id, None) + .unwrap(); + minimal_proof.verify().unwrap(); + + // --- Test 5: Proof with selective disclosure and payer note --- + let proof_with_note = 
PayerProofBuilder::new(&invoice, payment_preimage).unwrap() + .include_offer_description() + .include_offer_issuer() + .include_invoice_amount() + .include_invoice_created_at() + .build_and_sign_with_derived_key(&expanded_key, payer_nonce, payment_id, Some("Paid for coffee")) + .unwrap(); + proof_with_note.verify().unwrap(); + assert_eq!(proof_with_note.payer_note(), Some("Paid for coffee")); + + // Both proofs should verify and have the same core fields + assert_eq!(minimal_proof.preimage(), proof_with_note.preimage()); + assert_eq!(minimal_proof.payment_hash(), proof_with_note.payment_hash()); + assert_eq!(minimal_proof.payer_id(), proof_with_note.payer_id()); + assert_eq!(minimal_proof.issuer_signing_pubkey(), proof_with_note.issuer_signing_pubkey()); + + // The merkle roots are the same since both reconstruct from the same invoice + assert_eq!(minimal_proof.merkle_root(), proof_with_note.merkle_root()); + + // --- Test 6: Round-trip the proof with note through TLV bytes --- + let encoded = proof_with_note.to_string(); + assert!(encoded.starts_with("lnp1")); + + let decoded = PayerProof::try_from(proof_with_note.bytes().to_vec()).unwrap(); + decoded.verify().unwrap(); + assert_eq!(decoded.payer_note(), Some("Paid for coffee")); + assert_eq!(decoded.preimage(), payment_preimage); +} diff --git a/lightning/src/offers/invoice.rs b/lightning/src/offers/invoice.rs index 8d83225f117..c9f1ab62231 100644 --- a/lightning/src/offers/invoice.rs +++ b/lightning/src/offers/invoice.rs @@ -1032,6 +1032,24 @@ impl Bolt12Invoice { ) } + /// Re-derives the payer's signing keypair for payer proof creation. + /// + /// This performs the same key derivation that occurs during invoice request creation + /// with `deriving_signing_pubkey`, allowing the payer to recover their signing keypair. + /// The `nonce` and `payment_id` must be the same ones used when creating the original + /// invoice request (available from [`OffersContext::OutboundPaymentForOffer`]). 
+ /// + /// [`OffersContext::OutboundPaymentForOffer`]: crate::blinded_path::message::OffersContext::OutboundPaymentForOffer + pub(crate) fn derive_signing_keys( + &self, payment_id: PaymentId, nonce: Nonce, key: &ExpandedKey, secp_ctx: &Secp256k1, + ) -> Result { + let iv_bytes = match &self.contents { + InvoiceContents::ForOffer { .. } => INVOICE_REQUEST_IV_BYTES, + InvoiceContents::ForRefund { .. } => REFUND_IV_BYTES_WITHOUT_METADATA, + }; + self.contents.derive_signing_keys(&self.bytes, payment_id, nonce, key, iv_bytes, secp_ctx) + } + pub(crate) fn as_tlv_stream(&self) -> FullInvoiceTlvStreamRef<'_> { let ( payer_tlv_stream, @@ -1342,6 +1360,36 @@ impl InvoiceContents { ) } + fn derive_signing_keys( + &self, bytes: &[u8], payment_id: PaymentId, nonce: Nonce, key: &ExpandedKey, + iv_bytes: &[u8; IV_LEN], secp_ctx: &Secp256k1, + ) -> Result { + const EXPERIMENTAL_TYPES: core::ops::Range = + EXPERIMENTAL_OFFER_TYPES.start..EXPERIMENTAL_INVOICE_REQUEST_TYPES.end; + + let offer_records = TlvStream::new(bytes).range(OFFER_TYPES); + let invreq_records = TlvStream::new(bytes).range(INVOICE_REQUEST_TYPES).filter(|record| { + match record.r#type { + PAYER_METADATA_TYPE => false, + INVOICE_REQUEST_PAYER_ID_TYPE => false, + _ => true, + } + }); + let experimental_records = TlvStream::new(bytes).range(EXPERIMENTAL_TYPES); + let tlv_stream = offer_records.chain(invreq_records).chain(experimental_records); + + let signing_pubkey = self.payer_signing_pubkey(); + signer::derive_payer_keys( + payment_id, + nonce, + key, + iv_bytes, + signing_pubkey, + tlv_stream, + secp_ctx, + ) + } + fn as_tlv_stream(&self) -> PartialInvoiceTlvStreamRef<'_> { let (payer, offer, invoice_request, experimental_offer, experimental_invoice_request) = match self { diff --git a/lightning/src/offers/merkle.rs b/lightning/src/offers/merkle.rs index 1a38fe5441f..2e2f81ab9f8 100644 --- a/lightning/src/offers/merkle.rs +++ b/lightning/src/offers/merkle.rs @@ -73,6 +73,13 @@ impl TaggedHash { 
self.merkle_root } + /// Creates a tagged hash from a pre-computed merkle root. + pub(super) fn from_merkle_root(tag: &'static str, merkle_root: sha256::Hash) -> Self { + let tag_hash = sha256::Hash::hash(tag.as_bytes()); + let digest = Message::from_digest(tagged_hash(tag_hash, merkle_root).to_byte_array()); + Self { tag, merkle_root, digest } + } + pub(super) fn to_bytes(&self) -> [u8; 32] { *self.digest.as_ref() } @@ -261,7 +268,6 @@ impl<'a> Iterator for TlvStream<'a> { let offset = self.data.position(); let end = offset + length; - let _value = &self.data.get_ref()[offset as usize..end as usize]; let record_bytes = &self.data.get_ref()[start as usize..end as usize]; self.data.set_position(end); @@ -280,6 +286,462 @@ impl<'a> Writeable for TlvRecord<'a> { } } +// ============================================================================ +// Selective Disclosure for Payer Proofs (BOLT 12 extension) +// ============================================================================ + +use alloc::collections::BTreeSet; + +/// Error during selective disclosure operations. +#[derive(Debug, Clone, PartialEq, Eq)] +pub enum SelectiveDisclosureError { + /// The omitted markers are not in strict ascending order. + InvalidOmittedMarkersOrder, + /// The omitted markers contain an invalid marker (0 or signature type). + InvalidOmittedMarkersMarker, + /// The leaf_hashes count doesn't match included TLVs. + LeafHashCountMismatch, + /// Insufficient missing_hashes to reconstruct the tree. + InsufficientMissingHashes, + /// The TLV stream is empty. + EmptyTlvStream, +} + +/// Data needed to reconstruct a merkle root with selective disclosure. +/// +/// This is used in payer proofs to allow verification of an invoice signature +/// without revealing all invoice fields. +#[derive(Clone, Debug, PartialEq)] +pub(super) struct SelectiveDisclosure { + /// Nonce hashes for included TLVs (in TLV type order). 
+	pub(super) leaf_hashes: Vec<sha256::Hash>,
+	/// Marker numbers for omitted TLVs (excluding implicit TLV0).
+	pub(super) omitted_markers: Vec<u64>,
+	/// Minimal merkle hashes for omitted subtrees.
+	pub(super) missing_hashes: Vec<sha256::Hash>,
+	/// The complete merkle root.
+	pub(super) merkle_root: sha256::Hash,
+}
+
+/// Internal data for each TLV during tree construction.
+struct TlvMerkleData {
+	tlv_type: u64,
+	per_tlv_hash: sha256::Hash,
+	is_included: bool,
+}
+
+/// Compute selective disclosure data from a TLV stream.
+///
+/// This builds the full merkle tree and extracts the data needed for a payer proof:
+/// - `leaf_hashes`: nonce hashes for included TLVs
+/// - `omitted_markers`: marker numbers for omitted TLVs
+/// - `missing_hashes`: minimal merkle hashes for omitted subtrees
+///
+/// # Arguments
+/// * `tlv_bytes` - Complete TLV stream (e.g., invoice bytes without signature)
+/// * `included_types` - Set of TLV types to include in the disclosure
+pub(super) fn compute_selective_disclosure(
+	tlv_bytes: &[u8], included_types: &BTreeSet<u64>,
+) -> Result<SelectiveDisclosure, SelectiveDisclosureError> {
+	let mut tlv_stream = TlvStream::new(tlv_bytes).peekable();
+	let first_record = tlv_stream.peek().ok_or(SelectiveDisclosureError::EmptyTlvStream)?;
+	let nonce_tag_hash = sha256::Hash::from_engine({
+		let mut engine = sha256::Hash::engine();
+		engine.input("LnNonce".as_bytes());
+		engine.input(first_record.record_bytes);
+		engine
+	});
+
+	let leaf_tag = tagged_hash_engine(sha256::Hash::hash("LnLeaf".as_bytes()));
+	let nonce_tag = tagged_hash_engine(nonce_tag_hash);
+	let branch_tag = tagged_hash_engine(sha256::Hash::hash("LnBranch".as_bytes()));
+
+	let mut tlv_data: Vec<TlvMerkleData> = Vec::new();
+	let mut leaf_hashes: Vec<sha256::Hash> = Vec::new();
+	for record in tlv_stream.filter(|r| !SIGNATURE_TYPES.contains(&r.r#type)) {
+		let leaf_hash = tagged_hash_from_engine(leaf_tag.clone(), record.record_bytes);
+		let nonce_hash = tagged_hash_from_engine(nonce_tag.clone(), record.type_bytes);
+		let per_tlv_hash =
tagged_branch_hash_from_engine(branch_tag.clone(), leaf_hash, nonce_hash);
+
+		let is_included = included_types.contains(&record.r#type);
+		if is_included {
+			leaf_hashes.push(nonce_hash);
+		}
+		tlv_data.push(TlvMerkleData { tlv_type: record.r#type, per_tlv_hash, is_included });
+	}
+
+	if tlv_data.is_empty() {
+		return Err(SelectiveDisclosureError::EmptyTlvStream);
+	}
+	let omitted_markers = compute_omitted_markers(&tlv_data);
+	let (merkle_root, missing_hashes) = build_tree_with_disclosure(&tlv_data, &branch_tag);
+
+	Ok(SelectiveDisclosure { leaf_hashes, omitted_markers, missing_hashes, merkle_root })
+}
+
+/// Compute omitted markers per BOLT 12 payer proof spec.
+fn compute_omitted_markers(tlv_data: &[TlvMerkleData]) -> Vec<u64> {
+	let mut markers = Vec::new();
+	let mut prev_included_type: Option<u64> = None;
+	let mut prev_marker: Option<u64> = None;
+
+	for data in tlv_data {
+		if data.tlv_type == 0 {
+			continue;
+		}
+
+		if !data.is_included {
+			let marker = if let Some(prev_type) = prev_included_type {
+				prev_type + 1
+			} else if let Some(last_marker) = prev_marker {
+				last_marker + 1
+			} else {
+				1
+			};
+
+			markers.push(marker);
+			prev_marker = Some(marker);
+			prev_included_type = None;
+		} else {
+			prev_included_type = Some(data.tlv_type);
+			prev_marker = None;
+		}
+	}
+
+	markers
+}
+
+/// A node in the merkle tree during selective disclosure processing.
+struct TreeNode {
+	hash: Option<sha256::Hash>,
+	included: bool,
+	min_type: u64,
+}
+
+/// Build merkle tree and collect missing_hashes for omitted subtrees.
+///
+/// Returns hashes sorted by ascending TLV type as required by the spec. For internal
+/// nodes, the type used for ordering is the minimum TLV type in that subtree.
+///
+/// Uses `n` tree nodes (one per TLV) rather than `2n`, since the per-TLV hashes
+/// already combine leaf and nonce. The tree traversal starts at level 0 to pair
+/// adjacent per-TLV hashes, matching the structure of `root_hash()`.
+fn build_tree_with_disclosure( + tlv_data: &[TlvMerkleData], branch_tag: &sha256::HashEngine, +) -> (sha256::Hash, Vec) { + let num_nodes = tlv_data.len(); + debug_assert!(num_nodes > 0, "TLV stream must contain at least one record"); + + let num_omitted = tlv_data.iter().filter(|d| !d.is_included).count(); + + let mut nodes: Vec = tlv_data + .iter() + .map(|data| TreeNode { + hash: Some(data.per_tlv_hash), + included: data.is_included, + min_type: data.tlv_type, + }) + .collect(); + + let mut missing_with_types: Vec<(u64, sha256::Hash)> = Vec::with_capacity(num_omitted); + + for level in 0.. { + let step = 2 << level; + let offset = step / 2; + if offset >= num_nodes { + break; + } + + for (left_pos, right_pos) in + (0..num_nodes).step_by(step).zip((offset..num_nodes).step_by(step)) + { + let left_hash = nodes[left_pos].hash; + let right_hash = nodes[right_pos].hash; + let left_incl = nodes[left_pos].included; + let right_incl = nodes[right_pos].included; + let right_min_type = nodes[right_pos].min_type; + + match (left_hash, right_hash, left_incl, right_incl) { + (Some(l), Some(r), true, false) => { + missing_with_types.push((right_min_type, r)); + nodes[left_pos].hash = + Some(tagged_branch_hash_from_engine(branch_tag.clone(), l, r)); + nodes[left_pos].included = true; + nodes[left_pos].min_type = + core::cmp::min(nodes[left_pos].min_type, right_min_type); + }, + (Some(l), Some(r), false, true) => { + missing_with_types.push((nodes[left_pos].min_type, l)); + let left_min = nodes[left_pos].min_type; + nodes[left_pos].hash = + Some(tagged_branch_hash_from_engine(branch_tag.clone(), l, r)); + nodes[left_pos].included = true; + nodes[left_pos].min_type = core::cmp::min(left_min, right_min_type); + }, + (Some(l), Some(r), true, true) => { + nodes[left_pos].hash = + Some(tagged_branch_hash_from_engine(branch_tag.clone(), l, r)); + nodes[left_pos].included = true; + nodes[left_pos].min_type = + core::cmp::min(nodes[left_pos].min_type, right_min_type); + }, + (Some(l), 
Some(r), false, false) => {
+					nodes[left_pos].hash =
+						Some(tagged_branch_hash_from_engine(branch_tag.clone(), l, r));
+					nodes[left_pos].min_type =
+						core::cmp::min(nodes[left_pos].min_type, right_min_type);
+				},
+				(Some(_), None, _, _) => {},
+				_ => unreachable!("Invalid state in merkle tree construction"),
+			}
+		}
+	}
+
+	missing_with_types.sort_by_key(|(min_type, _)| *min_type);
+	let missing_hashes: Vec<sha256::Hash> =
+		missing_with_types.into_iter().map(|(_, h)| h).collect();
+
+	(nodes[0].hash.expect("Tree should have a root"), missing_hashes)
+}
+
+/// Reconstruct merkle root from selective disclosure data.
+///
+/// The `missing_hashes` must be in ascending type order per spec.
+///
+/// Uses `n` tree nodes (one per TLV position) rather than `2n`, since per-TLV
+/// hashes already combine leaf and nonce. Two passes over the tree determine
+/// where missing hashes are needed and then combine all hashes to the root.
+pub(super) fn reconstruct_merkle_root<'a>(
+	included_records: &[(u64, &'a [u8])], leaf_hashes: &[sha256::Hash], omitted_markers: &[u64],
+	missing_hashes: &[sha256::Hash],
+) -> Result<sha256::Hash, SelectiveDisclosureError> {
+	// Callers are expected to validate omitted_markers before calling this function
+	// (e.g., via validate_omitted_markers_for_parsing). Debug-assert for safety.
+	debug_assert!(validate_omitted_markers(omitted_markers).is_ok());
+
+	if included_records.len() != leaf_hashes.len() {
+		return Err(SelectiveDisclosureError::LeafHashCountMismatch);
+	}
+
+	let leaf_tag = tagged_hash_engine(sha256::Hash::hash("LnLeaf".as_bytes()));
+	let branch_tag = tagged_hash_engine(sha256::Hash::hash("LnBranch".as_bytes()));
+
+	// Build TreeNode vec directly by interleaving included/omitted positions,
+	// eliminating the intermediate Vec<bool> from reconstruct_positions_from_records.
+	let num_nodes = 1 + included_records.len() + omitted_markers.len();
+	let mut nodes: Vec<TreeNode> = Vec::with_capacity(num_nodes);
+
+	// TLV0 is always omitted
+	nodes.push(TreeNode { hash: None, included: false, min_type: 0 });
+
+	let mut inc_idx = 0;
+	let mut mrk_idx = 0;
+	let mut prev_marker: u64 = 0;
+	let mut node_idx: u64 = 1;
+
+	while inc_idx < included_records.len() || mrk_idx < omitted_markers.len() {
+		if mrk_idx >= omitted_markers.len() {
+			// No more markers, remaining positions are included
+			let (_, record_bytes) = included_records[inc_idx];
+			let leaf_hash = tagged_hash_from_engine(leaf_tag.clone(), record_bytes);
+			let nonce_hash = leaf_hashes[inc_idx];
+			let hash = tagged_branch_hash_from_engine(branch_tag.clone(), leaf_hash, nonce_hash);
+			nodes.push(TreeNode { hash: Some(hash), included: true, min_type: node_idx });
+			inc_idx += 1;
+		} else if inc_idx >= included_records.len() {
+			// No more included types, remaining positions are omitted
+			nodes.push(TreeNode { hash: None, included: false, min_type: node_idx });
+			prev_marker = omitted_markers[mrk_idx];
+			mrk_idx += 1;
+		} else {
+			let marker = omitted_markers[mrk_idx];
+			let (inc_type, _) = included_records[inc_idx];
+
+			if marker == prev_marker + 1 {
+				// Continuation of current run -> omitted position
+				nodes.push(TreeNode { hash: None, included: false, min_type: node_idx });
+				prev_marker = marker;
+				mrk_idx += 1;
+			} else {
+				// Jump detected -> included position comes first
+				let (_, record_bytes) = included_records[inc_idx];
+				let leaf_hash = tagged_hash_from_engine(leaf_tag.clone(), record_bytes);
+				let nonce_hash = leaf_hashes[inc_idx];
+				let hash =
+					tagged_branch_hash_from_engine(branch_tag.clone(), leaf_hash, nonce_hash);
+				nodes.push(TreeNode { hash: Some(hash), included: true, min_type: node_idx });
+				prev_marker = inc_type;
+				inc_idx += 1;
+			}
+		}
+		node_idx += 1;
+	}
+
+	// First pass: walk the tree to discover which positions need missing hashes.
+ // We mutate nodes[].included and nodes[].min_type directly since the second + // pass only reads nodes[].hash, making this safe without a separate allocation. + let num_omitted = omitted_markers.len() + 1; // +1 for implicit TLV0 + let mut needs_hash: Vec<(u64, usize)> = Vec::with_capacity(num_omitted); + + for level in 0.. { + let step = 2 << level; + let offset = step / 2; + if offset >= num_nodes { + break; + } + + for left_pos in (0..num_nodes).step_by(step) { + let right_pos = left_pos + offset; + if right_pos >= num_nodes { + continue; + } + + let r_min = nodes[right_pos].min_type; + + match (nodes[left_pos].included, nodes[right_pos].included) { + (true, false) => { + needs_hash.push((r_min, right_pos)); + nodes[left_pos].min_type = core::cmp::min(nodes[left_pos].min_type, r_min); + }, + (false, true) => { + needs_hash.push((nodes[left_pos].min_type, left_pos)); + nodes[left_pos].included = true; + nodes[left_pos].min_type = core::cmp::min(nodes[left_pos].min_type, r_min); + }, + (true, true) => { + nodes[left_pos].min_type = core::cmp::min(nodes[left_pos].min_type, r_min); + }, + (false, false) => { + nodes[left_pos].min_type = core::cmp::min(nodes[left_pos].min_type, r_min); + }, + } + } + } + + needs_hash.sort_by_key(|(min_pos, _)| *min_pos); + + if needs_hash.len() != missing_hashes.len() { + return Err(SelectiveDisclosureError::InsufficientMissingHashes); + } + + // Place missing hashes directly into the nodes array. + for (i, &(_, tree_pos)) in needs_hash.iter().enumerate() { + nodes[tree_pos].hash = Some(missing_hashes[i]); + } + + // Second pass: combine hashes up the tree. + for level in 0.. 
{ + let step = 2 << level; + let offset = step / 2; + if offset >= num_nodes { + break; + } + + for left_pos in (0..num_nodes).step_by(step) { + let right_pos = left_pos + offset; + if right_pos >= num_nodes { + continue; + } + + match (nodes[left_pos].hash, nodes[right_pos].hash) { + (Some(l), Some(r)) => { + nodes[left_pos].hash = + Some(tagged_branch_hash_from_engine(branch_tag.clone(), l, r)); + }, + (Some(_), None) => {}, + (None, _) => { + return Err(SelectiveDisclosureError::InsufficientMissingHashes); + }, + }; + } + } + + nodes[0].hash.ok_or(SelectiveDisclosureError::InsufficientMissingHashes) +} + +fn validate_omitted_markers(markers: &[u64]) -> Result<(), SelectiveDisclosureError> { + let mut prev = 0u64; + for &marker in markers { + if marker == 0 { + return Err(SelectiveDisclosureError::InvalidOmittedMarkersMarker); + } + if SIGNATURE_TYPES.contains(&marker) { + return Err(SelectiveDisclosureError::InvalidOmittedMarkersMarker); + } + if marker <= prev { + return Err(SelectiveDisclosureError::InvalidOmittedMarkersOrder); + } + prev = marker; + } + Ok(()) +} + +/// Reconstruct position inclusion map from included types and omitted markers. +/// +/// This reverses the marker encoding algorithm from `compute_omitted_markers`: +/// - Markers form "runs" of consecutive values (e.g., [11, 12] is a run) +/// - A "jump" in markers (e.g., 12 → 41) indicates an included TLV came between +/// - After included type X, the next marker in that run equals X + 1 +/// +/// The algorithm tracks `prev_marker` to detect continuations vs jumps: +/// - If `marker == prev_marker + 1`: continuation → omitted position +/// - Otherwise: jump → included position comes first, then process marker as continuation +/// +/// Example: included=[10, 40], markers=[11, 12, 41, 42] +/// - Position 0: TLV0 (always omitted) +/// - marker=11, prev=0: 11 != 1, jump! 
Insert included (10), prev=10 +/// - marker=11, prev=10: 11 == 11, continuation → omitted, prev=11 +/// - marker=12, prev=11: 12 == 12, continuation → omitted, prev=12 +/// - marker=41, prev=12: 41 != 13, jump! Insert included (40), prev=40 +/// - marker=41, prev=40: 41 == 41, continuation → omitted, prev=41 +/// - marker=42, prev=41: 42 == 42, continuation → omitted, prev=42 +/// Result: [O, I, O, O, I, O, O] +#[cfg(test)] +fn reconstruct_positions(included_types: &[u64], omitted_markers: &[u64]) -> Vec { + let total = 1 + included_types.len() + omitted_markers.len(); + let mut positions = Vec::with_capacity(total); + positions.push(false); // TLV0 is always omitted + + let mut inc_idx = 0; + let mut mrk_idx = 0; + // After TLV0 (implicit marker 0), next continuation would be marker 1 + let mut prev_marker: u64 = 0; + + while inc_idx < included_types.len() || mrk_idx < omitted_markers.len() { + if mrk_idx >= omitted_markers.len() { + // No more markers, remaining positions are included + positions.push(true); + inc_idx += 1; + } else if inc_idx >= included_types.len() { + // No more included types, remaining positions are omitted + positions.push(false); + prev_marker = omitted_markers[mrk_idx]; + mrk_idx += 1; + } else { + let marker = omitted_markers[mrk_idx]; + let inc_type = included_types[inc_idx]; + + if marker == prev_marker + 1 { + // Continuation of current run → this position is omitted + positions.push(false); + prev_marker = marker; + mrk_idx += 1; + } else { + // Jump detected! An included TLV comes before this marker. + // After the included type, prev_marker resets to that type, + // so the marker will be processed as a continuation next iteration. 
+ positions.push(true); + prev_marker = inc_type; + inc_idx += 1; + // Don't advance mrk_idx - same marker will be continuation next + } + } + } + + positions +} + #[cfg(test)] mod tests { use super::{TlvStream, SIGNATURE_TYPES}; @@ -497,4 +959,205 @@ mod tests { self.fmt_bech32_str(f) } } + + // ============================================================================ + // Tests for selective disclosure / payer proof reconstruction + // ============================================================================ + + /// Test reconstruct_positions with the BOLT 12 payer proof spec example. + /// + /// TLVs: 0(omit), 10(incl), 20(omit), 30(omit), 40(incl), 50(omit), 60(omit) + /// Markers: [11, 12, 41, 42] + /// Expected positions: [O, I, O, O, I, O, O] + #[test] + fn test_reconstruct_positions_spec_example() { + let included_types = vec![10, 40]; + let markers = vec![11, 12, 41, 42]; + let positions = super::reconstruct_positions(&included_types, &markers); + assert_eq!(positions, vec![false, true, false, false, true, false, false]); + } + + /// Test reconstruct_positions when there are omitted TLVs before the first included. + /// + /// TLVs: 0(omit), 5(omit), 10(incl), 20(omit) + /// Markers: [1, 11] (1 is first omitted after TLV0, 11 is after included 10) + /// Expected positions: [O, O, I, O] + #[test] + fn test_reconstruct_positions_omitted_before_included() { + let included_types = vec![10]; + let markers = vec![1, 11]; + let positions = super::reconstruct_positions(&included_types, &markers); + assert_eq!(positions, vec![false, false, true, false]); + } + + /// Test reconstruct_positions with only included TLVs (no omitted except TLV0). 
+ /// + /// TLVs: 0(omit), 10(incl), 20(incl) + /// Markers: [] (no omitted TLVs after TLV0) + /// Expected positions: [O, I, I] + #[test] + fn test_reconstruct_positions_no_omitted() { + let included_types = vec![10, 20]; + let markers = vec![]; + let positions = super::reconstruct_positions(&included_types, &markers); + assert_eq!(positions, vec![false, true, true]); + } + + /// Test reconstruct_positions with only omitted TLVs (no included). + /// + /// TLVs: 0(omit), 5(omit), 10(omit) + /// Markers: [1, 2] (consecutive omitted after TLV0) + /// Expected positions: [O, O, O] + #[test] + fn test_reconstruct_positions_no_included() { + let included_types = vec![]; + let markers = vec![1, 2]; + let positions = super::reconstruct_positions(&included_types, &markers); + assert_eq!(positions, vec![false, false, false]); + } + + /// Test round-trip: compute selective disclosure then reconstruct merkle root. + #[test] + fn test_selective_disclosure_round_trip() { + use alloc::collections::BTreeSet; + + // Build TLV stream matching spec example structure + // TLVs: 0, 10, 20, 30, 40, 50, 60 + let mut tlv_bytes = Vec::new(); + tlv_bytes.extend_from_slice(&[0x00, 0x04, 0x00, 0x00, 0x00, 0x00]); // TLV 0 + tlv_bytes.extend_from_slice(&[0x0a, 0x02, 0x00, 0x00]); // TLV 10 + tlv_bytes.extend_from_slice(&[0x14, 0x02, 0x00, 0x00]); // TLV 20 + tlv_bytes.extend_from_slice(&[0x1e, 0x02, 0x00, 0x00]); // TLV 30 + tlv_bytes.extend_from_slice(&[0x28, 0x02, 0x00, 0x00]); // TLV 40 + tlv_bytes.extend_from_slice(&[0x32, 0x02, 0x00, 0x00]); // TLV 50 + tlv_bytes.extend_from_slice(&[0x3c, 0x02, 0x00, 0x00]); // TLV 60 + + // Include types 10 and 40 + let mut included = BTreeSet::new(); + included.insert(10); + included.insert(40); + + // Compute selective disclosure + let disclosure = super::compute_selective_disclosure(&tlv_bytes, &included).unwrap(); + + // Verify markers match spec example + assert_eq!(disclosure.omitted_markers, vec![11, 12, 41, 42]); + + // Verify leaf_hashes count 
matches included TLVs + assert_eq!(disclosure.leaf_hashes.len(), 2); + + // Collect included records for reconstruction + let included_records: Vec<(u64, &[u8])> = TlvStream::new(&tlv_bytes) + .filter(|r| included.contains(&r.r#type)) + .map(|r| (r.r#type, r.record_bytes)) + .collect(); + + // Reconstruct merkle root + let reconstructed = super::reconstruct_merkle_root( + &included_records, + &disclosure.leaf_hashes, + &disclosure.omitted_markers, + &disclosure.missing_hashes, + ) + .unwrap(); + + // Must match original + assert_eq!(reconstructed, disclosure.merkle_root); + } + + /// Test that missing_hashes are in ascending type order per spec. + /// + /// Per spec: "MUST include the minimal set of merkle hashes of missing merkle + /// leaves or nodes in `missing_hashes`, in ascending type order." + /// + /// For the spec example with TLVs [0(o), 10(I), 20(o), 30(o), 40(I), 50(o), 60(o)]: + /// - hash(0) covers type 0 + /// - hash(B(20,30)) covers types 20-30 (min=20) + /// - hash(50) covers type 50 + /// - hash(60) covers type 60 + /// + /// Expected order: [type 0, type 20, type 50, type 60] + /// This means 4 missing_hashes in this order. 
+ #[test] + fn test_missing_hashes_ascending_type_order() { + use alloc::collections::BTreeSet; + + // Build TLV stream: 0, 10, 20, 30, 40, 50, 60 + let mut tlv_bytes = Vec::new(); + tlv_bytes.extend_from_slice(&[0x00, 0x04, 0x00, 0x00, 0x00, 0x00]); // TLV 0 + tlv_bytes.extend_from_slice(&[0x0a, 0x02, 0x00, 0x00]); // TLV 10 + tlv_bytes.extend_from_slice(&[0x14, 0x02, 0x00, 0x00]); // TLV 20 + tlv_bytes.extend_from_slice(&[0x1e, 0x02, 0x00, 0x00]); // TLV 30 + tlv_bytes.extend_from_slice(&[0x28, 0x02, 0x00, 0x00]); // TLV 40 + tlv_bytes.extend_from_slice(&[0x32, 0x02, 0x00, 0x00]); // TLV 50 + tlv_bytes.extend_from_slice(&[0x3c, 0x02, 0x00, 0x00]); // TLV 60 + + // Include types 10 and 40 (same as spec example) + let mut included = BTreeSet::new(); + included.insert(10); + included.insert(40); + + let disclosure = super::compute_selective_disclosure(&tlv_bytes, &included).unwrap(); + + // We should have 4 missing hashes for omitted types: + // - type 0 (single leaf) + // - types 20+30 (combined branch, min_type=20) + // - type 50 (single leaf) + // - type 60 (single leaf) + // + // The spec example only shows 3, but that appears to be incomplete + // (missing hash for type 60). Our implementation should produce 4. + assert_eq!( + disclosure.missing_hashes.len(), + 4, + "Expected 4 missing hashes for omitted types [0, 20+30, 50, 60]" + ); + + // Verify the round-trip still works with the correct ordering + let included_records: Vec<(u64, &[u8])> = TlvStream::new(&tlv_bytes) + .filter(|r| included.contains(&r.r#type)) + .map(|r| (r.r#type, r.record_bytes)) + .collect(); + + let reconstructed = super::reconstruct_merkle_root( + &included_records, + &disclosure.leaf_hashes, + &disclosure.omitted_markers, + &disclosure.missing_hashes, + ) + .unwrap(); + + assert_eq!(reconstructed, disclosure.merkle_root); + } + + /// Test that reconstruction fails with wrong number of missing_hashes. 
+ #[test] + fn test_reconstruction_fails_with_wrong_missing_hashes() { + use alloc::collections::BTreeSet; + + let mut tlv_bytes = Vec::new(); + tlv_bytes.extend_from_slice(&[0x00, 0x04, 0x00, 0x00, 0x00, 0x00]); // TLV 0 + tlv_bytes.extend_from_slice(&[0x0a, 0x02, 0x00, 0x00]); // TLV 10 + tlv_bytes.extend_from_slice(&[0x14, 0x02, 0x00, 0x00]); // TLV 20 + + let mut included = BTreeSet::new(); + included.insert(10); + + let disclosure = super::compute_selective_disclosure(&tlv_bytes, &included).unwrap(); + + let included_records: Vec<(u64, &[u8])> = TlvStream::new(&tlv_bytes) + .filter(|r| included.contains(&r.r#type)) + .map(|r| (r.r#type, r.record_bytes)) + .collect(); + + // Try with empty missing_hashes (should fail) + let result = super::reconstruct_merkle_root( + &included_records, + &disclosure.leaf_hashes, + &disclosure.omitted_markers, + &[], // Wrong! + ); + + assert!(result.is_err()); + } } diff --git a/lightning/src/offers/mod.rs b/lightning/src/offers/mod.rs index 5b5cf6cdc78..bbbf91a1f1c 100644 --- a/lightning/src/offers/mod.rs +++ b/lightning/src/offers/mod.rs @@ -25,6 +25,7 @@ pub mod merkle; pub mod nonce; pub mod parse; mod payer; +pub mod payer_proof; pub mod refund; pub(crate) mod signer; pub mod static_invoice; diff --git a/lightning/src/offers/payer_proof.rs b/lightning/src/offers/payer_proof.rs new file mode 100644 index 00000000000..1570094c1c5 --- /dev/null +++ b/lightning/src/offers/payer_proof.rs @@ -0,0 +1,1070 @@ +// This file is Copyright its original authors, visible in version control +// history. +// +// This file is licensed under the Apache License, Version 2.0 or the MIT license +// , at your option. +// You may not use this file except in accordance with one or both of these +// licenses. + +//! Payer proofs for BOLT 12 invoices. +//! +//! A [`PayerProof`] cryptographically proves that a BOLT 12 invoice was paid by demonstrating: +//! - Possession of the payment preimage (proving the payment occurred) +//! 
- A valid invoice signature over a merkle root (proving the invoice is authentic) +//! - The payer's signature (proving who authorized the payment) +//! +//! This implements the payer proof extension to BOLT 12 as specified in +//! . + +use alloc::collections::BTreeSet; + +use crate::io; +use crate::io::Read; +use crate::ln::channelmanager::PaymentId; +use crate::ln::inbound_payment::ExpandedKey; +use crate::offers::invoice::{Bolt12Invoice, SIGNATURE_TAG}; +use crate::offers::invoice_request::INVOICE_REQUEST_PAYER_ID_TYPE; +use crate::offers::merkle::{ + self, SelectiveDisclosure, SelectiveDisclosureError, TaggedHash, TlvStream, SIGNATURE_TYPES, +}; +use crate::offers::nonce::Nonce; +use crate::offers::parse::Bech32Encode; +use crate::types::features::Bolt12InvoiceFeatures; +use crate::types::payment::{PaymentHash, PaymentPreimage}; +use crate::util::ser::{BigSize, Readable, Writeable}; + +use bitcoin::hashes::{sha256, Hash, HashEngine}; +use bitcoin::secp256k1::schnorr::Signature; +use bitcoin::secp256k1::{Message, PublicKey, Secp256k1}; + +use core::convert::TryFrom; + +#[allow(unused_imports)] +use crate::prelude::*; + +const TLV_SIGNATURE: u64 = 240; +const TLV_PREIMAGE: u64 = 242; +const TLV_OMITTED_MARKERS: u64 = 244; +const TLV_MISSING_HASHES: u64 = 246; +const TLV_LEAF_HASHES: u64 = 248; +const TLV_PAYER_SIGNATURE: u64 = 250; + +const TLV_INVREQ_METADATA: u64 = 0; +// Note: Payer ID type (88) is imported as INVOICE_REQUEST_PAYER_ID_TYPE from invoice_request.rs +// TODO: Invoice TLV types (168, 174, 176) could potentially be exported from invoice.rs +const TLV_INVOICE_PAYMENT_HASH: u64 = 168; +const TLV_INVOICE_FEATURES: u64 = 174; +const TLV_ISSUER_SIGNING_PUBKEY: u64 = 176; + +/// Human-readable prefix for payer proofs in bech32 encoding. +pub const PAYER_PROOF_HRP: &str = "lnp"; + +/// Tag for payer signature computation per BOLT 12 signature calculation. 
+/// Format: "lightning" || messagename || fieldname +const PAYER_SIGNATURE_TAG: &str = concat!("lightning", "payer_proof", "payer_signature"); + +/// Error when building or verifying a payer proof. +#[derive(Debug, Clone, PartialEq, Eq)] +pub enum PayerProofError { + /// The preimage doesn't match the invoice's payment hash. + PreimageMismatch, + /// Error during merkle tree operations. + MerkleError(SelectiveDisclosureError), + /// The invoice signature is invalid. + InvalidInvoiceSignature, + /// The payer signature is invalid. + InvalidPayerSignature, + /// Failed to re-derive the payer signing key from the provided nonce and payment ID. + KeyDerivationFailed, + /// Error during signing. + SigningError, + /// Missing required field in the proof. + MissingRequiredField(&'static str), + /// The proof contains invalid data. + InvalidData(&'static str), + /// The invreq_metadata field cannot be included (per spec). + InvreqMetadataNotAllowed, + /// The omitted_markers contains an included TLV type. + OmittedMarkersContainIncluded, + /// The omitted_markers has too many trailing markers. + TooManyTrailingOmittedMarkers, + /// Error decoding the payer proof. + DecodeError(crate::ln::msgs::DecodeError), +} + +impl From for PayerProofError { + fn from(e: SelectiveDisclosureError) -> Self { + PayerProofError::MerkleError(e) + } +} + +impl From for PayerProofError { + fn from(e: crate::ln::msgs::DecodeError) -> Self { + PayerProofError::DecodeError(e) + } +} + +/// A cryptographic proof that a BOLT 12 invoice was paid. +/// +/// Contains the payment preimage, selective disclosure of invoice fields, +/// the invoice signature, and a payer signature proving who paid. 
+#[derive(Clone, Debug)] +pub struct PayerProof { + bytes: Vec, + contents: PayerProofContents, + merkle_root: sha256::Hash, +} + +#[derive(Clone, Debug)] +struct PayerProofContents { + payer_id: PublicKey, + payment_hash: PaymentHash, + issuer_signing_pubkey: PublicKey, + preimage: PaymentPreimage, + invoice_signature: Signature, + payer_signature: Signature, + payer_note: Option, +} + +/// Builds a [`PayerProof`] from a paid invoice and its preimage. +/// +/// By default, only the required fields are included (payer_id, payment_hash, +/// issuer_signing_pubkey). Additional fields can be included for selective disclosure +/// using the `include_*` methods. +pub struct PayerProofBuilder<'a> { + invoice: &'a Bolt12Invoice, + preimage: PaymentPreimage, + included_types: BTreeSet, +} + +impl<'a> PayerProofBuilder<'a> { + /// Create a new builder from a paid invoice and its preimage. + /// + /// Returns an error if the preimage doesn't match the invoice's payment hash. + pub fn new( + invoice: &'a Bolt12Invoice, preimage: PaymentPreimage, + ) -> Result { + let computed_hash = sha256::Hash::hash(&preimage.0); + if computed_hash.as_byte_array() != &invoice.payment_hash().0 { + return Err(PayerProofError::PreimageMismatch); + } + + let mut included_types = BTreeSet::new(); + included_types.insert(INVOICE_REQUEST_PAYER_ID_TYPE); + included_types.insert(TLV_INVOICE_PAYMENT_HASH); + included_types.insert(TLV_ISSUER_SIGNING_PUBKEY); + + if invoice.invoice_features() != &Bolt12InvoiceFeatures::empty() { + included_types.insert(TLV_INVOICE_FEATURES); + } + + Ok(Self { invoice, preimage, included_types }) + } + + /// Check if a TLV type is allowed to be included in the payer proof. + /// + /// Per spec: MUST NOT include invreq_metadata (type 0). + fn is_type_allowed(tlv_type: u64) -> bool { + tlv_type != TLV_INVREQ_METADATA + } + + /// Include a specific TLV type in the proof. + /// + /// Returns an error if the type is not allowed (e.g., invreq_metadata). 
+ pub fn include_type(mut self, tlv_type: u64) -> Result { + if !Self::is_type_allowed(tlv_type) { + return Err(PayerProofError::InvreqMetadataNotAllowed); + } + self.included_types.insert(tlv_type); + Ok(self) + } + + /// Include the offer description in the proof. + pub fn include_offer_description(mut self) -> Self { + self.included_types.insert(10); + self + } + + /// Include the offer issuer in the proof. + pub fn include_offer_issuer(mut self) -> Self { + self.included_types.insert(18); + self + } + + /// Include the invoice amount in the proof. + pub fn include_invoice_amount(mut self) -> Self { + self.included_types.insert(170); + self + } + + /// Include the invoice creation timestamp in the proof. + pub fn include_invoice_created_at(mut self) -> Self { + self.included_types.insert(164); + self + } + + /// Builds a signed [`PayerProof`] using the provided signing function. + /// + /// Use this when you have direct access to the payer's signing key. + pub fn build_and_sign( + self, sign_fn: F, note: Option<&str>, + ) -> Result + where + F: FnOnce(&Message) -> Result, + { + let unsigned = self.build_unsigned()?; + unsigned.sign(sign_fn, note) + } + + /// Builds a signed [`PayerProof`] using a key derived from an [`ExpandedKey`] and [`Nonce`]. + /// + /// This re-derives the payer signing key using the same derivation scheme as invoice requests + /// created with `deriving_signing_pubkey`. The `nonce` and `payment_id` must be the same ones + /// used when creating the original invoice request (available from the + /// [`OffersContext::OutboundPaymentForOffer`]). 
+ /// + /// [`OffersContext::OutboundPaymentForOffer`]: crate::blinded_path::message::OffersContext::OutboundPaymentForOffer + pub fn build_and_sign_with_derived_key( + self, expanded_key: &ExpandedKey, nonce: Nonce, payment_id: PaymentId, note: Option<&str>, + ) -> Result { + let secp_ctx = Secp256k1::new(); + let keys = self + .invoice + .derive_signing_keys(payment_id, nonce, expanded_key, &secp_ctx) + .map_err(|_| PayerProofError::KeyDerivationFailed)?; + + let unsigned = self.build_unsigned()?; + unsigned.sign(|message| Ok(secp_ctx.sign_schnorr_no_aux_rand(message, &keys)), note) + } + + fn build_unsigned(self) -> Result { + let mut invoice_bytes = Vec::new(); + self.invoice.write(&mut invoice_bytes).expect("Vec write should not fail"); + let mut bytes_without_sig = Vec::new(); + for r in TlvStream::new(&invoice_bytes).filter(|r| !SIGNATURE_TYPES.contains(&r.r#type)) { + bytes_without_sig.extend_from_slice(r.record_bytes); + } + + let disclosure = + merkle::compute_selective_disclosure(&bytes_without_sig, &self.included_types)?; + + let included_records: Vec<(u64, Vec)> = TlvStream::new(&invoice_bytes) + .filter(|r| self.included_types.contains(&r.r#type)) + .map(|r| (r.r#type, r.record_bytes.to_vec())) + .collect(); + + let invoice_signature = self.invoice.signature(); + + Ok(UnsignedPayerProof { + invoice_signature, + preimage: self.preimage, + payer_id: self.invoice.payer_signing_pubkey(), + payment_hash: self.invoice.payment_hash().clone(), + issuer_signing_pubkey: self.invoice.signing_pubkey(), + included_records, + disclosure, + }) + } +} + +/// An unsigned [`PayerProof`] ready for signing. 
+struct UnsignedPayerProof { + invoice_signature: Signature, + preimage: PaymentPreimage, + payer_id: PublicKey, + payment_hash: PaymentHash, + issuer_signing_pubkey: PublicKey, + included_records: Vec<(u64, Vec)>, + disclosure: SelectiveDisclosure, +} + +impl UnsignedPayerProof { + fn sign(self, sign_fn: F, note: Option<&str>) -> Result + where + F: FnOnce(&Message) -> Result, + { + let message = Self::compute_payer_signature_message(note, &self.disclosure.merkle_root); + let payer_signature = sign_fn(&message).map_err(|_| PayerProofError::SigningError)?; + + let secp_ctx = Secp256k1::verification_only(); + secp_ctx + .verify_schnorr(&payer_signature, &message, &self.payer_id.into()) + .map_err(|_| PayerProofError::InvalidPayerSignature)?; + + let bytes = self.serialize_payer_proof(&payer_signature, note); + + Ok(PayerProof { + bytes, + contents: PayerProofContents { + payer_id: self.payer_id, + payment_hash: self.payment_hash, + issuer_signing_pubkey: self.issuer_signing_pubkey, + preimage: self.preimage, + invoice_signature: self.invoice_signature, + payer_signature, + payer_note: note.map(String::from), + }, + merkle_root: self.disclosure.merkle_root, + }) + } + + /// Compute the payer signature message per BOLT 12 signature calculation. 
+ fn compute_payer_signature_message(note: Option<&str>, merkle_root: &sha256::Hash) -> Message { + let mut inner_hasher = sha256::Hash::engine(); + if let Some(n) = note { + inner_hasher.input(n.as_bytes()); + } + inner_hasher.input(merkle_root.as_ref()); + let inner_msg = sha256::Hash::from_engine(inner_hasher); + + let tag_hash = sha256::Hash::hash(PAYER_SIGNATURE_TAG.as_bytes()); + + let mut final_hasher = sha256::Hash::engine(); + final_hasher.input(tag_hash.as_ref()); + final_hasher.input(tag_hash.as_ref()); + final_hasher.input(inner_msg.as_ref()); + let final_digest = sha256::Hash::from_engine(final_hasher); + + Message::from_digest(*final_digest.as_byte_array()) + } + + fn serialize_payer_proof(&self, payer_signature: &Signature, note: Option<&str>) -> Vec { + let mut bytes = Vec::new(); + + for (_, record_bytes) in &self.included_records { + bytes.extend_from_slice(record_bytes); + } + + BigSize(TLV_SIGNATURE).write(&mut bytes).expect("Vec write should not fail"); + BigSize(64).write(&mut bytes).expect("Vec write should not fail"); + self.invoice_signature.write(&mut bytes).expect("Vec write should not fail"); + + BigSize(TLV_PREIMAGE).write(&mut bytes).expect("Vec write should not fail"); + BigSize(32).write(&mut bytes).expect("Vec write should not fail"); + bytes.extend_from_slice(&self.preimage.0); + + if !self.disclosure.omitted_markers.is_empty() { + let omitted_len: u64 = self + .disclosure + .omitted_markers + .iter() + .map(|m| BigSize(*m).serialized_length() as u64) + .sum(); + BigSize(TLV_OMITTED_MARKERS).write(&mut bytes).expect("Vec write should not fail"); + BigSize(omitted_len).write(&mut bytes).expect("Vec write should not fail"); + for marker in &self.disclosure.omitted_markers { + BigSize(*marker).write(&mut bytes).expect("Vec write should not fail"); + } + } + + if !self.disclosure.missing_hashes.is_empty() { + let len = self.disclosure.missing_hashes.len() * 32; + BigSize(TLV_MISSING_HASHES).write(&mut bytes).expect("Vec write should 
not fail"); + BigSize(len as u64).write(&mut bytes).expect("Vec write should not fail"); + for hash in &self.disclosure.missing_hashes { + bytes.extend_from_slice(hash.as_ref()); + } + } + + if !self.disclosure.leaf_hashes.is_empty() { + let len = self.disclosure.leaf_hashes.len() * 32; + BigSize(TLV_LEAF_HASHES).write(&mut bytes).expect("Vec write should not fail"); + BigSize(len as u64).write(&mut bytes).expect("Vec write should not fail"); + for hash in &self.disclosure.leaf_hashes { + bytes.extend_from_slice(hash.as_ref()); + } + } + + let note_bytes = note.map(|n| n.as_bytes()).unwrap_or(&[]); + let payer_sig_len = 64 + note_bytes.len(); + BigSize(TLV_PAYER_SIGNATURE).write(&mut bytes).expect("Vec write should not fail"); + BigSize(payer_sig_len as u64).write(&mut bytes).expect("Vec write should not fail"); + payer_signature.write(&mut bytes).expect("Vec write should not fail"); + bytes.extend_from_slice(note_bytes); + + bytes + } +} + +impl PayerProof { + /// Verify the payer proof. + pub fn verify(&self) -> Result<(), PayerProofError> { + let computed = sha256::Hash::hash(&self.contents.preimage.0); + if computed.as_byte_array() != &self.contents.payment_hash.0 { + return Err(PayerProofError::PreimageMismatch); + } + + let tagged_hash = TaggedHash::from_merkle_root(SIGNATURE_TAG, self.merkle_root); + merkle::verify_signature( + &self.contents.invoice_signature, + &tagged_hash, + self.contents.issuer_signing_pubkey, + ) + .map_err(|_| PayerProofError::InvalidInvoiceSignature)?; + + let message = UnsignedPayerProof::compute_payer_signature_message( + self.contents.payer_note.as_deref(), + &self.merkle_root, + ); + + let secp_ctx = Secp256k1::verification_only(); + secp_ctx + .verify_schnorr( + &self.contents.payer_signature, + &message, + &self.contents.payer_id.into(), + ) + .map_err(|_| PayerProofError::InvalidPayerSignature)?; + + Ok(()) + } + + /// The payment preimage proving the invoice was paid. 
+ pub fn preimage(&self) -> PaymentPreimage { + self.contents.preimage + } + + /// The payer's public key (who paid). + pub fn payer_id(&self) -> PublicKey { + self.contents.payer_id + } + + /// The issuer's signing public key (the key that signed the invoice). + pub fn issuer_signing_pubkey(&self) -> PublicKey { + self.contents.issuer_signing_pubkey + } + + /// The payment hash. + pub fn payment_hash(&self) -> PaymentHash { + self.contents.payment_hash + } + + /// The payer's note, if any. + pub fn payer_note(&self) -> Option<&str> { + self.contents.payer_note.as_deref() + } + + /// The merkle root of the original invoice. + pub fn merkle_root(&self) -> sha256::Hash { + self.merkle_root + } + + /// The raw bytes of the payer proof. + pub fn bytes(&self) -> &[u8] { + &self.bytes + } +} + +impl Bech32Encode for PayerProof { + const BECH32_HRP: &'static str = PAYER_PROOF_HRP; +} + +impl AsRef<[u8]> for PayerProof { + fn as_ref(&self) -> &[u8] { + &self.bytes + } +} + +// TODO: This uses manual TLV parsing rather than the standard `ParsedMessage` + +// `tlv_stream!` pattern (used by Offer, InvoiceRequest, Bolt12Invoice) because payer +// proofs have a hybrid structure: a dynamic set of included invoice TLV records +// (preserved as raw bytes for merkle reconstruction) plus payer-proof-specific TLVs +// (240-250) with non-standard encodings (BigSize lists, concatenated hashes). +// Possible improvements: separate parsing from semantic validation into two layers, +// extract helpers for repeated cursor+read patterns (read_tlv_value, read_hash_list). 
+impl TryFrom> for PayerProof { + type Error = crate::offers::parse::Bolt12ParseError; + + fn try_from(bytes: Vec) -> Result { + use crate::ln::msgs::DecodeError; + use crate::offers::parse::Bolt12ParseError; + + let mut payer_id: Option = None; + let mut payment_hash: Option = None; + let mut issuer_signing_pubkey: Option = None; + let mut invoice_signature: Option = None; + let mut preimage: Option = None; + let mut payer_signature: Option = None; + let mut payer_note: Option = None; + + let mut leaf_hashes: Vec = Vec::new(); + let mut omitted_markers: Vec = Vec::new(); + let mut missing_hashes: Vec = Vec::new(); + + let mut included_types: BTreeSet = BTreeSet::new(); + let mut included_records: Vec<(u64, usize, usize)> = Vec::new(); + + let mut prev_tlv_type: u64 = 0; + + for record in TlvStream::new(&bytes) { + let tlv_type = record.r#type; + + // Strict ascending order check covers both ordering and duplicates. + if tlv_type <= prev_tlv_type && prev_tlv_type != 0 { + return Err(Bolt12ParseError::Decode(DecodeError::InvalidValue)); + } + prev_tlv_type = tlv_type; + + match tlv_type { + INVOICE_REQUEST_PAYER_ID_TYPE => { + let mut record_cursor = io::Cursor::new(record.record_bytes); + let _type: BigSize = Readable::read(&mut record_cursor)?; + let _len: BigSize = Readable::read(&mut record_cursor)?; + payer_id = Some(Readable::read(&mut record_cursor)?); + included_types.insert(tlv_type); + included_records.push(( + tlv_type, + record.end - record.record_bytes.len(), + record.end, + )); + }, + TLV_INVOICE_PAYMENT_HASH => { + let mut record_cursor = io::Cursor::new(record.record_bytes); + let _type: BigSize = Readable::read(&mut record_cursor)?; + let _len: BigSize = Readable::read(&mut record_cursor)?; + payment_hash = Some(Readable::read(&mut record_cursor)?); + included_types.insert(tlv_type); + included_records.push(( + tlv_type, + record.end - record.record_bytes.len(), + record.end, + )); + }, + TLV_ISSUER_SIGNING_PUBKEY => { + let mut record_cursor = 
io::Cursor::new(record.record_bytes); + let _type: BigSize = Readable::read(&mut record_cursor)?; + let _len: BigSize = Readable::read(&mut record_cursor)?; + issuer_signing_pubkey = Some(Readable::read(&mut record_cursor)?); + included_types.insert(tlv_type); + included_records.push(( + tlv_type, + record.end - record.record_bytes.len(), + record.end, + )); + }, + TLV_SIGNATURE => { + let mut record_cursor = io::Cursor::new(record.record_bytes); + let _type: BigSize = Readable::read(&mut record_cursor)?; + let _len: BigSize = Readable::read(&mut record_cursor)?; + invoice_signature = Some(Readable::read(&mut record_cursor)?); + }, + TLV_PREIMAGE => { + let mut record_cursor = io::Cursor::new(record.record_bytes); + let _type: BigSize = Readable::read(&mut record_cursor)?; + let _len: BigSize = Readable::read(&mut record_cursor)?; + let mut preimage_bytes = [0u8; 32]; + record_cursor + .read_exact(&mut preimage_bytes) + .map_err(|_| DecodeError::ShortRead)?; + preimage = Some(PaymentPreimage(preimage_bytes)); + }, + TLV_OMITTED_MARKERS => { + let mut record_cursor = io::Cursor::new(record.record_bytes); + let _type: BigSize = Readable::read(&mut record_cursor)?; + let len: BigSize = Readable::read(&mut record_cursor)?; + let end_pos = record_cursor.position() + len.0; + while record_cursor.position() < end_pos { + let marker: BigSize = Readable::read(&mut record_cursor)?; + omitted_markers.push(marker.0); + } + }, + TLV_MISSING_HASHES => { + let mut record_cursor = io::Cursor::new(record.record_bytes); + let _type: BigSize = Readable::read(&mut record_cursor)?; + let len: BigSize = Readable::read(&mut record_cursor)?; + if len.0 % 32 != 0 { + return Err(Bolt12ParseError::Decode(DecodeError::InvalidValue)); + } + let num_hashes = len.0 / 32; + for _ in 0..num_hashes { + let mut hash_bytes = [0u8; 32]; + record_cursor + .read_exact(&mut hash_bytes) + .map_err(|_| DecodeError::ShortRead)?; + missing_hashes.push(sha256::Hash::from_byte_array(hash_bytes)); + } + }, + 
TLV_LEAF_HASHES => { + let mut record_cursor = io::Cursor::new(record.record_bytes); + let _type: BigSize = Readable::read(&mut record_cursor)?; + let len: BigSize = Readable::read(&mut record_cursor)?; + if len.0 % 32 != 0 { + return Err(Bolt12ParseError::Decode(DecodeError::InvalidValue)); + } + let num_hashes = len.0 / 32; + for _ in 0..num_hashes { + let mut hash_bytes = [0u8; 32]; + record_cursor + .read_exact(&mut hash_bytes) + .map_err(|_| DecodeError::ShortRead)?; + leaf_hashes.push(sha256::Hash::from_byte_array(hash_bytes)); + } + }, + TLV_PAYER_SIGNATURE => { + let mut record_cursor = io::Cursor::new(record.record_bytes); + let _type: BigSize = Readable::read(&mut record_cursor)?; + let len: BigSize = Readable::read(&mut record_cursor)?; + payer_signature = Some(Readable::read(&mut record_cursor)?); + let note_len = len.0.saturating_sub(64); + if note_len > 0 { + let mut note_bytes = vec![0u8; note_len as usize]; + record_cursor + .read_exact(&mut note_bytes) + .map_err(|_| DecodeError::ShortRead)?; + payer_note = Some( + String::from_utf8(note_bytes).map_err(|_| DecodeError::InvalidValue)?, + ); + } + }, + _ => { + if tlv_type == TLV_INVREQ_METADATA { + return Err(Bolt12ParseError::Decode(DecodeError::InvalidValue)); + } + if !SIGNATURE_TYPES.contains(&tlv_type) { + included_types.insert(tlv_type); + included_records.push(( + tlv_type, + record.end - record.record_bytes.len(), + record.end, + )); + } + }, + } + } + + let payer_id = payer_id.ok_or(Bolt12ParseError::InvalidSemantics( + crate::offers::parse::Bolt12SemanticError::MissingPayerSigningPubkey, + ))?; + let payment_hash = payment_hash.ok_or(Bolt12ParseError::InvalidSemantics( + crate::offers::parse::Bolt12SemanticError::MissingPaymentHash, + ))?; + let issuer_signing_pubkey = + issuer_signing_pubkey.ok_or(Bolt12ParseError::InvalidSemantics( + crate::offers::parse::Bolt12SemanticError::MissingSigningPubkey, + ))?; + let invoice_signature = 
invoice_signature.ok_or(Bolt12ParseError::InvalidSemantics( + crate::offers::parse::Bolt12SemanticError::MissingSignature, + ))?; + let preimage = preimage.ok_or(Bolt12ParseError::Decode(DecodeError::InvalidValue))?; + let payer_signature = payer_signature.ok_or(Bolt12ParseError::InvalidSemantics( + crate::offers::parse::Bolt12SemanticError::MissingSignature, + ))?; + + validate_omitted_markers_for_parsing(&omitted_markers, &included_types) + .map_err(|_| Bolt12ParseError::Decode(DecodeError::InvalidValue))?; + + if leaf_hashes.len() != included_records.len() { + return Err(Bolt12ParseError::Decode(DecodeError::InvalidValue)); + } + + let included_refs: Vec<(u64, &[u8])> = + included_records.iter().map(|&(t, start, end)| (t, &bytes[start..end])).collect(); + let merkle_root = merkle::reconstruct_merkle_root( + &included_refs, + &leaf_hashes, + &omitted_markers, + &missing_hashes, + ) + .map_err(|_| Bolt12ParseError::Decode(DecodeError::InvalidValue))?; + + Ok(PayerProof { + bytes, + contents: PayerProofContents { + payer_id, + payment_hash, + issuer_signing_pubkey, + preimage, + invoice_signature, + payer_signature, + payer_note, + }, + merkle_root, + }) + } +} + +/// Validate omitted markers during parsing. 
+/// +/// Per spec: +/// - MUST NOT contain 0 +/// - MUST NOT contain signature TLV element numbers (240-1000) +/// - MUST be in strict ascending order +/// - MUST NOT contain the number of an included TLV field +/// - MUST NOT contain more than one number larger than the largest included non-signature TLV +/// - Markers MUST be minimized: each marker must be exactly prev_value + 1 within +/// a run, and the first marker after an included type X must be X + 1 +fn validate_omitted_markers_for_parsing( + omitted_markers: &[u64], included_types: &BTreeSet, +) -> Result<(), PayerProofError> { + let mut inc_iter = included_types.iter().copied().peekable(); + // After implicit TLV0 (marker 0), the first minimized marker would be 1 + let mut expected_next: u64 = 1; + let mut trailing_count = 0; + let max_included = included_types.iter().copied().max().unwrap_or(0); + let mut prev = 0u64; + + for &marker in omitted_markers { + // MUST NOT contain 0 + if marker == 0 { + return Err(PayerProofError::InvalidData("omitted_markers contains 0")); + } + + // MUST NOT contain signature TLV types + if SIGNATURE_TYPES.contains(&marker) { + return Err(PayerProofError::InvalidData("omitted_markers contains signature type")); + } + + // MUST be strictly ascending + if marker <= prev { + return Err(PayerProofError::InvalidData("omitted_markers not strictly ascending")); + } + + // MUST NOT contain included TLV types + if included_types.contains(&marker) { + return Err(PayerProofError::OmittedMarkersContainIncluded); + } + + // Validate minimization: marker must equal expected_next (continuation + // of current run), or there must be an included type X between the + // previous position and this marker such that X + 1 == marker. 
+ if marker != expected_next { + let mut found = false; + for inc_type in inc_iter.by_ref() { + if inc_type + 1 == marker { + found = true; + break; + } + if inc_type >= marker { + return Err(PayerProofError::InvalidData("omitted_markers not minimized")); + } + } + if !found { + return Err(PayerProofError::InvalidData("omitted_markers not minimized")); + } + } + + expected_next = marker + 1; + + // Count markers larger than largest included + if marker > max_included { + trailing_count += 1; + } + + prev = marker; + } + + // MUST NOT contain more than one number larger than largest included + if trailing_count > 1 { + return Err(PayerProofError::TooManyTrailingOmittedMarkers); + } + + Ok(()) +} + +impl core::fmt::Display for PayerProof { + fn fmt(&self, f: &mut core::fmt::Formatter) -> core::fmt::Result { + self.fmt_bech32_str(f) + } +} + +#[cfg(test)] +mod tests { + use super::*; + use crate::offers::merkle::compute_selective_disclosure; + + #[test] + fn test_selective_disclosure_computation() { + // Test that the merkle selective disclosure works correctly + // Simple TLV stream with types 1, 2 + let tlv_bytes = vec![ + 0x01, 0x03, 0xe8, 0x03, 0xe8, // type 1, length 3, value + 0x02, 0x08, 0x00, 0x00, 0x01, 0x00, 0x00, 0x02, 0x00, 0x03, // type 2 + ]; + + let mut included = BTreeSet::new(); + included.insert(1); + + let result = compute_selective_disclosure(&tlv_bytes, &included); + assert!(result.is_ok()); + + let disclosure = result.unwrap(); + assert_eq!(disclosure.leaf_hashes.len(), 1); // One included TLV + assert!(!disclosure.missing_hashes.is_empty()); // Should have missing hashes for omitted + } + + /// Test the omitted_markers marker algorithm per BOLT 12 payer proof spec. 
+ /// + /// From the spec example: + /// TLVs: 0 (omitted), 10 (included), 20 (omitted), 30 (omitted), + /// 40 (included), 50 (omitted), 60 (omitted), 240 (signature) + /// + /// Expected markers: [11, 12, 41, 42] + /// + /// The algorithm: + /// - TLV 0 is always omitted and implicit (not in markers) + /// - For omitted TLV after included: marker = prev_included_type + 1 + /// - For consecutive omitted TLVs: marker = prev_marker + 1 + #[test] + fn test_omitted_markers_spec_example() { + // Build a synthetic TLV stream matching the spec example + // TLV format: type (BigSize) || length (BigSize) || value + let mut tlv_bytes = Vec::new(); + + // TLV 0: type=0, len=4, value=dummy + tlv_bytes.extend_from_slice(&[0x00, 0x04, 0x00, 0x00, 0x00, 0x00]); + // TLV 10: type=10, len=2, value=dummy + tlv_bytes.extend_from_slice(&[0x0a, 0x02, 0x00, 0x00]); + // TLV 20: type=20, len=2, value=dummy + tlv_bytes.extend_from_slice(&[0x14, 0x02, 0x00, 0x00]); + // TLV 30: type=30, len=2, value=dummy + tlv_bytes.extend_from_slice(&[0x1e, 0x02, 0x00, 0x00]); + // TLV 40: type=40, len=2, value=dummy + tlv_bytes.extend_from_slice(&[0x28, 0x02, 0x00, 0x00]); + // TLV 50: type=50, len=2, value=dummy + tlv_bytes.extend_from_slice(&[0x32, 0x02, 0x00, 0x00]); + // TLV 60: type=60, len=2, value=dummy + tlv_bytes.extend_from_slice(&[0x3c, 0x02, 0x00, 0x00]); + + // Include types 10 and 40 + let mut included = BTreeSet::new(); + included.insert(10); + included.insert(40); + + let disclosure = compute_selective_disclosure(&tlv_bytes, &included).unwrap(); + + // Per spec example, omitted_markers should be [11, 12, 41, 42] + assert_eq!(disclosure.omitted_markers, vec![11, 12, 41, 42]); + + // leaf_hashes should have 2 entries (one for each included TLV) + assert_eq!(disclosure.leaf_hashes.len(), 2); + } + + /// Test that the marker algorithm handles edge cases correctly. 
+ #[test] + fn test_omitted_markers_edge_cases() { + // Test with only one included TLV at the start + let mut tlv_bytes = Vec::new(); + tlv_bytes.extend_from_slice(&[0x00, 0x04, 0x00, 0x00, 0x00, 0x00]); // TLV 0 + tlv_bytes.extend_from_slice(&[0x0a, 0x02, 0x00, 0x00]); // TLV 10 + tlv_bytes.extend_from_slice(&[0x14, 0x02, 0x00, 0x00]); // TLV 20 + tlv_bytes.extend_from_slice(&[0x1e, 0x02, 0x00, 0x00]); // TLV 30 + + let mut included = BTreeSet::new(); + included.insert(10); + + let disclosure = compute_selective_disclosure(&tlv_bytes, &included).unwrap(); + + // After included type 10, omitted types 20 and 30 get markers 11 and 12 + assert_eq!(disclosure.omitted_markers, vec![11, 12]); + } + + /// Test that all included TLVs produce no omitted markers (except implicit TLV0). + #[test] + fn test_omitted_markers_all_included() { + let mut tlv_bytes = Vec::new(); + tlv_bytes.extend_from_slice(&[0x00, 0x04, 0x00, 0x00, 0x00, 0x00]); // TLV 0 (always omitted) + tlv_bytes.extend_from_slice(&[0x0a, 0x02, 0x00, 0x00]); // TLV 10 + tlv_bytes.extend_from_slice(&[0x14, 0x02, 0x00, 0x00]); // TLV 20 + + let mut included = BTreeSet::new(); + included.insert(10); + included.insert(20); + + let disclosure = compute_selective_disclosure(&tlv_bytes, &included).unwrap(); + + // Only TLV 0 is omitted (implicit), so no markers needed + assert!(disclosure.omitted_markers.is_empty()); + } + + /// Test validation of omitted_markers - must not contain 0. + #[test] + fn test_validate_omitted_markers_rejects_zero() { + let omitted = vec![0, 11, 12]; + let included: BTreeSet<u64> = [10, 30].iter().copied().collect(); + + let result = validate_omitted_markers_for_parsing(&omitted, &included); + assert!(matches!(result, Err(PayerProofError::InvalidData(_)))); + } + + /// Test validation of omitted_markers - must not contain signature types. 
+ #[test] + fn test_validate_omitted_markers_rejects_signature_types() { + // included=[10], markers=[1, 2, 250] — 250 is a signature type + let omitted = vec![1, 2, 250]; + let included: BTreeSet<u64> = [10].iter().copied().collect(); + + let result = validate_omitted_markers_for_parsing(&omitted, &included); + assert!(matches!(result, Err(PayerProofError::InvalidData(_)))); + } + + /// Test validation of omitted_markers - must be strictly ascending. + #[test] + fn test_validate_omitted_markers_rejects_non_ascending() { + // markers=[1, 11, 9]: 1 ok, 11 ok (after included 10), but 9 <= 11 fails ascending + let omitted = vec![1, 11, 9]; + let included: BTreeSet<u64> = [10, 30].iter().copied().collect(); + + let result = validate_omitted_markers_for_parsing(&omitted, &included); + assert!(matches!(result, Err(PayerProofError::InvalidData(_)))); + } + + /// Test validation of omitted_markers - must not contain included types. + #[test] + fn test_validate_omitted_markers_rejects_included_types() { + // included=[10, 30], markers=[1, 10] — 10 is in included set + let omitted = vec![1, 10]; + let included: BTreeSet<u64> = [10, 30].iter().copied().collect(); + + let result = validate_omitted_markers_for_parsing(&omitted, &included); + assert!(matches!(result, Err(PayerProofError::OmittedMarkersContainIncluded))); + } + + /// Test validation of omitted_markers - must not have too many trailing markers. + #[test] + fn test_validate_omitted_markers_rejects_too_many_trailing() { + // included=[10, 20], markers=[1, 21, 22] — both 21 and 22 > max included (20) + let omitted = vec![1, 21, 22]; + let included: BTreeSet<u64> = [10, 20].iter().copied().collect(); + + let result = validate_omitted_markers_for_parsing(&omitted, &included); + assert!(matches!(result, Err(PayerProofError::TooManyTrailingOmittedMarkers))); + } + + /// Test that valid minimized omitted_markers pass validation. 
+ #[test] + fn test_validate_omitted_markers_accepts_valid() { + // Realistic payer proof: included types include required fields (88, 168, 176) + // so max_included=176 and markers are well below it. + // Layout: 0(omit), 10(incl), 20(omit), 30(omit), 40(incl), 50(omit), 88(incl), + // 168(incl), 176(incl) + // markers=[11, 12, 41, 89] + let omitted = vec![11, 12, 41, 89]; + let included: BTreeSet<u64> = [10, 40, 88, 168, 176].iter().copied().collect(); + + let result = validate_omitted_markers_for_parsing(&omitted, &included); + assert!(result.is_ok()); + } + + /// Test that non-minimized markers are rejected. + #[test] + fn test_validate_omitted_markers_rejects_non_minimized() { + // included=[10, 40], markers=[11, 15, 41, 42] + // marker 15 should be 12 (continuation of run after 11) + let omitted = vec![11, 15, 41, 42]; + let included: BTreeSet<u64> = [10, 40].iter().copied().collect(); + + let result = validate_omitted_markers_for_parsing(&omitted, &included); + assert!(matches!(result, Err(PayerProofError::InvalidData(_)))); + } + + /// Test that non-minimized first marker in a run is rejected. + #[test] + fn test_validate_omitted_markers_rejects_non_minimized_run_start() { + // included=[10, 40], markers=[11, 12, 45, 46] + // marker 45 should be 41 (first omitted after included 40) + let omitted = vec![11, 12, 45, 46]; + let included: BTreeSet<u64> = [10, 40].iter().copied().collect(); + + let result = validate_omitted_markers_for_parsing(&omitted, &included); + assert!(matches!(result, Err(PayerProofError::InvalidData(_)))); + } + + /// Test minimized markers with omitted TLVs before any included type. 
+ #[test] + fn test_validate_omitted_markers_accepts_leading_run() { + // included=[40], markers=[1, 2, 41] + // Two omitted before any included type, one after 40 + let omitted = vec![1, 2, 41]; + let included: BTreeSet<u64> = [40].iter().copied().collect(); + + let result = validate_omitted_markers_for_parsing(&omitted, &included); + assert!(result.is_ok()); + } + + /// Test minimized markers with consecutive included types (no markers between them). + #[test] + fn test_validate_omitted_markers_accepts_consecutive_included() { + // included=[10, 20, 40], markers=[1, 41] + // One omitted before 10, no omitted between 10-20 or 20-40, one after 40 + let omitted = vec![1, 41]; + let included: BTreeSet<u64> = [10, 20, 40].iter().copied().collect(); + + let result = validate_omitted_markers_for_parsing(&omitted, &included); + assert!(result.is_ok()); + } + + /// Test that invreq_metadata (type 0) cannot be explicitly included. + #[test] + fn test_invreq_metadata_not_allowed() { + assert!(!PayerProofBuilder::<'_>::is_type_allowed(TLV_INVREQ_METADATA)); + assert!(PayerProofBuilder::<'_>::is_type_allowed(INVOICE_REQUEST_PAYER_ID_TYPE)); + } + + /// Test that out-of-order TLVs are rejected during parsing. + #[test] + fn test_parsing_rejects_out_of_order_tlvs() { + use core::convert::TryFrom; + + // Create a malformed TLV stream with out-of-order types (20 before 10) + // TLV format: type (BigSize) || length (BigSize) || value + let mut bytes = Vec::new(); + // TLV type 20, length 2, value + bytes.extend_from_slice(&[0x14, 0x02, 0x00, 0x00]); + // TLV type 10, length 2, value (OUT OF ORDER!) + bytes.extend_from_slice(&[0x0a, 0x02, 0x00, 0x00]); + + let result = PayerProof::try_from(bytes); + assert!(result.is_err()); + } + + /// Test that duplicate TLVs are rejected during parsing. 
+ #[test] + fn test_parsing_rejects_duplicate_tlvs() { + use core::convert::TryFrom; + + // Create a malformed TLV stream with duplicate type 10 + let mut bytes = Vec::new(); + // TLV type 10, length 2, value + bytes.extend_from_slice(&[0x0a, 0x02, 0x00, 0x00]); + // TLV type 10 again (DUPLICATE!) + bytes.extend_from_slice(&[0x0a, 0x02, 0x00, 0x00]); + + let result = PayerProof::try_from(bytes); + assert!(result.is_err()); + } + + /// Test that invalid hash lengths (not multiple of 32) are rejected. + #[test] + fn test_parsing_rejects_invalid_hash_length() { + use core::convert::TryFrom; + + // Create a TLV stream with missing_hashes (type 246) that has invalid length + // BigSize encoding: values 0-252 are single byte, 253-65535 use 0xFD prefix + let mut bytes = Vec::new(); + // TLV type 246 (missing_hashes) - 246 < 253 so single byte + bytes.push(0xf6); // type 246 + bytes.push(0x21); // length 33 (not multiple of 32!) + bytes.extend_from_slice(&[0x00; 33]); // 33 bytes of zeros + + let result = PayerProof::try_from(bytes); + assert!(result.is_err()); + } + + /// Test that invalid leaf_hashes length (not multiple of 32) is rejected. + #[test] + fn test_parsing_rejects_invalid_leaf_hashes_length() { + use core::convert::TryFrom; + + // Create a TLV stream with leaf_hashes (type 248) that has invalid length + // BigSize encoding: values 0-252 are single byte, 253-65535 use 0xFD prefix + let mut bytes = Vec::new(); + // TLV type 248 (leaf_hashes) - 248 < 253 so single byte + bytes.push(0xf8); // type 248 + bytes.push(0x1f); // length 31 (not multiple of 32!) 
+ bytes.extend_from_slice(&[0x00; 31]); // 31 bytes of zeros + + let result = PayerProof::try_from(bytes); + assert!(result.is_err()); + } +} diff --git a/lightning/src/offers/signer.rs b/lightning/src/offers/signer.rs index e51a120b6d7..bc0442ba093 100644 --- a/lightning/src/offers/signer.rs +++ b/lightning/src/offers/signer.rs @@ -321,6 +321,38 @@ pub(super) fn derive_keys(nonce: Nonce, expanded_key: &ExpandedKey) -> Keypair { Keypair::from_secret_key(&secp_ctx, &privkey) } +/// Re-derives the payer signing keypair from the given components. +/// +/// This re-performs the same key derivation that occurs during invoice request creation with +/// [`InvoiceRequestBuilder::deriving_signing_pubkey`], allowing the payer to recover their +/// signing keypair for creating payer proofs. +/// +/// The `tlv_stream` must contain the offer and invoice request TLV records (excluding +/// payer metadata type 0 and payer_id type 88), matching what was used during +/// the original key derivation. +/// +/// [`InvoiceRequestBuilder::deriving_signing_pubkey`]: crate::offers::invoice_request::InvoiceRequestBuilder +pub(super) fn derive_payer_keys<'a, T: secp256k1::Signing>( + payment_id: PaymentId, nonce: Nonce, expanded_key: &ExpandedKey, iv_bytes: &[u8; IV_LEN], + signing_pubkey: PublicKey, tlv_stream: impl core::iter::Iterator<Item = TlvRecord<'a>>, + secp_ctx: &Secp256k1<T>, +) -> Result<Keypair, ()> { + let metadata = Metadata::payer_data(payment_id, nonce, expanded_key); + let metadata_ref = metadata.as_ref(); + + match verify_payer_metadata_inner( + metadata_ref, + expanded_key, + iv_bytes, + signing_pubkey, + tlv_stream, + secp_ctx, + )? 
{ + Some(keys) => Ok(keys), + None => Err(()), + } +} + /// Verifies data given in a TLV stream was used to produce the given metadata, consisting of: /// - a 256-bit [`PaymentId`], /// - a 128-bit [`Nonce`], and possibly @@ -339,6 +371,34 @@ pub(super) fn verify_payer_metadata<'a, T: secp256k1::Signing>( return Err(()); } + verify_payer_metadata_inner( + metadata, + expanded_key, + iv_bytes, + signing_pubkey, + tlv_stream, + secp_ctx, + )?; + + let mut encrypted_payment_id = [0u8; PaymentId::LENGTH]; + encrypted_payment_id.copy_from_slice(&metadata[..PaymentId::LENGTH]); + let nonce = Nonce::try_from(&metadata[PaymentId::LENGTH..][..Nonce::LENGTH]).unwrap(); + let payment_id = expanded_key.crypt_for_offer(encrypted_payment_id, nonce); + + Ok(PaymentId(payment_id)) +} + +/// Shared core of [`verify_payer_metadata`] and [`derive_payer_keys`]. +/// +/// Builds the payer HMAC from the given metadata and TLV stream, then verifies it against the +/// `signing_pubkey`. The `metadata` must be at least `PaymentId::LENGTH` bytes, with the first +/// `PaymentId::LENGTH` bytes being the encrypted payment ID and the remainder being the nonce +/// (and possibly an HMAC). +fn verify_payer_metadata_inner<'a, T: secp256k1::Signing>( + metadata: &[u8], expanded_key: &ExpandedKey, iv_bytes: &[u8; IV_LEN], + signing_pubkey: PublicKey, tlv_stream: impl core::iter::Iterator<Item = TlvRecord<'a>>, + secp_ctx: &Secp256k1<T>, +) -> Result<Option<Keypair>, ()> { let mut encrypted_payment_id = [0u8; PaymentId::LENGTH]; encrypted_payment_id.copy_from_slice(&metadata[..PaymentId::LENGTH]); @@ -352,12 +412,7 @@ pub(super) fn verify_payer_metadata<'a, T: secp256k1::Signing>( Hmac::from_engine(hmac), signing_pubkey, secp_ctx, - )?; - - let nonce = Nonce::try_from(&metadata[PaymentId::LENGTH..][..Nonce::LENGTH]).unwrap(); - let payment_id = expanded_key.crypt_for_offer(encrypted_payment_id, nonce); - - Ok(PaymentId(payment_id)) + ) }