From b098ebca90b37879afabdb1d3e47956a28aea53f Mon Sep 17 00:00:00 2001 From: Vincenzo Palazzo Date: Tue, 24 Feb 2026 18:43:09 +0100 Subject: [PATCH 1/9] refactor(offers): extract payer key derivation helpers Move the invoice/refund payer key derivation logic into reusable helpers so payer proofs can derive the same signing keys without duplicating the metadata and signer flow. --- lightning/src/offers/invoice.rs | 84 +++++++++++++++++++++++++++------ lightning/src/offers/signer.rs | 67 +++++++++++++++++++++++--- 2 files changed, 130 insertions(+), 21 deletions(-) diff --git a/lightning/src/offers/invoice.rs b/lightning/src/offers/invoice.rs index fd77595ca7d..481b84e5c70 100644 --- a/lightning/src/offers/invoice.rs +++ b/lightning/src/offers/invoice.rs @@ -131,7 +131,8 @@ use crate::offers::invoice_request::{ IV_BYTES as INVOICE_REQUEST_IV_BYTES, }; use crate::offers::merkle::{ - self, SignError, SignFn, SignatureTlvStream, SignatureTlvStreamRef, TaggedHash, TlvStream, + self, SignError, SignFn, SignatureTlvStream, SignatureTlvStreamRef, TaggedHash, TlvRecord, + TlvStream, }; use crate::offers::nonce::Nonce; use crate::offers::offer::{ @@ -1032,6 +1033,31 @@ impl Bolt12Invoice { ) } + /// Re-derives the payer's signing keypair for payer proof creation. + /// + /// This performs the same key derivation that occurs during invoice request creation + /// with `deriving_signing_pubkey`, allowing the payer to recover their signing keypair. + /// The `nonce` and `payment_id` must be the same ones used when creating the original + /// invoice request (available from [`OffersContext::OutboundPaymentForOffer`]). + /// + /// [`OffersContext::OutboundPaymentForOffer`]: crate::blinded_path::message::OffersContext::OutboundPaymentForOffer + pub(crate) fn derive_payer_signing_keys( + &self, payment_id: PaymentId, nonce: Nonce, key: &ExpandedKey, secp_ctx: &Secp256k1, + ) -> Result { + let iv_bytes = match &self.contents { + InvoiceContents::ForOffer { .. 
} => INVOICE_REQUEST_IV_BYTES, + InvoiceContents::ForRefund { .. } => REFUND_IV_BYTES_WITHOUT_METADATA, + }; + self.contents.derive_payer_signing_keys( + &self.bytes, + payment_id, + nonce, + key, + iv_bytes, + secp_ctx, + ) + } + pub(crate) fn as_tlv_stream(&self) -> FullInvoiceTlvStreamRef<'_> { let ( payer_tlv_stream, @@ -1317,20 +1343,8 @@ impl InvoiceContents { &self, bytes: &[u8], metadata: &Metadata, key: &ExpandedKey, iv_bytes: &[u8; IV_LEN], secp_ctx: &Secp256k1, ) -> Result { - const EXPERIMENTAL_TYPES: core::ops::Range = - EXPERIMENTAL_OFFER_TYPES.start..EXPERIMENTAL_INVOICE_REQUEST_TYPES.end; - - let offer_records = TlvStream::new(bytes).range(OFFER_TYPES); - let invreq_records = TlvStream::new(bytes).range(INVOICE_REQUEST_TYPES).filter(|record| { - match record.r#type { - PAYER_METADATA_TYPE => false, // Should be outside range - INVOICE_REQUEST_PAYER_ID_TYPE => !metadata.derives_payer_keys(), - _ => true, - } - }); - let experimental_records = TlvStream::new(bytes).range(EXPERIMENTAL_TYPES); - let tlv_stream = offer_records.chain(invreq_records).chain(experimental_records); - + let exclude_payer_id = metadata.derives_payer_keys(); + let tlv_stream = Self::payer_tlv_stream(bytes, exclude_payer_id); let signing_pubkey = self.payer_signing_pubkey(); signer::verify_payer_metadata( metadata.as_ref(), @@ -1342,6 +1356,46 @@ impl InvoiceContents { ) } + fn derive_payer_signing_keys( + &self, bytes: &[u8], payment_id: PaymentId, nonce: Nonce, key: &ExpandedKey, + iv_bytes: &[u8; IV_LEN], secp_ctx: &Secp256k1, + ) -> Result { + let tlv_stream = Self::payer_tlv_stream(bytes, true); + let signing_pubkey = self.payer_signing_pubkey(); + signer::derive_payer_keys( + payment_id, + nonce, + key, + iv_bytes, + signing_pubkey, + tlv_stream, + secp_ctx, + ) + } + + /// Builds the TLV stream used for payer metadata verification and key derivation. 
+ /// + /// When `exclude_payer_id` is true, the payer signing pubkey (type 88) is excluded + /// from the stream, which is needed when deriving payer keys. + fn payer_tlv_stream( + bytes: &[u8], exclude_payer_id: bool, + ) -> impl core::iter::Iterator> { + const EXPERIMENTAL_TYPES: core::ops::Range = + EXPERIMENTAL_OFFER_TYPES.start..EXPERIMENTAL_INVOICE_REQUEST_TYPES.end; + + let offer_records = TlvStream::new(bytes).range(OFFER_TYPES); + let invreq_records = + TlvStream::new(bytes).range(INVOICE_REQUEST_TYPES).filter(move |record| { + match record.r#type { + PAYER_METADATA_TYPE => false, + INVOICE_REQUEST_PAYER_ID_TYPE => !exclude_payer_id, + _ => true, + } + }); + let experimental_records = TlvStream::new(bytes).range(EXPERIMENTAL_TYPES); + offer_records.chain(invreq_records).chain(experimental_records) + } + fn as_tlv_stream(&self) -> PartialInvoiceTlvStreamRef<'_> { let (payer, offer, invoice_request, experimental_offer, experimental_invoice_request) = match self { diff --git a/lightning/src/offers/signer.rs b/lightning/src/offers/signer.rs index e51a120b6d7..bc0442ba093 100644 --- a/lightning/src/offers/signer.rs +++ b/lightning/src/offers/signer.rs @@ -321,6 +321,38 @@ pub(super) fn derive_keys(nonce: Nonce, expanded_key: &ExpandedKey) -> Keypair { Keypair::from_secret_key(&secp_ctx, &privkey) } +/// Re-derives the payer signing keypair from the given components. +/// +/// This re-performs the same key derivation that occurs during invoice request creation with +/// [`InvoiceRequestBuilder::deriving_signing_pubkey`], allowing the payer to recover their +/// signing keypair for creating payer proofs. +/// +/// The `tlv_stream` must contain the offer and invoice request TLV records (excluding +/// payer metadata type 0 and payer_id type 88), matching what was used during +/// the original key derivation. 
+/// +/// [`InvoiceRequestBuilder::deriving_signing_pubkey`]: crate::offers::invoice_request::InvoiceRequestBuilder +pub(super) fn derive_payer_keys<'a, T: secp256k1::Signing>( + payment_id: PaymentId, nonce: Nonce, expanded_key: &ExpandedKey, iv_bytes: &[u8; IV_LEN], + signing_pubkey: PublicKey, tlv_stream: impl core::iter::Iterator>, + secp_ctx: &Secp256k1, +) -> Result { + let metadata = Metadata::payer_data(payment_id, nonce, expanded_key); + let metadata_ref = metadata.as_ref(); + + match verify_payer_metadata_inner( + metadata_ref, + expanded_key, + iv_bytes, + signing_pubkey, + tlv_stream, + secp_ctx, + )? { + Some(keys) => Ok(keys), + None => Err(()), + } +} + /// Verifies data given in a TLV stream was used to produce the given metadata, consisting of: /// - a 256-bit [`PaymentId`], /// - a 128-bit [`Nonce`], and possibly @@ -339,6 +371,34 @@ pub(super) fn verify_payer_metadata<'a, T: secp256k1::Signing>( return Err(()); } + verify_payer_metadata_inner( + metadata, + expanded_key, + iv_bytes, + signing_pubkey, + tlv_stream, + secp_ctx, + )?; + + let mut encrypted_payment_id = [0u8; PaymentId::LENGTH]; + encrypted_payment_id.copy_from_slice(&metadata[..PaymentId::LENGTH]); + let nonce = Nonce::try_from(&metadata[PaymentId::LENGTH..][..Nonce::LENGTH]).unwrap(); + let payment_id = expanded_key.crypt_for_offer(encrypted_payment_id, nonce); + + Ok(PaymentId(payment_id)) +} + +/// Shared core of [`verify_payer_metadata`] and [`derive_payer_keys`]. +/// +/// Builds the payer HMAC from the given metadata and TLV stream, then verifies it against the +/// `signing_pubkey`. The `metadata` must be at least `PaymentId::LENGTH` bytes, with the first +/// `PaymentId::LENGTH` bytes being the encrypted payment ID and the remainder being the nonce +/// (and possibly an HMAC). 
+fn verify_payer_metadata_inner<'a, T: secp256k1::Signing>( + metadata: &[u8], expanded_key: &ExpandedKey, iv_bytes: &[u8; IV_LEN], + signing_pubkey: PublicKey, tlv_stream: impl core::iter::Iterator>, + secp_ctx: &Secp256k1, +) -> Result, ()> { let mut encrypted_payment_id = [0u8; PaymentId::LENGTH]; encrypted_payment_id.copy_from_slice(&metadata[..PaymentId::LENGTH]); @@ -352,12 +412,7 @@ pub(super) fn verify_payer_metadata<'a, T: secp256k1::Signing>( Hmac::from_engine(hmac), signing_pubkey, secp_ctx, - )?; - - let nonce = Nonce::try_from(&metadata[PaymentId::LENGTH..][..Nonce::LENGTH]).unwrap(); - let payment_id = expanded_key.crypt_for_offer(encrypted_payment_id, nonce); - - Ok(PaymentId(payment_id)) + ) } /// Verifies data given in a TLV stream was used to produce the given metadata, consisting of: From 6393941ea8419c79d5d39b113e641354996e0e5f Mon Sep 17 00:00:00 2001 From: Vincenzo Palazzo Date: Tue, 24 Feb 2026 18:44:00 +0100 Subject: [PATCH 2/9] feat(offers): add BOLT 12 payer proof primitives Add the payer proof types, selective disclosure merkle support, parsing, and tests for constructing and validating BOLT 12 payer proofs from invoices. 
--- lightning/src/ln/offers_tests.rs | 241 +++++ lightning/src/offers/invoice.rs | 28 + lightning/src/offers/merkle.rs | 681 +++++++++++++- lightning/src/offers/mod.rs | 1 + lightning/src/offers/offer.rs | 6 + lightning/src/offers/payer_proof.rs | 1266 +++++++++++++++++++++++++++ 6 files changed, 2221 insertions(+), 2 deletions(-) create mode 100644 lightning/src/offers/payer_proof.rs diff --git a/lightning/src/ln/offers_tests.rs b/lightning/src/ln/offers_tests.rs index de08af5d276..d657c4e0ac4 100644 --- a/lightning/src/ln/offers_tests.rs +++ b/lightning/src/ln/offers_tests.rs @@ -61,6 +61,8 @@ use crate::offers::invoice_error::InvoiceError; use crate::offers::invoice_request::{InvoiceRequest, InvoiceRequestFields, InvoiceRequestVerifiedFromOffer}; use crate::offers::nonce::Nonce; use crate::offers::parse::Bolt12SemanticError; +use crate::offers::payer_proof::{PayerProof, PayerProofError}; +use crate::types::payment::PaymentPreimage; use crate::onion_message::messenger::{DefaultMessageRouter, Destination, MessageSendInstructions, NodeIdMessageRouter, NullMessageRouter, PeeledOnion, DUMMY_HOPS_PATH_LENGTH, QR_CODED_DUMMY_HOPS_PATH_LENGTH}; use crate::onion_message::offers::OffersMessage; use crate::routing::gossip::{NodeAlias, NodeId}; @@ -264,6 +266,21 @@ fn extract_offer_nonce<'a, 'b, 'c>(node: &Node<'a, 'b, 'c>, message: &OnionMessa } } +/// Extract the payer's nonce from an invoice onion message received by the payer. +/// +/// When the payer receives an invoice through their reply path, the blinded path context +/// contains the nonce originally used for deriving their payer signing key. This nonce is +/// needed to build a [`PayerProof`] using [`PayerProofBuilder::build_with_derived_key`]. +fn extract_payer_context<'a, 'b, 'c>(node: &Node<'a, 'b, 'c>, message: &OnionMessage) -> (PaymentId, Nonce) { + match node.onion_messenger.peel_onion_message(message) { + Ok(PeeledOnion::Offers(_, Some(OffersContext::OutboundPaymentForOffer { payment_id, nonce, .. 
}), _)) => (payment_id, nonce), + Ok(PeeledOnion::Offers(_, context, _)) => panic!("Expected OutboundPaymentForOffer context, got: {:?}", context), + Ok(PeeledOnion::Forward(_, _)) => panic!("Unexpected onion message forward"), + Ok(_) => panic!("Unexpected onion message"), + Err(e) => panic!("Failed to process onion message {:?}", e), + } +} + pub(super) fn extract_invoice_request<'a, 'b, 'c>( node: &Node<'a, 'b, 'c>, message: &OnionMessage ) -> (InvoiceRequest, BlindedMessagePath) { @@ -2667,3 +2684,227 @@ fn creates_and_pays_for_phantom_offer() { assert!(nodes[0].onion_messenger.next_onion_message_for_peer(node_c_id).is_none()); } } + +/// Tests the full payer proof lifecycle: offer -> invoice_request -> invoice -> payment -> +/// proof creation with derived key signing -> verification -> bech32 round-trip. +/// +/// This exercises the primary API path where a wallet pays a BOLT 12 offer and then creates +/// a payer proof using the derived signing key (same key derivation as the invoice request). 
+#[test] +fn creates_and_verifies_payer_proof_after_offer_payment() { + let chanmon_cfgs = create_chanmon_cfgs(2); + let node_cfgs = create_node_cfgs(2, &chanmon_cfgs); + let node_chanmgrs = create_node_chanmgrs(2, &node_cfgs, &[None, None]); + let nodes = create_network(2, &node_cfgs, &node_chanmgrs); + + create_announced_chan_between_nodes_with_value(&nodes, 0, 1, 10_000_000, 1_000_000_000); + + let alice = &nodes[0]; // recipient (offer creator) + let alice_id = alice.node.get_our_node_id(); + let bob = &nodes[1]; // payer + let bob_id = bob.node.get_our_node_id(); + + // Alice creates an offer + let offer = alice.node + .create_offer_builder().unwrap() + .amount_msats(10_000_000) + .build().unwrap(); + + // Bob initiates payment + let payment_id = PaymentId([1; 32]); + bob.node.pay_for_offer(&offer, None, payment_id, Default::default()).unwrap(); + expect_recent_payment!(bob, RecentPaymentDetails::AwaitingInvoice, payment_id); + + // Bob sends invoice request to Alice + let onion_message = bob.onion_messenger.next_onion_message_for_peer(alice_id).unwrap(); + alice.onion_messenger.handle_onion_message(bob_id, &onion_message); + + let (invoice_request, _) = extract_invoice_request(alice, &onion_message); + + // Alice sends invoice back to Bob + let onion_message = alice.onion_messenger.next_onion_message_for_peer(bob_id).unwrap(); + bob.onion_messenger.handle_onion_message(alice_id, &onion_message); + + let (invoice, _) = extract_invoice(bob, &onion_message); + assert_eq!(invoice.amount_msats(), 10_000_000); + + // Extract the payer nonce and payment_id from Bob's reply path context. In a real wallet, + // these would be persisted alongside the payment for later payer proof creation. 
+ let (context_payment_id, payer_nonce) = extract_payer_context(bob, &onion_message); + assert_eq!(context_payment_id, payment_id); + + // Route the payment + route_bolt12_payment(bob, &[alice], &invoice); + expect_recent_payment!(bob, RecentPaymentDetails::Pending, payment_id); + + // Get the payment preimage from Alice's PaymentClaimable event and claim it. + // In a real wallet, the payer receives the preimage via Event::PaymentSent after the + // recipient claims. For the test, we extract it from the recipient's claimable event. + let payment_preimage = match get_event!(alice, Event::PaymentClaimable) { + Event::PaymentClaimable { purpose, .. } => { + match &purpose { + PaymentPurpose::Bolt12OfferPayment { payment_context, .. } => { + assert_eq!(payment_context.offer_id, offer.id()); + assert_eq!( + payment_context.invoice_request.payer_signing_pubkey, + invoice_request.payer_signing_pubkey(), + ); + }, + _ => panic!("Expected Bolt12OfferPayment purpose"), + } + purpose.preimage().unwrap() + }, + _ => panic!("Expected Event::PaymentClaimable"), + }; + + claim_payment(bob, &[alice], payment_preimage); + expect_recent_payment!(bob, RecentPaymentDetails::Fulfilled, payment_id); + + // --- Payer Proof Creation --- + // Bob (the payer) creates a proof-of-payment with selective disclosure. + // He includes the offer description and invoice amount, but omits other fields for privacy. 
+ let expanded_key = bob.keys_manager.get_expanded_key(); + let proof = invoice.payer_proof_builder(payment_preimage).unwrap() + .include_offer_description() + .include_invoice_amount() + .include_invoice_created_at() + .build_with_derived_key(&expanded_key, payer_nonce, payment_id, None) + .unwrap(); + + // Check proof contents match the original payment + assert_eq!(proof.preimage(), payment_preimage); + assert_eq!(proof.payment_hash(), invoice.payment_hash()); + assert_eq!(proof.payer_id(), invoice.payer_signing_pubkey()); + assert_eq!(proof.issuer_signing_pubkey(), invoice.signing_pubkey()); + assert!(proof.payer_note().is_none()); + + // --- Serialization Round-Trip --- + // The proof can be serialized to a bech32 string (lnp...) for sharing. + let encoded = proof.to_string(); + assert!(encoded.starts_with("lnp1")); + + // Round-trip through TLV bytes: re-parse the raw bytes (verification happens at parse time). + let decoded = PayerProof::try_from(proof.bytes().to_vec()).unwrap(); + assert_eq!(decoded.preimage(), proof.preimage()); + assert_eq!(decoded.payment_hash(), proof.payment_hash()); + assert_eq!(decoded.payer_id(), proof.payer_id()); + assert_eq!(decoded.issuer_signing_pubkey(), proof.issuer_signing_pubkey()); + assert_eq!(decoded.merkle_root(), proof.merkle_root()); +} + +/// Tests payer proof creation with a payer note, selective disclosure of specific invoice +/// fields, and error cases. 
Verifies that: +/// - A wrong preimage is rejected +/// - A minimal proof (required fields only) works +/// - Selective disclosure with a payer note works +/// - The proof survives a bech32 round-trip with the note intact +#[test] +fn creates_payer_proof_with_note_and_selective_disclosure() { + let chanmon_cfgs = create_chanmon_cfgs(2); + let node_cfgs = create_node_cfgs(2, &chanmon_cfgs); + let node_chanmgrs = create_node_chanmgrs(2, &node_cfgs, &[None, None]); + let nodes = create_network(2, &node_cfgs, &node_chanmgrs); + + create_announced_chan_between_nodes_with_value(&nodes, 0, 1, 10_000_000, 1_000_000_000); + + let alice = &nodes[0]; + let alice_id = alice.node.get_our_node_id(); + let bob = &nodes[1]; + let bob_id = bob.node.get_our_node_id(); + + // Alice creates an offer with a description + let offer = alice.node + .create_offer_builder().unwrap() + .amount_msats(5_000_000) + .description("Coffee beans - 1kg".into()) + .build().unwrap(); + + // Bob pays for the offer + let payment_id = PaymentId([2; 32]); + bob.node.pay_for_offer(&offer, None, payment_id, Default::default()).unwrap(); + expect_recent_payment!(bob, RecentPaymentDetails::AwaitingInvoice, payment_id); + + // Exchange messages + let onion_message = bob.onion_messenger.next_onion_message_for_peer(alice_id).unwrap(); + alice.onion_messenger.handle_onion_message(bob_id, &onion_message); + let (invoice_request, _) = extract_invoice_request(alice, &onion_message); + + let onion_message = alice.onion_messenger.next_onion_message_for_peer(bob_id).unwrap(); + bob.onion_messenger.handle_onion_message(alice_id, &onion_message); + + let (invoice, _) = extract_invoice(bob, &onion_message); + let (context_payment_id, payer_nonce) = extract_payer_context(bob, &onion_message); + assert_eq!(context_payment_id, payment_id); + + // Route and claim the payment, extracting the preimage + route_bolt12_payment(bob, &[alice], &invoice); + expect_recent_payment!(bob, RecentPaymentDetails::Pending, payment_id); + + 
let payment_preimage = match get_event!(alice, Event::PaymentClaimable) { + Event::PaymentClaimable { purpose, .. } => { + match &purpose { + PaymentPurpose::Bolt12OfferPayment { payment_context, .. } => { + assert_eq!(payment_context.offer_id, offer.id()); + assert_eq!( + payment_context.invoice_request.payer_signing_pubkey, + invoice_request.payer_signing_pubkey(), + ); + }, + _ => panic!("Expected Bolt12OfferPayment purpose"), + } + purpose.preimage().unwrap() + }, + _ => panic!("Expected Event::PaymentClaimable"), + }; + + claim_payment(bob, &[alice], payment_preimage); + expect_recent_payment!(bob, RecentPaymentDetails::Fulfilled, payment_id); + + // --- Test 1: Wrong preimage is rejected --- + let wrong_preimage = PaymentPreimage([0xDE; 32]); + assert!(invoice.payer_proof_builder(wrong_preimage).is_err()); + + // --- Test 2: Wrong payment_id causes key derivation failure --- + let expanded_key = bob.keys_manager.get_expanded_key(); + let wrong_payment_id = PaymentId([0xFF; 32]); + let result = invoice.payer_proof_builder(payment_preimage).unwrap() + .build_with_derived_key(&expanded_key, payer_nonce, wrong_payment_id, None); + assert!(matches!(result, Err(PayerProofError::KeyDerivationFailed))); + + // --- Test 3: Wrong nonce causes key derivation failure --- + let wrong_nonce = Nonce::from_entropy_source(&chanmon_cfgs[0].keys_manager); + let result = invoice.payer_proof_builder(payment_preimage).unwrap() + .build_with_derived_key(&expanded_key, wrong_nonce, payment_id, None); + assert!(matches!(result, Err(PayerProofError::KeyDerivationFailed))); + + // --- Test 4: Minimal proof (only required fields) --- + let minimal_proof = invoice.payer_proof_builder(payment_preimage).unwrap() + .build_with_derived_key(&expanded_key, payer_nonce, payment_id, None) + .unwrap(); + // --- Test 5: Proof with selective disclosure and payer note --- + let proof_with_note = invoice.payer_proof_builder(payment_preimage).unwrap() + .include_offer_description() + 
.include_offer_issuer() + .include_invoice_amount() + .include_invoice_created_at() + .build_with_derived_key(&expanded_key, payer_nonce, payment_id, Some("Paid for coffee")) + .unwrap(); + assert_eq!(proof_with_note.payer_note().map(|p| p.0), Some("Paid for coffee")); + + // Both proofs should verify and have the same core fields + assert_eq!(minimal_proof.preimage(), proof_with_note.preimage()); + assert_eq!(minimal_proof.payment_hash(), proof_with_note.payment_hash()); + assert_eq!(minimal_proof.payer_id(), proof_with_note.payer_id()); + assert_eq!(minimal_proof.issuer_signing_pubkey(), proof_with_note.issuer_signing_pubkey()); + + // The merkle roots are the same since both reconstruct from the same invoice + assert_eq!(minimal_proof.merkle_root(), proof_with_note.merkle_root()); + + // --- Test 6: Round-trip the proof with note through TLV bytes --- + let encoded = proof_with_note.to_string(); + assert!(encoded.starts_with("lnp1")); + + let decoded = PayerProof::try_from(proof_with_note.bytes().to_vec()).unwrap(); + assert_eq!(decoded.payer_note().map(|p| p.0), Some("Paid for coffee")); + assert_eq!(decoded.preimage(), payment_preimage); +} diff --git a/lightning/src/offers/invoice.rs b/lightning/src/offers/invoice.rs index 481b84e5c70..83252070098 100644 --- a/lightning/src/offers/invoice.rs +++ b/lightning/src/offers/invoice.rs @@ -141,6 +141,7 @@ use crate::offers::offer::{ }; use crate::offers::parse::{Bolt12ParseError, Bolt12SemanticError, ParsedMessage}; use crate::offers::payer::{PayerTlvStream, PayerTlvStreamRef, PAYER_METADATA_TYPE}; +use crate::offers::payer_proof::{PayerProofBuilder, PayerProofError}; use crate::offers::refund::{ Refund, RefundContents, IV_BYTES_WITHOUT_METADATA as REFUND_IV_BYTES_WITHOUT_METADATA, IV_BYTES_WITH_METADATA as REFUND_IV_BYTES_WITH_METADATA, @@ -148,6 +149,7 @@ use crate::offers::refund::{ use crate::offers::signer::{self, Metadata}; use crate::types::features::{Bolt12InvoiceFeatures, InvoiceRequestFeatures, 
OfferFeatures}; use crate::types::payment::PaymentHash; +use crate::types::payment::PaymentPreimage; use crate::types::string::PrintableString; use crate::util::ser::{ CursorReadable, HighZeroBytesDroppedBigSize, Iterable, LengthLimitedRead, LengthReadable, @@ -1033,6 +1035,17 @@ impl Bolt12Invoice { ) } + /// Creates a [`PayerProofBuilder`] for this invoice using the given payment preimage. + /// + /// Returns an error if the preimage doesn't match the invoice's payment hash. + /// + /// [`PayerProofBuilder`]: crate::offers::payer_proof::PayerProofBuilder + pub fn payer_proof_builder( + &self, preimage: PaymentPreimage, + ) -> Result, PayerProofError> { + PayerProofBuilder::new(self, preimage) + } + /// Re-derives the payer's signing keypair for payer proof creation. /// /// This performs the same key derivation that occurs during invoice request creation @@ -1554,6 +1567,21 @@ impl TryFrom> for Bolt12Invoice { /// Valid type range for invoice TLV records. pub(super) const INVOICE_TYPES: core::ops::Range = 160..240; +/// TLV record type for the invoice creation timestamp. +pub(super) const INVOICE_CREATED_AT_TYPE: u64 = 164; + +/// TLV record type for [`Bolt12Invoice::payment_hash`]. +pub(super) const INVOICE_PAYMENT_HASH_TYPE: u64 = 168; + +/// TLV record type for [`Bolt12Invoice::amount_msats`]. +pub(super) const INVOICE_AMOUNT_TYPE: u64 = 170; + +/// TLV record type for [`Bolt12Invoice::invoice_features`]. +pub(super) const INVOICE_FEATURES_TYPE: u64 = 174; + +/// TLV record type for [`Bolt12Invoice::signing_pubkey`]. 
+pub(super) const INVOICE_NODE_ID_TYPE: u64 = 176; + tlv_stream!(InvoiceTlvStream, InvoiceTlvStreamRef<'a>, INVOICE_TYPES, { (160, paths: (Vec, WithoutLength, Iterable<'a, BlindedPathIter<'a>, BlindedPath>)), (162, blindedpay: (Vec, WithoutLength, Iterable<'a, BlindedPayInfoIter<'a>, BlindedPayInfo>)), diff --git a/lightning/src/offers/merkle.rs b/lightning/src/offers/merkle.rs index 1a38fe5441f..76170feb042 100644 --- a/lightning/src/offers/merkle.rs +++ b/lightning/src/offers/merkle.rs @@ -73,6 +73,13 @@ impl TaggedHash { self.merkle_root } + /// Creates a tagged hash from a pre-computed merkle root. + pub(super) fn from_merkle_root(tag: &'static str, merkle_root: sha256::Hash) -> Self { + let tag_hash = sha256::Hash::hash(tag.as_bytes()); + let digest = Message::from_digest(tagged_hash(tag_hash, merkle_root).to_byte_array()); + Self { tag, merkle_root, digest } + } + pub(super) fn to_bytes(&self) -> [u8; 32] { *self.digest.as_ref() } @@ -243,9 +250,23 @@ pub(super) struct TlvRecord<'a> { type_bytes: &'a [u8], // The entire TLV record. pub(super) record_bytes: &'a [u8], + // The value portion of the TLV record (after type and length). + pub(super) value_bytes: &'a [u8], pub(super) end: usize, } +impl<'a> TlvRecord<'a> { + /// Read a value from this TLV record's value bytes using [`Readable`]. 
+ pub(super) fn read_value(&self) -> Result { + let mut cursor = io::Cursor::new(self.value_bytes); + let value = Readable::read(&mut cursor)?; + if cursor.position() as usize != self.value_bytes.len() { + return Err(crate::ln::msgs::DecodeError::InvalidValue); + } + Ok(value) + } +} + impl<'a> Iterator for TlvStream<'a> { type Item = TlvRecord<'a>; @@ -261,12 +282,12 @@ impl<'a> Iterator for TlvStream<'a> { let offset = self.data.position(); let end = offset + length; - let _value = &self.data.get_ref()[offset as usize..end as usize]; let record_bytes = &self.data.get_ref()[start as usize..end as usize]; + let value_bytes = &self.data.get_ref()[offset as usize..end as usize]; self.data.set_position(end); - Some(TlvRecord { r#type, type_bytes, record_bytes, end: end as usize }) + Some(TlvRecord { r#type, type_bytes, record_bytes, value_bytes, end: end as usize }) } else { None } @@ -280,6 +301,442 @@ impl<'a> Writeable for TlvRecord<'a> { } } +// ============================================================================ +// Selective Disclosure for Payer Proofs (BOLT 12 extension) +// ============================================================================ + +use alloc::collections::BTreeSet; + +/// Error during selective disclosure operations. +#[derive(Debug, Clone, PartialEq, Eq)] +pub enum SelectiveDisclosureError { + /// The omitted markers are not in strict ascending order. + InvalidOmittedMarkersOrder, + /// The omitted markers contain an invalid marker (0 or signature type). + InvalidOmittedMarkersMarker, + /// The leaf_hashes count doesn't match included TLVs. + LeafHashCountMismatch, + /// Insufficient missing_hashes to reconstruct the tree. + InsufficientMissingHashes, + /// The TLV stream is empty. + EmptyTlvStream, +} + +/// Data needed to reconstruct a merkle root with selective disclosure. +/// +/// This is used in payer proofs to allow verification of an invoice signature +/// without revealing all invoice fields. 
+#[derive(Clone, Debug, PartialEq)] +pub(super) struct SelectiveDisclosure { + /// Nonce hashes for included TLVs (in TLV type order). + pub(super) leaf_hashes: Vec, + /// Marker numbers for omitted TLVs (excluding implicit TLV0). + pub(super) omitted_markers: Vec, + /// Minimal merkle hashes for omitted subtrees. + pub(super) missing_hashes: Vec, + /// The complete merkle root. + pub(super) merkle_root: sha256::Hash, +} + +/// Internal data for each TLV during tree construction. +struct TlvMerkleData { + tlv_type: u64, + per_tlv_hash: sha256::Hash, + is_included: bool, +} + +/// Compute selective disclosure data from a TLV stream. +/// +/// This builds the full merkle tree and extracts the data needed for a payer proof: +/// - `leaf_hashes`: nonce hashes for included TLVs +/// - `omitted_markers`: marker numbers for omitted TLVs +/// - `missing_hashes`: minimal merkle hashes for omitted subtrees +/// +/// # Arguments +/// * `tlv_bytes` - Complete TLV stream (e.g., invoice bytes without signature) +/// * `included_types` - Set of TLV types to include in the disclosure +pub(super) fn compute_selective_disclosure( + tlv_bytes: &[u8], included_types: &BTreeSet, +) -> Result { + let mut tlv_stream = TlvStream::new(tlv_bytes).peekable(); + let first_record = tlv_stream.peek().ok_or(SelectiveDisclosureError::EmptyTlvStream)?; + let nonce_tag_hash = sha256::Hash::from_engine({ + let mut engine = sha256::Hash::engine(); + engine.input("LnNonce".as_bytes()); + engine.input(first_record.record_bytes); + engine + }); + + let leaf_tag = tagged_hash_engine(sha256::Hash::hash("LnLeaf".as_bytes())); + let nonce_tag = tagged_hash_engine(nonce_tag_hash); + let branch_tag = tagged_hash_engine(sha256::Hash::hash("LnBranch".as_bytes())); + + let mut tlv_data: Vec = Vec::new(); + let mut leaf_hashes: Vec = Vec::new(); + for record in tlv_stream.filter(|r| !SIGNATURE_TYPES.contains(&r.r#type)) { + let leaf_hash = tagged_hash_from_engine(leaf_tag.clone(), record.record_bytes); + let 
nonce_hash = tagged_hash_from_engine(nonce_tag.clone(), record.type_bytes); + let per_tlv_hash = + tagged_branch_hash_from_engine(branch_tag.clone(), leaf_hash, nonce_hash); + + let is_included = included_types.contains(&record.r#type); + if is_included { + leaf_hashes.push(nonce_hash); + } + tlv_data.push(TlvMerkleData { tlv_type: record.r#type, per_tlv_hash, is_included }); + } + + if tlv_data.is_empty() { + return Err(SelectiveDisclosureError::EmptyTlvStream); + } + let omitted_markers = compute_omitted_markers(&tlv_data); + let (merkle_root, missing_hashes) = build_tree_with_disclosure(&tlv_data, &branch_tag); + + Ok(SelectiveDisclosure { leaf_hashes, omitted_markers, missing_hashes, merkle_root }) +} + +/// Compute omitted markers per BOLT 12 payer proof spec. +/// +/// Each omitted TLV gets a marker equal to `prev_value + 1`, where `prev_value` +/// tracks the last included type or last marker. TLV type 0 is implicitly +/// omitted (never included in markers). +fn compute_omitted_markers(tlv_data: &[TlvMerkleData]) -> Vec { + let mut markers = Vec::new(); + let mut prev_value: u64 = 0; + + for data in tlv_data { + if data.tlv_type == 0 { + continue; + } + + if data.is_included { + prev_value = data.tlv_type; + } else { + let marker = prev_value + 1; + markers.push(marker); + prev_value = marker; + } + } + + markers +} + +/// A node in the merkle tree during selective disclosure processing. +struct TreeNode { + hash: Option, + included: bool, + min_type: u64, +} + +/// Build merkle tree and collect missing_hashes for omitted subtrees. +/// +/// Returns hashes sorted by ascending TLV type as required by the spec. For internal +/// nodes, the type used for ordering is the minimum TLV type in that subtree. +/// +/// Uses `n` tree nodes (one per TLV) rather than `2n`, since the per-TLV hashes +/// already combine leaf and nonce. The tree traversal starts at level 0 to pair +/// adjacent per-TLV hashes, matching the structure of `root_hash()`. 
+fn build_tree_with_disclosure( + tlv_data: &[TlvMerkleData], branch_tag: &sha256::HashEngine, +) -> (sha256::Hash, Vec) { + let num_nodes = tlv_data.len(); + debug_assert!(num_nodes > 0, "TLV stream must contain at least one record"); + + let num_omitted = tlv_data.iter().filter(|d| !d.is_included).count(); + + let mut nodes: Vec = tlv_data + .iter() + .map(|data| TreeNode { + hash: Some(data.per_tlv_hash), + included: data.is_included, + min_type: data.tlv_type, + }) + .collect(); + + let mut missing_with_types: Vec<(u64, sha256::Hash)> = Vec::with_capacity(num_omitted); + + for level in 0.. { + let step = 2 << level; + let offset = step / 2; + if offset >= num_nodes { + break; + } + + for (left_pos, right_pos) in + (0..num_nodes).step_by(step).zip((offset..num_nodes).step_by(step)) + { + let left_hash = nodes[left_pos].hash; + let right_hash = nodes[right_pos].hash; + let left_incl = nodes[left_pos].included; + let right_incl = nodes[right_pos].included; + let right_min_type = nodes[right_pos].min_type; + + match (left_hash, right_hash) { + (Some(l), Some(r)) => { + if left_incl != right_incl { + let (missing_type, missing_hash) = if right_incl { + (nodes[left_pos].min_type, l) + } else { + (right_min_type, r) + }; + missing_with_types.push((missing_type, missing_hash)); + } + nodes[left_pos].hash = + Some(tagged_branch_hash_from_engine(branch_tag.clone(), l, r)); + nodes[left_pos].included |= left_incl || right_incl; + nodes[left_pos].min_type = + core::cmp::min(nodes[left_pos].min_type, right_min_type); + }, + (Some(_), None) => {}, + _ => unreachable!("Invalid state in merkle tree construction"), + } + } + } + + missing_with_types.sort_by_key(|(min_type, _)| *min_type); + let missing_hashes: Vec = + missing_with_types.into_iter().map(|(_, h)| h).collect(); + + (nodes[0].hash.expect("Tree should have a root"), missing_hashes) +} + +/// Reconstruct merkle root from selective disclosure data. 
+///
+/// The `missing_hashes` must be in ascending type order per spec.
+///
+/// Uses `n` tree nodes (one per TLV position) rather than `2n`, since per-TLV
+/// hashes already combine leaf and nonce. Two passes over the tree determine
+/// where missing hashes are needed and then combine all hashes to the root.
+pub(super) fn reconstruct_merkle_root<'a>(
+    included_records: &[(u64, &'a [u8])], leaf_hashes: &[sha256::Hash], omitted_markers: &[u64],
+    missing_hashes: &[sha256::Hash],
+) -> Result<sha256::Hash, SelectiveDisclosureError> {
+    // Callers are expected to validate omitted_markers before calling this function
+    // (e.g., via validate_omitted_markers_for_parsing). Debug-assert for safety.
+    debug_assert!(validate_omitted_markers(omitted_markers).is_ok());
+
+    if included_records.len() != leaf_hashes.len() {
+        return Err(SelectiveDisclosureError::LeafHashCountMismatch);
+    }
+
+    let leaf_tag = tagged_hash_engine(sha256::Hash::hash("LnLeaf".as_bytes()));
+    let branch_tag = tagged_hash_engine(sha256::Hash::hash("LnBranch".as_bytes()));
+
+    // Build TreeNode vec directly by interleaving included/omitted positions,
+    // eliminating the intermediate Vec from reconstruct_positions_from_records.
+    let num_nodes = 1 + included_records.len() + omitted_markers.len();
+    let mut nodes: Vec<TreeNode> = Vec::with_capacity(num_nodes);
+
+    // TLV0 is always omitted
+    nodes.push(TreeNode { hash: None, included: false, min_type: 0 });
+
+    let mut inc_idx = 0;
+    let mut mrk_idx = 0;
+    let mut prev_marker: u64 = 0;
+    let mut node_idx: u64 = 1;
+
+    while inc_idx < included_records.len() || mrk_idx < omitted_markers.len() {
+        if mrk_idx >= omitted_markers.len() {
+            // No more markers, remaining positions are included
+            let (_, record_bytes) = included_records[inc_idx];
+            let leaf_hash = tagged_hash_from_engine(leaf_tag.clone(), record_bytes);
+            let nonce_hash = leaf_hashes[inc_idx];
+            let hash = tagged_branch_hash_from_engine(branch_tag.clone(), leaf_hash, nonce_hash);
+            nodes.push(TreeNode { hash: Some(hash), included: true, min_type: node_idx });
+            inc_idx += 1;
+        } else if inc_idx >= included_records.len() {
+            // No more included types, remaining positions are omitted
+            nodes.push(TreeNode { hash: None, included: false, min_type: node_idx });
+            prev_marker = omitted_markers[mrk_idx];
+            mrk_idx += 1;
+        } else {
+            let marker = omitted_markers[mrk_idx];
+            let (inc_type, _) = included_records[inc_idx];
+
+            if marker == prev_marker + 1 {
+                // Continuation of current run -> omitted position
+                nodes.push(TreeNode { hash: None, included: false, min_type: node_idx });
+                prev_marker = marker;
+                mrk_idx += 1;
+            } else {
+                // Jump detected -> included position comes first
+                let (_, record_bytes) = included_records[inc_idx];
+                let leaf_hash = tagged_hash_from_engine(leaf_tag.clone(), record_bytes);
+                let nonce_hash = leaf_hashes[inc_idx];
+                let hash =
+                    tagged_branch_hash_from_engine(branch_tag.clone(), leaf_hash, nonce_hash);
+                nodes.push(TreeNode { hash: Some(hash), included: true, min_type: node_idx });
+                prev_marker = inc_type;
+                inc_idx += 1;
+            }
+        }
+        node_idx += 1;
+    }
+
+    // First pass: walk the tree to discover which positions need missing hashes.
+ // We mutate nodes[].included and nodes[].min_type directly since the second + // pass only reads nodes[].hash, making this safe without a separate allocation. + let num_omitted = omitted_markers.len() + 1; // +1 for implicit TLV0 + let mut needs_hash: Vec<(u64, usize)> = Vec::with_capacity(num_omitted); + + for level in 0.. { + let step = 2 << level; + let offset = step / 2; + if offset >= num_nodes { + break; + } + + for left_pos in (0..num_nodes).step_by(step) { + let right_pos = left_pos + offset; + if right_pos >= num_nodes { + continue; + } + + let r_min = nodes[right_pos].min_type; + + match (nodes[left_pos].included, nodes[right_pos].included) { + (true, false) => { + needs_hash.push((r_min, right_pos)); + nodes[left_pos].min_type = core::cmp::min(nodes[left_pos].min_type, r_min); + }, + (false, true) => { + needs_hash.push((nodes[left_pos].min_type, left_pos)); + nodes[left_pos].included = true; + nodes[left_pos].min_type = core::cmp::min(nodes[left_pos].min_type, r_min); + }, + (true, true) => { + nodes[left_pos].min_type = core::cmp::min(nodes[left_pos].min_type, r_min); + }, + (false, false) => { + nodes[left_pos].min_type = core::cmp::min(nodes[left_pos].min_type, r_min); + }, + } + } + } + + needs_hash.sort_by_key(|(min_pos, _)| *min_pos); + + if needs_hash.len() != missing_hashes.len() { + return Err(SelectiveDisclosureError::InsufficientMissingHashes); + } + + // Place missing hashes directly into the nodes array. + for (i, &(_, tree_pos)) in needs_hash.iter().enumerate() { + nodes[tree_pos].hash = Some(missing_hashes[i]); + } + + // Second pass: combine hashes up the tree. + for level in 0.. 
{ + let step = 2 << level; + let offset = step / 2; + if offset >= num_nodes { + break; + } + + for left_pos in (0..num_nodes).step_by(step) { + let right_pos = left_pos + offset; + if right_pos >= num_nodes { + continue; + } + + match (nodes[left_pos].hash, nodes[right_pos].hash) { + (Some(l), Some(r)) => { + nodes[left_pos].hash = + Some(tagged_branch_hash_from_engine(branch_tag.clone(), l, r)); + }, + (Some(_), None) => {}, + (None, _) => { + return Err(SelectiveDisclosureError::InsufficientMissingHashes); + }, + }; + } + } + + nodes[0].hash.ok_or(SelectiveDisclosureError::InsufficientMissingHashes) +} + +fn validate_omitted_markers(markers: &[u64]) -> Result<(), SelectiveDisclosureError> { + let mut prev = 0u64; + for &marker in markers { + if marker == 0 { + return Err(SelectiveDisclosureError::InvalidOmittedMarkersMarker); + } + if SIGNATURE_TYPES.contains(&marker) { + return Err(SelectiveDisclosureError::InvalidOmittedMarkersMarker); + } + if marker <= prev { + return Err(SelectiveDisclosureError::InvalidOmittedMarkersOrder); + } + prev = marker; + } + Ok(()) +} + +/// Reconstruct position inclusion map from included types and omitted markers. +/// +/// This reverses the marker encoding algorithm from `compute_omitted_markers`: +/// - Markers form "runs" of consecutive values (e.g., [11, 12] is a run) +/// - A "jump" in markers (e.g., 12 → 41) indicates an included TLV came between +/// - After included type X, the next marker in that run equals X + 1 +/// +/// The algorithm tracks `prev_marker` to detect continuations vs jumps: +/// - If `marker == prev_marker + 1`: continuation → omitted position +/// - Otherwise: jump → included position comes first, then process marker as continuation +/// +/// Example: included=[10, 40], markers=[11, 12, 41, 42] +/// - Position 0: TLV0 (always omitted) +/// - marker=11, prev=0: 11 != 1, jump! 
Insert included (10), prev=10
+/// - marker=11, prev=10: 11 == 11, continuation → omitted, prev=11
+/// - marker=12, prev=11: 12 == 12, continuation → omitted, prev=12
+/// - marker=41, prev=12: 41 != 13, jump! Insert included (40), prev=40
+/// - marker=41, prev=40: 41 == 41, continuation → omitted, prev=41
+/// - marker=42, prev=41: 42 == 42, continuation → omitted, prev=42
+/// Result: [O, I, O, O, I, O, O]
+#[cfg(test)]
+fn reconstruct_positions(included_types: &[u64], omitted_markers: &[u64]) -> Vec<bool> {
+    let total = 1 + included_types.len() + omitted_markers.len();
+    let mut positions = Vec::with_capacity(total);
+    positions.push(false); // TLV0 is always omitted
+
+    let mut inc_idx = 0;
+    let mut mrk_idx = 0;
+    // After TLV0 (implicit marker 0), next continuation would be marker 1
+    let mut prev_marker: u64 = 0;
+
+    while inc_idx < included_types.len() || mrk_idx < omitted_markers.len() {
+        if mrk_idx >= omitted_markers.len() {
+            // No more markers, remaining positions are included
+            positions.push(true);
+            inc_idx += 1;
+        } else if inc_idx >= included_types.len() {
+            // No more included types, remaining positions are omitted
+            positions.push(false);
+            prev_marker = omitted_markers[mrk_idx];
+            mrk_idx += 1;
+        } else {
+            let marker = omitted_markers[mrk_idx];
+            let inc_type = included_types[inc_idx];
+
+            if marker == prev_marker + 1 {
+                // Continuation of current run → this position is omitted
+                positions.push(false);
+                prev_marker = marker;
+                mrk_idx += 1;
+            } else {
+                // Jump detected! An included TLV comes before this marker.
+                // After the included type, prev_marker resets to that type,
+                // so the marker will be processed as a continuation next iteration.
+ positions.push(true); + prev_marker = inc_type; + inc_idx += 1; + // Don't advance mrk_idx - same marker will be continuation next + } + } + } + + positions +} + #[cfg(test)] mod tests { use super::{TlvStream, SIGNATURE_TYPES}; @@ -497,4 +954,224 @@ mod tests { self.fmt_bech32_str(f) } } + + // ============================================================================ + // Tests for selective disclosure / payer proof reconstruction + // ============================================================================ + + /// Test reconstruct_positions with the BOLT 12 payer proof spec example. + /// + /// TLVs: 0(omit), 10(incl), 20(omit), 30(omit), 40(incl), 50(omit), 60(omit) + /// Markers: [11, 12, 41, 42] + /// Expected positions: [O, I, O, O, I, O, O] + #[test] + fn test_reconstruct_positions_spec_example() { + let included_types = vec![10, 40]; + let markers = vec![11, 12, 41, 42]; + let positions = super::reconstruct_positions(&included_types, &markers); + assert_eq!(positions, vec![false, true, false, false, true, false, false]); + } + + /// Test reconstruct_positions when there are omitted TLVs before the first included. + /// + /// TLVs: 0(omit), 5(omit), 10(incl), 20(omit) + /// Markers: [1, 11] (1 is first omitted after TLV0, 11 is after included 10) + /// Expected positions: [O, O, I, O] + #[test] + fn test_reconstruct_positions_omitted_before_included() { + let included_types = vec![10]; + let markers = vec![1, 11]; + let positions = super::reconstruct_positions(&included_types, &markers); + assert_eq!(positions, vec![false, false, true, false]); + } + + /// Test reconstruct_positions with only included TLVs (no omitted except TLV0). 
+ /// + /// TLVs: 0(omit), 10(incl), 20(incl) + /// Markers: [] (no omitted TLVs after TLV0) + /// Expected positions: [O, I, I] + #[test] + fn test_reconstruct_positions_no_omitted() { + let included_types = vec![10, 20]; + let markers = vec![]; + let positions = super::reconstruct_positions(&included_types, &markers); + assert_eq!(positions, vec![false, true, true]); + } + + /// Test reconstruct_positions with only omitted TLVs (no included). + /// + /// TLVs: 0(omit), 5(omit), 10(omit) + /// Markers: [1, 2] (consecutive omitted after TLV0) + /// Expected positions: [O, O, O] + #[test] + fn test_reconstruct_positions_no_included() { + let included_types = vec![]; + let markers = vec![1, 2]; + let positions = super::reconstruct_positions(&included_types, &markers); + assert_eq!(positions, vec![false, false, false]); + } + + /// Test round-trip: compute selective disclosure then reconstruct merkle root. + #[test] + fn test_selective_disclosure_round_trip() { + use alloc::collections::BTreeSet; + + // Build TLV stream matching spec example structure + // TLVs: 0, 10, 20, 30, 40, 50, 60 + let mut tlv_bytes = Vec::new(); + tlv_bytes.extend_from_slice(&[0x00, 0x04, 0x00, 0x00, 0x00, 0x00]); // TLV 0 + tlv_bytes.extend_from_slice(&[0x0a, 0x02, 0x00, 0x00]); // TLV 10 + tlv_bytes.extend_from_slice(&[0x14, 0x02, 0x00, 0x00]); // TLV 20 + tlv_bytes.extend_from_slice(&[0x1e, 0x02, 0x00, 0x00]); // TLV 30 + tlv_bytes.extend_from_slice(&[0x28, 0x02, 0x00, 0x00]); // TLV 40 + tlv_bytes.extend_from_slice(&[0x32, 0x02, 0x00, 0x00]); // TLV 50 + tlv_bytes.extend_from_slice(&[0x3c, 0x02, 0x00, 0x00]); // TLV 60 + + // Include types 10 and 40 + let mut included = BTreeSet::new(); + included.insert(10); + included.insert(40); + + // Compute selective disclosure + let disclosure = super::compute_selective_disclosure(&tlv_bytes, &included).unwrap(); + + // Verify markers match spec example + assert_eq!(disclosure.omitted_markers, vec![11, 12, 41, 42]); + + // Verify leaf_hashes count 
matches included TLVs + assert_eq!(disclosure.leaf_hashes.len(), 2); + + // Collect included records for reconstruction + let included_records: Vec<(u64, &[u8])> = TlvStream::new(&tlv_bytes) + .filter(|r| included.contains(&r.r#type)) + .map(|r| (r.r#type, r.record_bytes)) + .collect(); + + // Reconstruct merkle root + let reconstructed = super::reconstruct_merkle_root( + &included_records, + &disclosure.leaf_hashes, + &disclosure.omitted_markers, + &disclosure.missing_hashes, + ) + .unwrap(); + + // Must match original + assert_eq!(reconstructed, disclosure.merkle_root); + } + + /// Test that missing_hashes are in ascending type order per spec. + /// + /// Per spec: "MUST include the minimal set of merkle hashes of missing merkle + /// leaves or nodes in `missing_hashes`, in ascending type order." + /// + /// For the spec example with TLVs [0(o), 10(I), 20(o), 30(o), 40(I), 50(o), 60(o)]: + /// - hash(0) covers type 0 + /// - hash(B(20,30)) covers types 20-30 (min=20) + /// - hash(50) covers type 50 + /// - hash(60) covers type 60 + /// + /// Expected order: [type 0, type 20, type 50, type 60] + /// This means 4 missing_hashes in this order. 
+ #[test] + fn test_missing_hashes_ascending_type_order() { + use alloc::collections::BTreeSet; + + // Build TLV stream: 0, 10, 20, 30, 40, 50, 60 + let mut tlv_bytes = Vec::new(); + tlv_bytes.extend_from_slice(&[0x00, 0x04, 0x00, 0x00, 0x00, 0x00]); // TLV 0 + tlv_bytes.extend_from_slice(&[0x0a, 0x02, 0x00, 0x00]); // TLV 10 + tlv_bytes.extend_from_slice(&[0x14, 0x02, 0x00, 0x00]); // TLV 20 + tlv_bytes.extend_from_slice(&[0x1e, 0x02, 0x00, 0x00]); // TLV 30 + tlv_bytes.extend_from_slice(&[0x28, 0x02, 0x00, 0x00]); // TLV 40 + tlv_bytes.extend_from_slice(&[0x32, 0x02, 0x00, 0x00]); // TLV 50 + tlv_bytes.extend_from_slice(&[0x3c, 0x02, 0x00, 0x00]); // TLV 60 + + // Include types 10 and 40 (same as spec example) + let mut included = BTreeSet::new(); + included.insert(10); + included.insert(40); + + let disclosure = super::compute_selective_disclosure(&tlv_bytes, &included).unwrap(); + + // We should have 4 missing hashes for omitted types: + // - type 0 (single leaf) + // - types 20+30 (combined branch, min_type=20) + // - type 50 (single leaf) + // - type 60 (single leaf) + // + // The spec example only shows 3, but that appears to be incomplete + // (missing hash for type 60). Our implementation should produce 4. + assert_eq!( + disclosure.missing_hashes.len(), + 4, + "Expected 4 missing hashes for omitted types [0, 20+30, 50, 60]" + ); + + // Verify the round-trip still works with the correct ordering + let included_records: Vec<(u64, &[u8])> = TlvStream::new(&tlv_bytes) + .filter(|r| included.contains(&r.r#type)) + .map(|r| (r.r#type, r.record_bytes)) + .collect(); + + let reconstructed = super::reconstruct_merkle_root( + &included_records, + &disclosure.leaf_hashes, + &disclosure.omitted_markers, + &disclosure.missing_hashes, + ) + .unwrap(); + + assert_eq!(reconstructed, disclosure.merkle_root); + } + + /// Test that reconstruction fails with wrong number of missing_hashes. 
+    #[test]
+    fn test_reconstruction_fails_with_wrong_missing_hashes() {
+        use alloc::collections::BTreeSet;
+
+        let mut tlv_bytes = Vec::new();
+        tlv_bytes.extend_from_slice(&[0x00, 0x04, 0x00, 0x00, 0x00, 0x00]); // TLV 0
+        tlv_bytes.extend_from_slice(&[0x0a, 0x02, 0x00, 0x00]); // TLV 10
+        tlv_bytes.extend_from_slice(&[0x14, 0x02, 0x00, 0x00]); // TLV 20
+
+        let mut included = BTreeSet::new();
+        included.insert(10);
+
+        let disclosure = super::compute_selective_disclosure(&tlv_bytes, &included).unwrap();
+
+        let included_records: Vec<(u64, &[u8])> = TlvStream::new(&tlv_bytes)
+            .filter(|r| included.contains(&r.r#type))
+            .map(|r| (r.r#type, r.record_bytes))
+            .collect();
+
+        // Try with empty missing_hashes (should fail)
+        let result = super::reconstruct_merkle_root(
+            &included_records,
+            &disclosure.leaf_hashes,
+            &disclosure.omitted_markers,
+            &[], // Wrong!
+        );
+
+        assert!(result.is_err());
+    }
+
+    #[test]
+    fn test_tlv_record_read_value_rejects_trailing_bytes() {
+        use bitcoin::secp256k1::PublicKey;
+
+        use crate::offers::test_utils::payer_pubkey;
+        use crate::util::ser::{BigSize, Writeable};
+
+        let pubkey = payer_pubkey();
+        let mut tlv_bytes = Vec::new();
+        BigSize(88).write(&mut tlv_bytes).unwrap();
+        BigSize(35).write(&mut tlv_bytes).unwrap();
+        pubkey.write(&mut tlv_bytes).unwrap();
+        tlv_bytes.extend_from_slice(&[0x00, 0x01]);
+
+        let record = TlvStream::new(&tlv_bytes).next().unwrap();
+        let result: Result<PublicKey, crate::ln::msgs::DecodeError> = record.read_value();
+        assert!(matches!(result, Err(crate::ln::msgs::DecodeError::InvalidValue)));
+    }
+}
diff --git a/lightning/src/offers/mod.rs b/lightning/src/offers/mod.rs
index 5b5cf6cdc78..bbbf91a1f1c 100644
--- a/lightning/src/offers/mod.rs
+++ b/lightning/src/offers/mod.rs
@@ -25,6 +25,7 @@ pub mod merkle;
 pub mod nonce;
 pub mod parse;
 mod payer;
+pub mod payer_proof;
 pub mod refund;
 pub(crate) mod signer;
 pub mod static_invoice;
diff --git a/lightning/src/offers/offer.rs b/lightning/src/offers/offer.rs
index b2703454169..2763df4940b
100644
--- a/lightning/src/offers/offer.rs
+++ b/lightning/src/offers/offer.rs
@@ -1211,6 +1211,12 @@ pub(super) const OFFER_TYPES: core::ops::Range<u64> = 1..80;
 /// TLV record type for [`Offer::metadata`].
 const OFFER_METADATA_TYPE: u64 = 4;
 
+/// TLV record type for [`Offer::description`].
+pub(super) const OFFER_DESCRIPTION_TYPE: u64 = 10;
+
+/// TLV record type for [`Offer::issuer`].
+pub(super) const OFFER_ISSUER_TYPE: u64 = 18;
+
 /// TLV record type for [`Offer::issuer_signing_pubkey`].
 const OFFER_ISSUER_ID_TYPE: u64 = 22;
 
diff --git a/lightning/src/offers/payer_proof.rs b/lightning/src/offers/payer_proof.rs
new file mode 100644
index 00000000000..39962125b35
--- /dev/null
+++ b/lightning/src/offers/payer_proof.rs
@@ -0,0 +1,1266 @@
+// This file is Copyright its original authors, visible in version control
+// history.
+//
+// This file is licensed under the Apache License, Version 2.0 <LICENSE-APACHE
+// or http://www.apache.org/licenses/LICENSE-2.0> or the MIT license <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your option.
+// You may not use this file except in accordance with one or both of these
+// licenses.
+
+//! Payer proofs for BOLT 12 invoices.
+//!
+//! A [`PayerProof`] cryptographically proves that a BOLT 12 invoice was paid by demonstrating:
+//! - Possession of the payment preimage (proving the payment occurred)
+//! - A valid invoice signature over a merkle root (proving the invoice is authentic)
+//! - The payer's signature (proving who authorized the payment)
+//!
+//! This implements the payer proof extension to BOLT 12 as specified in
+//! the BOLT 12 payer proofs proposal (spec URL lost in extraction — NOTE(review): restore link).
+ +use alloc::collections::BTreeSet; + +use crate::io; +use crate::ln::channelmanager::PaymentId; +use crate::ln::inbound_payment::ExpandedKey; +use crate::offers::invoice::{ + Bolt12Invoice, INVOICE_AMOUNT_TYPE, INVOICE_CREATED_AT_TYPE, INVOICE_FEATURES_TYPE, + INVOICE_NODE_ID_TYPE, INVOICE_PAYMENT_HASH_TYPE, SIGNATURE_TAG, +}; +use crate::offers::invoice_request::INVOICE_REQUEST_PAYER_ID_TYPE; +use crate::offers::merkle::{ + self, SelectiveDisclosure, SelectiveDisclosureError, TaggedHash, TlvStream, SIGNATURE_TYPES, +}; +use crate::offers::nonce::Nonce; +use crate::offers::offer::{OFFER_DESCRIPTION_TYPE, OFFER_ISSUER_TYPE}; +use crate::offers::parse::Bech32Encode; +use crate::offers::payer::PAYER_METADATA_TYPE; +use crate::types::payment::{PaymentHash, PaymentPreimage}; +use crate::util::ser::{BigSize, Readable, Writeable}; +use lightning_types::string::PrintableString; + +use bitcoin::hashes::{sha256, Hash, HashEngine}; +use bitcoin::secp256k1::schnorr::Signature; +use bitcoin::secp256k1::{Message, PublicKey, Secp256k1}; + +use core::convert::TryFrom; + +#[allow(unused_imports)] +use crate::prelude::*; + +const TLV_SIGNATURE: u64 = 240; +const TLV_PREIMAGE: u64 = 242; +const TLV_OMITTED_TLVS: u64 = 244; +const TLV_MISSING_HASHES: u64 = 246; +const TLV_LEAF_HASHES: u64 = 248; +const TLV_PAYER_SIGNATURE: u64 = 250; + +/// Human-readable prefix for payer proofs in bech32 encoding. +pub const PAYER_PROOF_HRP: &str = "lnp"; + +/// Tag for payer signature computation per BOLT 12 signature calculation. +/// Format: "lightning" || messagename || fieldname +const PAYER_SIGNATURE_TAG: &str = concat!("lightning", "payer_proof", "payer_signature"); + +/// Error when building or verifying a payer proof. +#[derive(Debug, Clone, PartialEq, Eq)] +pub enum PayerProofError { + /// The preimage doesn't match the invoice's payment hash. + PreimageMismatch, + /// Error during merkle tree operations. + MerkleError(SelectiveDisclosureError), + /// The invoice signature is invalid. 
+    InvalidInvoiceSignature,
+    /// The payer signature is invalid.
+    InvalidPayerSignature,
+    /// Failed to re-derive the payer signing key from the provided nonce and payment ID.
+    KeyDerivationFailed,
+    /// Error during signing.
+    SigningError,
+    /// The invreq_metadata field cannot be included (per spec).
+    InvreqMetadataNotAllowed,
+    /// TLV types >= 240 cannot be included — they are in the
+    /// signature/payer-proof range and handled separately.
+    SignatureTypeNotAllowed,
+
+    /// Error decoding the payer proof.
+    DecodeError(crate::ln::msgs::DecodeError),
+}
+
+impl From<SelectiveDisclosureError> for PayerProofError {
+    fn from(e: SelectiveDisclosureError) -> Self {
+        PayerProofError::MerkleError(e)
+    }
+}
+
+impl From<crate::ln::msgs::DecodeError> for PayerProofError {
+    fn from(e: crate::ln::msgs::DecodeError) -> Self {
+        PayerProofError::DecodeError(e)
+    }
+}
+
+/// A cryptographic proof that a BOLT 12 invoice was paid.
+///
+/// Contains the payment preimage, selective disclosure of invoice fields,
+/// the invoice signature, and a payer signature proving who paid.
+#[derive(Clone, Debug)]
+pub struct PayerProof {
+    bytes: Vec<u8>,
+    contents: PayerProofContents,
+    merkle_root: sha256::Hash,
+}
+
+#[derive(Clone, Debug)]
+struct PayerProofContents {
+    payer_id: PublicKey,
+    payment_hash: PaymentHash,
+    issuer_signing_pubkey: PublicKey,
+    preimage: PaymentPreimage,
+    invoice_signature: Signature,
+    payer_signature: Signature,
+    payer_note: Option<String>,
+}
+
+/// Builds a [`PayerProof`] from a paid invoice and its preimage.
+///
+/// By default, only the required fields are included (payer_id, payment_hash,
+/// issuer_signing_pubkey). Additional fields can be included for selective disclosure
+/// using the `include_*` methods.
+pub struct PayerProofBuilder<'a> {
+    invoice: &'a Bolt12Invoice,
+    preimage: PaymentPreimage,
+    included_types: BTreeSet<u64>,
+    invoice_bytes: Vec<u8>,
+}
+
+impl<'a> PayerProofBuilder<'a> {
+    /// Create a new builder from a paid invoice and its preimage.
+    ///
+    /// Returns an error if the preimage doesn't match the invoice's payment hash.
+    pub(super) fn new(
+        invoice: &'a Bolt12Invoice, preimage: PaymentPreimage,
+    ) -> Result<Self, PayerProofError> {
+        let computed_hash = sha256::Hash::hash(&preimage.0);
+        if computed_hash.as_byte_array() != &invoice.payment_hash().0 {
+            return Err(PayerProofError::PreimageMismatch);
+        }
+
+        let mut invoice_bytes = Vec::new();
+        invoice.write(&mut invoice_bytes).expect("Vec write should not fail");
+
+        let mut included_types = BTreeSet::new();
+        included_types.insert(INVOICE_REQUEST_PAYER_ID_TYPE);
+        included_types.insert(INVOICE_PAYMENT_HASH_TYPE);
+        included_types.insert(INVOICE_NODE_ID_TYPE);
+
+        // Per spec, invoice_features MUST be included "if present" — meaning if the
+        // TLV exists in the invoice byte stream, regardless of whether the parsed
+        // value is empty. Check the raw bytes so we handle invoices from other
+        // implementations that may serialize empty features.
+        let has_features_tlv =
+            TlvStream::new(&invoice_bytes).any(|r| r.r#type == INVOICE_FEATURES_TYPE);
+        if has_features_tlv {
+            included_types.insert(INVOICE_FEATURES_TYPE);
+        }
+
+        Ok(Self { invoice, preimage, included_types, invoice_bytes })
+    }
+
+    /// Include a specific TLV type in the proof.
+    ///
+    /// Returns an error if the type is not allowed (e.g., invreq_metadata or
+    /// types >= 240 which are in the signature/payer-proof range and handled
+    /// separately).
+    pub fn include_type(mut self, tlv_type: u64) -> Result<Self, PayerProofError> {
+        if tlv_type == PAYER_METADATA_TYPE {
+            return Err(PayerProofError::InvreqMetadataNotAllowed);
+        }
+        if tlv_type >= TLV_SIGNATURE {
+            return Err(PayerProofError::SignatureTypeNotAllowed);
+        }
+        self.included_types.insert(tlv_type);
+        Ok(self)
+    }
+
+    /// Include the offer description in the proof.
+    pub fn include_offer_description(mut self) -> Self {
+        self.included_types.insert(OFFER_DESCRIPTION_TYPE);
+        self
+    }
+
+    /// Include the offer issuer in the proof.
+    pub fn include_offer_issuer(mut self) -> Self {
+        self.included_types.insert(OFFER_ISSUER_TYPE);
+        self
+    }
+
+    /// Include the invoice amount in the proof.
+    pub fn include_invoice_amount(mut self) -> Self {
+        self.included_types.insert(INVOICE_AMOUNT_TYPE);
+        self
+    }
+
+    /// Include the invoice creation timestamp in the proof.
+    pub fn include_invoice_created_at(mut self) -> Self {
+        self.included_types.insert(INVOICE_CREATED_AT_TYPE);
+        self
+    }
+
+    /// Builds a signed [`PayerProof`] using the provided signing function.
+    ///
+    /// Use this when you have direct access to the payer's signing key.
+    pub fn build<F>(self, sign_fn: F, note: Option<&str>) -> Result<PayerProof, PayerProofError>
+    where
+        F: FnOnce(&Message) -> Result<Signature, ()>,
+    {
+        let unsigned = self.build_unsigned()?;
+        unsigned.sign(sign_fn, note)
+    }
+
+    /// Builds a signed [`PayerProof`] using a key derived from an [`ExpandedKey`] and [`Nonce`].
+    ///
+    /// This re-derives the payer signing key using the same derivation scheme as invoice requests
+    /// created with `deriving_signing_pubkey`. The `nonce` and `payment_id` must be the same ones
+    /// used when creating the original invoice request (available from the
+    /// [`OffersContext::OutboundPaymentForOffer`]).
+    ///
+    /// [`OffersContext::OutboundPaymentForOffer`]: crate::blinded_path::message::OffersContext::OutboundPaymentForOffer
+    pub fn build_with_derived_key(
+        self, expanded_key: &ExpandedKey, nonce: Nonce, payment_id: PaymentId, note: Option<&str>,
+    ) -> Result<PayerProof, PayerProofError> {
+        let secp_ctx = Secp256k1::signing_only();
+        let keys = self
+            .invoice
+            .derive_payer_signing_keys(payment_id, nonce, expanded_key, &secp_ctx)
+            .map_err(|_| PayerProofError::KeyDerivationFailed)?;
+
+        let unsigned = self.build_unsigned()?;
+        unsigned.sign(|message| Ok(secp_ctx.sign_schnorr_no_aux_rand(message, &keys)), note)
+    }
+
+    fn build_unsigned(self) -> Result<UnsignedPayerProof, PayerProofError> {
+        let invoice_bytes = self.invoice_bytes;
+        let mut bytes_without_sig = Vec::with_capacity(invoice_bytes.len());
+        for r in TlvStream::new(&invoice_bytes).filter(|r| !SIGNATURE_TYPES.contains(&r.r#type)) {
+            bytes_without_sig.extend_from_slice(r.record_bytes);
+        }
+
+        let disclosure =
+            merkle::compute_selective_disclosure(&bytes_without_sig, &self.included_types)?;
+
+        let invoice_signature = self.invoice.signature();
+
+        Ok(UnsignedPayerProof {
+            invoice_signature,
+            preimage: self.preimage,
+            payer_id: self.invoice.payer_signing_pubkey(),
+            payment_hash: self.invoice.payment_hash().clone(),
+            issuer_signing_pubkey: self.invoice.signing_pubkey(),
+            invoice_bytes,
+            included_types: self.included_types,
+            disclosure,
+        })
+    }
+}
+
+/// An unsigned [`PayerProof`] ready for signing.
+struct UnsignedPayerProof {
+    invoice_signature: Signature,
+    preimage: PaymentPreimage,
+    payer_id: PublicKey,
+    payment_hash: PaymentHash,
+    issuer_signing_pubkey: PublicKey,
+    invoice_bytes: Vec<u8>,
+    included_types: BTreeSet<u64>,
+    disclosure: SelectiveDisclosure,
+}
+
+impl UnsignedPayerProof {
+    fn sign<F>(self, sign_fn: F, note: Option<&str>) -> Result<PayerProof, PayerProofError>
+    where
+        F: FnOnce(&Message) -> Result<Signature, ()>,
+    {
+        let message = Self::compute_payer_signature_message(note, &self.disclosure.merkle_root);
+        let payer_signature = sign_fn(&message).map_err(|_| PayerProofError::SigningError)?;
+
+        let secp_ctx = Secp256k1::verification_only();
+        secp_ctx
+            .verify_schnorr(&payer_signature, &message, &self.payer_id.into())
+            .map_err(|_| PayerProofError::InvalidPayerSignature)?;
+
+        let bytes = self.serialize_payer_proof(&payer_signature, note);
+
+        Ok(PayerProof {
+            bytes,
+            contents: PayerProofContents {
+                payer_id: self.payer_id,
+                payment_hash: self.payment_hash,
+                issuer_signing_pubkey: self.issuer_signing_pubkey,
+                preimage: self.preimage,
+                invoice_signature: self.invoice_signature,
+                payer_signature,
+                payer_note: note.map(String::from),
+            },
+            merkle_root: self.disclosure.merkle_root,
+        })
+    }
+
+    /// Compute the payer signature message per BOLT 12 signature calculation.
+    fn compute_payer_signature_message(note: Option<&str>, merkle_root: &sha256::Hash) -> Message {
+        let mut inner_hasher = sha256::Hash::engine();
+        if let Some(n) = note {
+            inner_hasher.input(n.as_bytes());
+        }
+        inner_hasher.input(merkle_root.as_ref());
+        let inner_msg = sha256::Hash::from_engine(inner_hasher);
+
+        let tag_hash = sha256::Hash::hash(PAYER_SIGNATURE_TAG.as_bytes());
+
+        let mut final_hasher = sha256::Hash::engine();
+        final_hasher.input(tag_hash.as_ref());
+        final_hasher.input(tag_hash.as_ref());
+        final_hasher.input(inner_msg.as_ref());
+        let final_digest = sha256::Hash::from_engine(final_hasher);
+
+        Message::from_digest(*final_digest.as_byte_array())
+    }
+
+    fn serialize_payer_proof(&self, payer_signature: &Signature, note: Option<&str>) -> Vec<u8> {
+        let mut bytes = Vec::new();
+
+        // Filter out SIGNATURE_TYPES defensively: the invoice bytes contain the
+        // invoice's own signature (type 240) which must not appear as an included
+        // invoice record — the payer proof writes its own signature TLV below.
+ for record in TlvStream::new(&self.invoice_bytes).filter(|r| { + self.included_types.contains(&r.r#type) && !SIGNATURE_TYPES.contains(&r.r#type) + }) { + bytes.extend_from_slice(record.record_bytes); + } + + BigSize(TLV_SIGNATURE).write(&mut bytes).expect("Vec write should not fail"); + BigSize(64).write(&mut bytes).expect("Vec write should not fail"); + self.invoice_signature.write(&mut bytes).expect("Vec write should not fail"); + + BigSize(TLV_PREIMAGE).write(&mut bytes).expect("Vec write should not fail"); + BigSize(32).write(&mut bytes).expect("Vec write should not fail"); + bytes.extend_from_slice(&self.preimage.0); + + if !self.disclosure.omitted_markers.is_empty() { + let omitted_len: u64 = self + .disclosure + .omitted_markers + .iter() + .map(|m| BigSize(*m).serialized_length() as u64) + .sum(); + BigSize(TLV_OMITTED_TLVS).write(&mut bytes).expect("Vec write should not fail"); + BigSize(omitted_len).write(&mut bytes).expect("Vec write should not fail"); + for marker in &self.disclosure.omitted_markers { + BigSize(*marker).write(&mut bytes).expect("Vec write should not fail"); + } + } + + if !self.disclosure.missing_hashes.is_empty() { + let len = self.disclosure.missing_hashes.len() * 32; + BigSize(TLV_MISSING_HASHES).write(&mut bytes).expect("Vec write should not fail"); + BigSize(len as u64).write(&mut bytes).expect("Vec write should not fail"); + for hash in &self.disclosure.missing_hashes { + bytes.extend_from_slice(hash.as_ref()); + } + } + + if !self.disclosure.leaf_hashes.is_empty() { + let len = self.disclosure.leaf_hashes.len() * 32; + BigSize(TLV_LEAF_HASHES).write(&mut bytes).expect("Vec write should not fail"); + BigSize(len as u64).write(&mut bytes).expect("Vec write should not fail"); + for hash in &self.disclosure.leaf_hashes { + bytes.extend_from_slice(hash.as_ref()); + } + } + + let note_bytes = note.map(|n| n.as_bytes()).unwrap_or(&[]); + let payer_sig_len = 64 + note_bytes.len(); + BigSize(TLV_PAYER_SIGNATURE).write(&mut 
bytes).expect("Vec write should not fail");
+        BigSize(payer_sig_len as u64).write(&mut bytes).expect("Vec write should not fail");
+        payer_signature.write(&mut bytes).expect("Vec write should not fail");
+        bytes.extend_from_slice(note_bytes);
+
+        bytes
+    }
+}
+
+impl PayerProof {
+    /// The payment preimage proving the invoice was paid.
+    pub fn preimage(&self) -> PaymentPreimage {
+        self.contents.preimage
+    }
+
+    /// The payer's public key (who paid).
+    pub fn payer_id(&self) -> PublicKey {
+        self.contents.payer_id
+    }
+
+    /// The issuer's signing public key (the key that signed the invoice).
+    pub fn issuer_signing_pubkey(&self) -> PublicKey {
+        self.contents.issuer_signing_pubkey
+    }
+
+    /// The payment hash.
+    pub fn payment_hash(&self) -> PaymentHash {
+        self.contents.payment_hash
+    }
+
+    /// The invoice signature over the merkle root.
+    pub fn invoice_signature(&self) -> Signature {
+        self.contents.invoice_signature
+    }
+
+    /// The payer's schnorr signature proving who authorized the payment.
+    pub fn payer_signature(&self) -> Signature {
+        self.contents.payer_signature
+    }
+
+    /// The payer's note, if any.
+    pub fn payer_note(&self) -> Option<PrintableString<'_>> {
+        self.contents.payer_note.as_deref().map(PrintableString)
+    }
+
+    /// The merkle root of the original invoice.
+    pub fn merkle_root(&self) -> sha256::Hash {
+        self.merkle_root
+    }
+
+    /// The raw bytes of the payer proof.
+    pub fn bytes(&self) -> &[u8] {
+        &self.bytes
+    }
+}
+
+impl Bech32Encode for PayerProof {
+    const BECH32_HRP: &'static str = PAYER_PROOF_HRP;
+}
+
+impl AsRef<[u8]> for PayerProof {
+    fn as_ref(&self) -> &[u8] {
+        &self.bytes
+    }
+}
+
+/// Validate that the byte slice is a well-formed TLV stream.
+///
+/// `TlvStream::new()` assumes well-formed input and panics on malformed BigSize
+/// values or out-of-bounds lengths. This function validates the framing first,
+/// returning an error instead of panicking on untrusted input.
+fn validate_tlv_framing(bytes: &[u8]) -> Result<(), crate::ln::msgs::DecodeError> { + use crate::ln::msgs::DecodeError; + let mut cursor = io::Cursor::new(bytes); + while (cursor.position() as usize) < bytes.len() { + let _type: BigSize = Readable::read(&mut cursor).map_err(|_| DecodeError::InvalidValue)?; + let length: BigSize = Readable::read(&mut cursor).map_err(|_| DecodeError::InvalidValue)?; + let end = cursor.position().checked_add(length.0).ok_or(DecodeError::InvalidValue)?; + let end_usize = usize::try_from(end).map_err(|_| DecodeError::InvalidValue)?; + if end_usize > bytes.len() { + return Err(DecodeError::ShortRead); + } + cursor.set_position(end); + } + Ok(()) +} + +// Payer proofs use manual TLV parsing rather than `ParsedMessage` / `tlv_stream!` +// because of their hybrid structure: a dynamic, variable set of included invoice +// TLV records (types 0-239, preserved as raw bytes for merkle reconstruction) plus +// payer-proof-specific TLVs (types 240-250) with non-standard encodings such as +// BigSize lists (`omitted_tlvs`) and concatenated 32-byte hashes +// (`missing_hashes`, `leaf_hashes`). The `tlv_stream!` macro assumes a fixed set +// of known fields with standard `Readable`/`Writeable` encodings, so it cannot +// express the passthrough-or-parse logic required here. +impl TryFrom> for PayerProof { + type Error = crate::offers::parse::Bolt12ParseError; + + fn try_from(bytes: Vec) -> Result { + use crate::ln::msgs::DecodeError; + use crate::offers::parse::Bolt12ParseError; + + // Validate TLV framing before passing to TlvStream, which assumes + // well-formed input and panics on malformed BigSize or out-of-bounds + // lengths. This mirrors the validation that ParsedMessage / CursorReadable + // provides for other BOLT 12 types. 
+ validate_tlv_framing(&bytes) + .map_err(|_| Bolt12ParseError::Decode(DecodeError::InvalidValue))?; + + let mut payer_id: Option = None; + let mut payment_hash: Option = None; + let mut issuer_signing_pubkey: Option = None; + let mut invoice_signature: Option = None; + let mut preimage: Option = None; + let mut payer_signature: Option = None; + let mut payer_note: Option = None; + + let mut leaf_hashes: Vec = Vec::new(); + let mut omitted_markers: Vec = Vec::new(); + let mut missing_hashes: Vec = Vec::new(); + + let mut included_types: BTreeSet = BTreeSet::new(); + let mut included_records: Vec<(u64, usize, usize)> = Vec::new(); + + let mut prev_tlv_type: Option = None; + + for record in TlvStream::new(&bytes) { + let tlv_type = record.r#type; + + // Strict ascending order check covers both ordering and duplicates. + if let Some(prev) = prev_tlv_type { + if tlv_type <= prev { + return Err(Bolt12ParseError::Decode(DecodeError::InvalidValue)); + } + } + prev_tlv_type = Some(tlv_type); + + match tlv_type { + INVOICE_REQUEST_PAYER_ID_TYPE => { + payer_id = Some(record.read_value()?); + included_types.insert(tlv_type); + included_records.push(( + tlv_type, + record.end - record.record_bytes.len(), + record.end, + )); + }, + INVOICE_PAYMENT_HASH_TYPE => { + payment_hash = Some(record.read_value()?); + included_types.insert(tlv_type); + included_records.push(( + tlv_type, + record.end - record.record_bytes.len(), + record.end, + )); + }, + INVOICE_NODE_ID_TYPE => { + issuer_signing_pubkey = Some(record.read_value()?); + included_types.insert(tlv_type); + included_records.push(( + tlv_type, + record.end - record.record_bytes.len(), + record.end, + )); + }, + TLV_SIGNATURE => { + invoice_signature = Some(record.read_value()?); + }, + TLV_PREIMAGE => { + preimage = Some(record.read_value()?); + }, + TLV_OMITTED_TLVS => { + let mut cursor = io::Cursor::new(record.value_bytes); + while (cursor.position() as usize) < record.value_bytes.len() { + let marker: BigSize = 
Readable::read(&mut cursor)?; + omitted_markers.push(marker.0); + } + }, + TLV_MISSING_HASHES => { + if record.value_bytes.len() % 32 != 0 { + return Err(Bolt12ParseError::Decode(DecodeError::InvalidValue)); + } + for chunk in record.value_bytes.chunks_exact(32) { + let hash_bytes: [u8; 32] = chunk.try_into().expect("chunks_exact(32)"); + missing_hashes.push(sha256::Hash::from_byte_array(hash_bytes)); + } + }, + TLV_LEAF_HASHES => { + if record.value_bytes.len() % 32 != 0 { + return Err(Bolt12ParseError::Decode(DecodeError::InvalidValue)); + } + for chunk in record.value_bytes.chunks_exact(32) { + let hash_bytes: [u8; 32] = chunk.try_into().expect("chunks_exact(32)"); + leaf_hashes.push(sha256::Hash::from_byte_array(hash_bytes)); + } + }, + TLV_PAYER_SIGNATURE => { + if record.value_bytes.len() < 64 { + return Err(Bolt12ParseError::Decode(DecodeError::InvalidValue)); + } + let mut cursor = io::Cursor::new(record.value_bytes); + payer_signature = Some(Readable::read(&mut cursor)?); + if record.value_bytes.len() > 64 { + let note_bytes = &record.value_bytes[64..]; + payer_note = Some( + String::from_utf8(note_bytes.to_vec()) + .map_err(|_| DecodeError::InvalidValue)?, + ); + } + }, + _ => { + if tlv_type == PAYER_METADATA_TYPE { + return Err(Bolt12ParseError::Decode(DecodeError::InvalidValue)); + } + if tlv_type < TLV_SIGNATURE { + // Included invoice TLV record (passthrough for merkle + // reconstruction). + included_types.insert(tlv_type); + included_records.push(( + tlv_type, + record.end - record.record_bytes.len(), + record.end, + )); + } else if tlv_type % 2 == 0 { + // Unknown even types are mandatory-to-understand per + // BOLT convention — reject them. + return Err(Bolt12ParseError::Decode(DecodeError::InvalidValue)); + } + // Unknown odd types can be safely ignored. 
+ }, + } + } + + let payer_id = payer_id.ok_or(Bolt12ParseError::InvalidSemantics( + crate::offers::parse::Bolt12SemanticError::MissingPayerSigningPubkey, + ))?; + let payment_hash = payment_hash.ok_or(Bolt12ParseError::InvalidSemantics( + crate::offers::parse::Bolt12SemanticError::MissingPaymentHash, + ))?; + let issuer_signing_pubkey = + issuer_signing_pubkey.ok_or(Bolt12ParseError::InvalidSemantics( + crate::offers::parse::Bolt12SemanticError::MissingSigningPubkey, + ))?; + let invoice_signature = invoice_signature.ok_or(Bolt12ParseError::InvalidSemantics( + crate::offers::parse::Bolt12SemanticError::MissingSignature, + ))?; + let preimage = preimage.ok_or(Bolt12ParseError::Decode(DecodeError::InvalidValue))?; + let payer_signature = payer_signature.ok_or(Bolt12ParseError::InvalidSemantics( + crate::offers::parse::Bolt12SemanticError::MissingSignature, + ))?; + + validate_omitted_markers_for_parsing(&omitted_markers, &included_types) + .map_err(Bolt12ParseError::Decode)?; + + if leaf_hashes.len() != included_records.len() { + return Err(Bolt12ParseError::Decode(DecodeError::InvalidValue)); + } + + let included_refs: Vec<(u64, &[u8])> = + included_records.iter().map(|&(t, start, end)| (t, &bytes[start..end])).collect(); + let merkle_root = merkle::reconstruct_merkle_root( + &included_refs, + &leaf_hashes, + &omitted_markers, + &missing_hashes, + ) + .map_err(|_| Bolt12ParseError::Decode(DecodeError::InvalidValue))?; + + // Verify preimage matches payment hash. + let computed = sha256::Hash::hash(&preimage.0); + if computed.as_byte_array() != &payment_hash.0 { + return Err(Bolt12ParseError::Decode(DecodeError::InvalidValue)); + } + + // Verify the invoice signature against the issuer signing pubkey. + let tagged_hash = TaggedHash::from_merkle_root(SIGNATURE_TAG, merkle_root); + merkle::verify_signature(&invoice_signature, &tagged_hash, issuer_signing_pubkey) + .map_err(|_| Bolt12ParseError::Decode(DecodeError::InvalidValue))?; + + // Verify the payer signature. 
+ let message = UnsignedPayerProof::compute_payer_signature_message( + payer_note.as_deref(), + &merkle_root, + ); + let secp_ctx = Secp256k1::verification_only(); + secp_ctx + .verify_schnorr(&payer_signature, &message, &payer_id.into()) + .map_err(|_| Bolt12ParseError::Decode(DecodeError::InvalidValue))?; + + Ok(PayerProof { + bytes, + contents: PayerProofContents { + payer_id, + payment_hash, + issuer_signing_pubkey, + preimage, + invoice_signature, + payer_signature, + payer_note, + }, + merkle_root, + }) + } +} + +/// Validate omitted markers during parsing. +/// +/// Per spec: +/// - MUST NOT contain 0 +/// - MUST NOT contain signature TLV element numbers (240-1000) +/// - MUST be in strict ascending order +/// - MUST NOT contain the number of an included TLV field +/// - Markers MUST be minimized: each marker must be exactly prev_value + 1 within +/// a run, and the first marker after an included type X must be X + 1. This +/// naturally allows a trailing run of omitted TLVs after the final included +/// type. 
+fn validate_omitted_markers_for_parsing( + omitted_markers: &[u64], included_types: &BTreeSet, +) -> Result<(), crate::ln::msgs::DecodeError> { + let mut inc_iter = included_types.iter().copied().peekable(); + // After implicit TLV0 (marker 0), the first minimized marker would be 1 + let mut expected_next: u64 = 1; + let mut prev = 0u64; + + for &marker in omitted_markers { + // MUST NOT contain 0 + if marker == 0 { + return Err(crate::ln::msgs::DecodeError::InvalidValue); + } + + // MUST NOT contain signature TLV types + if SIGNATURE_TYPES.contains(&marker) { + return Err(crate::ln::msgs::DecodeError::InvalidValue); + } + + // MUST be strictly ascending + if marker <= prev { + return Err(crate::ln::msgs::DecodeError::InvalidValue); + } + + // MUST NOT contain included TLV types + if included_types.contains(&marker) { + return Err(crate::ln::msgs::DecodeError::InvalidValue); + } + + // Validate minimization: marker must equal expected_next (continuation + // of current run), or there must be an included type X between the + // previous position and this marker such that X + 1 == marker. 
+ if marker != expected_next { + let mut found = false; + for inc_type in inc_iter.by_ref() { + if inc_type + 1 == marker { + found = true; + break; + } + if inc_type >= marker { + return Err(crate::ln::msgs::DecodeError::InvalidValue); + } + } + if !found { + return Err(crate::ln::msgs::DecodeError::InvalidValue); + } + } + + expected_next = marker + 1; + prev = marker; + } + + Ok(()) +} + +impl core::fmt::Display for PayerProof { + fn fmt(&self, f: &mut core::fmt::Formatter) -> core::fmt::Result { + self.fmt_bech32_str(f) + } +} + +#[cfg(test)] +mod tests { + use super::*; + use crate::ln::channelmanager::PaymentId; + use crate::ln::inbound_payment::ExpandedKey; + use crate::offers::merkle::compute_selective_disclosure; + use crate::offers::nonce::Nonce; + #[cfg(not(c_bindings))] + use crate::offers::refund::RefundBuilder; + #[cfg(c_bindings)] + use crate::offers::refund::RefundMaybeWithDerivedMetadataBuilder as RefundBuilder; + use crate::offers::test_utils::*; + + #[test] + fn test_selective_disclosure_computation() { + // Test that the merkle selective disclosure works correctly + // Simple TLV stream with types 1, 2 + let tlv_bytes = vec![ + 0x01, 0x03, 0xe8, 0x03, 0xe8, // type 1, length 3, value + 0x02, 0x08, 0x00, 0x00, 0x01, 0x00, 0x00, 0x02, 0x00, 0x03, // type 2 + ]; + + let mut included = BTreeSet::new(); + included.insert(1); + + let result = compute_selective_disclosure(&tlv_bytes, &included); + assert!(result.is_ok()); + + let disclosure = result.unwrap(); + assert_eq!(disclosure.leaf_hashes.len(), 1); // One included TLV + assert!(!disclosure.missing_hashes.is_empty()); // Should have missing hashes for omitted + } + + /// Test the omitted_markers marker algorithm per BOLT 12 payer proof spec. 
+ /// + /// From the spec example: + /// TLVs: 0 (omitted), 10 (included), 20 (omitted), 30 (omitted), + /// 40 (included), 50 (omitted), 60 (omitted), 240 (signature) + /// + /// Expected markers: [11, 12, 41, 42] + /// + /// The algorithm: + /// - TLV 0 is always omitted and implicit (not in markers) + /// - For omitted TLV after included: marker = prev_included_type + 1 + /// - For consecutive omitted TLVs: marker = prev_marker + 1 + #[test] + fn test_omitted_markers_spec_example() { + // Build a synthetic TLV stream matching the spec example + // TLV format: type (BigSize) || length (BigSize) || value + let mut tlv_bytes = Vec::new(); + + // TLV 0: type=0, len=4, value=dummy + tlv_bytes.extend_from_slice(&[0x00, 0x04, 0x00, 0x00, 0x00, 0x00]); + // TLV 10: type=10, len=2, value=dummy + tlv_bytes.extend_from_slice(&[0x0a, 0x02, 0x00, 0x00]); + // TLV 20: type=20, len=2, value=dummy + tlv_bytes.extend_from_slice(&[0x14, 0x02, 0x00, 0x00]); + // TLV 30: type=30, len=2, value=dummy + tlv_bytes.extend_from_slice(&[0x1e, 0x02, 0x00, 0x00]); + // TLV 40: type=40, len=2, value=dummy + tlv_bytes.extend_from_slice(&[0x28, 0x02, 0x00, 0x00]); + // TLV 50: type=50, len=2, value=dummy + tlv_bytes.extend_from_slice(&[0x32, 0x02, 0x00, 0x00]); + // TLV 60: type=60, len=2, value=dummy + tlv_bytes.extend_from_slice(&[0x3c, 0x02, 0x00, 0x00]); + + // Include types 10 and 40 + let mut included = BTreeSet::new(); + included.insert(10); + included.insert(40); + + let disclosure = compute_selective_disclosure(&tlv_bytes, &included).unwrap(); + + // Per spec example, omitted_markers should be [11, 12, 41, 42] + assert_eq!(disclosure.omitted_markers, vec![11, 12, 41, 42]); + + // leaf_hashes should have 2 entries (one for each included TLV) + assert_eq!(disclosure.leaf_hashes.len(), 2); + } + + /// Test that the marker algorithm handles edge cases correctly. 
+ #[test] + fn test_omitted_markers_edge_cases() { + // Test with only one included TLV at the start + let mut tlv_bytes = Vec::new(); + tlv_bytes.extend_from_slice(&[0x00, 0x04, 0x00, 0x00, 0x00, 0x00]); // TLV 0 + tlv_bytes.extend_from_slice(&[0x0a, 0x02, 0x00, 0x00]); // TLV 10 + tlv_bytes.extend_from_slice(&[0x14, 0x02, 0x00, 0x00]); // TLV 20 + tlv_bytes.extend_from_slice(&[0x1e, 0x02, 0x00, 0x00]); // TLV 30 + + let mut included = BTreeSet::new(); + included.insert(10); + + let disclosure = compute_selective_disclosure(&tlv_bytes, &included).unwrap(); + + // After included type 10, omitted types 20 and 30 get markers 11 and 12 + assert_eq!(disclosure.omitted_markers, vec![11, 12]); + } + + /// Test that all included TLVs produce no omitted markers (except implicit TLV0). + #[test] + fn test_omitted_markers_all_included() { + let mut tlv_bytes = Vec::new(); + tlv_bytes.extend_from_slice(&[0x00, 0x04, 0x00, 0x00, 0x00, 0x00]); // TLV 0 (always omitted) + tlv_bytes.extend_from_slice(&[0x0a, 0x02, 0x00, 0x00]); // TLV 10 + tlv_bytes.extend_from_slice(&[0x14, 0x02, 0x00, 0x00]); // TLV 20 + + let mut included = BTreeSet::new(); + included.insert(10); + included.insert(20); + + let disclosure = compute_selective_disclosure(&tlv_bytes, &included).unwrap(); + + // Only TLV 0 is omitted (implicit), so no markers needed + assert!(disclosure.omitted_markers.is_empty()); + } + + /// Test validation of omitted_markers - must not contain 0. + #[test] + fn test_validate_omitted_markers_rejects_zero() { + let omitted = vec![0, 11, 12]; + let included: BTreeSet = [10, 30].iter().copied().collect(); + + let result = validate_omitted_markers_for_parsing(&omitted, &included); + assert!(result.is_err()); + } + + /// Test validation of omitted_markers - must not contain signature types. 
+ #[test] + fn test_validate_omitted_markers_rejects_signature_types() { + // included=[10], markers=[1, 2, 250] — 250 is a signature type + let omitted = vec![1, 2, 250]; + let included: BTreeSet = [10].iter().copied().collect(); + + let result = validate_omitted_markers_for_parsing(&omitted, &included); + assert!(result.is_err()); + } + + /// Test validation of omitted_markers - must be strictly ascending. + #[test] + fn test_validate_omitted_markers_rejects_non_ascending() { + // markers=[1, 11, 9]: 1 ok, 11 ok (after included 10), but 9 <= 11 fails ascending + let omitted = vec![1, 11, 9]; + let included: BTreeSet = [10, 30].iter().copied().collect(); + + let result = validate_omitted_markers_for_parsing(&omitted, &included); + assert!(result.is_err()); + } + + /// Test validation of omitted_markers - must not contain included types. + #[test] + fn test_validate_omitted_markers_rejects_included_types() { + // included=[10, 30], markers=[1, 10] — 10 is in included set + let omitted = vec![1, 10]; + let included: BTreeSet = [10, 30].iter().copied().collect(); + + let result = validate_omitted_markers_for_parsing(&omitted, &included); + assert!(matches!(result, Err(crate::ln::msgs::DecodeError::InvalidValue))); + } + + /// Test that a minimized trailing run is accepted. + #[test] + fn test_validate_omitted_markers_accepts_trailing_run() { + // included=[10, 20], markers=[1, 21, 22] — both 21 and 22 > max included (20) + let omitted = vec![1, 21, 22]; + let included: BTreeSet = [10, 20].iter().copied().collect(); + + let result = validate_omitted_markers_for_parsing(&omitted, &included); + assert!(result.is_ok()); + } + + /// Test that valid minimized omitted_markers pass validation. + #[test] + fn test_validate_omitted_markers_accepts_valid() { + // Realistic payer proof: included types include required fields (88, 168, 176) + // so max_included=176 and markers are well below it. 
+ // Layout: 0(omit), 10(incl), 20(omit), 30(omit), 40(incl), 50(omit), 88(incl), + // 168(incl), 176(incl) + // markers=[11, 12, 41, 89] + let omitted = vec![11, 12, 41, 89]; + let included: BTreeSet = [10, 40, 88, 168, 176].iter().copied().collect(); + + let result = validate_omitted_markers_for_parsing(&omitted, &included); + assert!(result.is_ok()); + } + + /// Test that non-minimized markers are rejected. + #[test] + fn test_validate_omitted_markers_rejects_non_minimized() { + // included=[10, 40], markers=[11, 15, 41, 42] + // marker 15 should be 12 (continuation of run after 11) + let omitted = vec![11, 15, 41, 42]; + let included: BTreeSet = [10, 40].iter().copied().collect(); + + let result = validate_omitted_markers_for_parsing(&omitted, &included); + assert!(result.is_err()); + } + + /// Test that non-minimized first marker in a run is rejected. + #[test] + fn test_validate_omitted_markers_rejects_non_minimized_run_start() { + // included=[10, 40], markers=[11, 12, 45, 46] + // marker 45 should be 41 (first omitted after included 40) + let omitted = vec![11, 12, 45, 46]; + let included: BTreeSet = [10, 40].iter().copied().collect(); + + let result = validate_omitted_markers_for_parsing(&omitted, &included); + assert!(result.is_err()); + } + + /// Test minimized markers with omitted TLVs before any included type. + #[test] + fn test_validate_omitted_markers_accepts_leading_run() { + // included=[40], markers=[1, 2, 41] + // Two omitted before any included type, one after 40 + let omitted = vec![1, 2, 41]; + let included: BTreeSet = [40].iter().copied().collect(); + + let result = validate_omitted_markers_for_parsing(&omitted, &included); + assert!(result.is_ok()); + } + + /// Test minimized markers with consecutive included types (no markers between them). 
+ #[test] + fn test_validate_omitted_markers_accepts_consecutive_included() { + // included=[10, 20, 40], markers=[1, 41] + // One omitted before 10, no omitted between 10-20 or 20-40, one after 40 + let omitted = vec![1, 41]; + let included: BTreeSet = [10, 20, 40].iter().copied().collect(); + + let result = validate_omitted_markers_for_parsing(&omitted, &included); + assert!(result.is_ok()); + } + + /// Test that invreq_metadata (type 0) cannot be explicitly included via include_type. + #[test] + fn test_invreq_metadata_not_allowed() { + assert_eq!(PAYER_METADATA_TYPE, 0); + } + + /// Test that out-of-order TLVs are rejected during parsing. + #[test] + fn test_parsing_rejects_out_of_order_tlvs() { + use core::convert::TryFrom; + + // Create a malformed TLV stream with out-of-order types (20 before 10) + // TLV format: type (BigSize) || length (BigSize) || value + let mut bytes = Vec::new(); + // TLV type 20, length 2, value + bytes.extend_from_slice(&[0x14, 0x02, 0x00, 0x00]); + // TLV type 10, length 2, value (OUT OF ORDER!) + bytes.extend_from_slice(&[0x0a, 0x02, 0x00, 0x00]); + + let result = PayerProof::try_from(bytes); + assert!(result.is_err()); + } + + /// Test that duplicate TLVs are rejected during parsing. + #[test] + fn test_parsing_rejects_duplicate_tlvs() { + use core::convert::TryFrom; + + // Create a malformed TLV stream with duplicate type 10 + let mut bytes = Vec::new(); + // TLV type 10, length 2, value + bytes.extend_from_slice(&[0x0a, 0x02, 0x00, 0x00]); + // TLV type 10 again (DUPLICATE!) + bytes.extend_from_slice(&[0x0a, 0x02, 0x00, 0x00]); + + let result = PayerProof::try_from(bytes); + assert!(result.is_err()); + } + + /// Test that invalid hash lengths (not multiple of 32) are rejected. 
+ #[test] + fn test_parsing_rejects_invalid_hash_length() { + use core::convert::TryFrom; + + // Create a TLV stream with missing_hashes (type 246) that has invalid length + // BigSize encoding: values 0-252 are single byte, 253-65535 use 0xFD prefix + let mut bytes = Vec::new(); + // TLV type 246 (missing_hashes) - 246 < 253 so single byte + bytes.push(0xf6); // type 246 + bytes.push(0x21); // length 33 (not multiple of 32!) + bytes.extend_from_slice(&[0x00; 33]); // 33 bytes of zeros + + let result = PayerProof::try_from(bytes); + assert!(result.is_err()); + } + + /// Test that invalid leaf_hashes length (not multiple of 32) is rejected. + #[test] + fn test_parsing_rejects_invalid_leaf_hashes_length() { + use core::convert::TryFrom; + + // Create a TLV stream with leaf_hashes (type 248) that has invalid length + // BigSize encoding: values 0-252 are single byte, 253-65535 use 0xFD prefix + let mut bytes = Vec::new(); + // TLV type 248 (leaf_hashes) - 248 < 253 so single byte + bytes.push(0xf8); // type 248 + bytes.push(0x1f); // length 31 (not multiple of 32!) + bytes.extend_from_slice(&[0x00; 31]); // 31 bytes of zeros + + let result = PayerProof::try_from(bytes); + assert!(result.is_err()); + } + + /// Test that TLV types >= 240 are rejected by include_type. + /// + /// Per spec, all types >= 240 are in the signature/payer-proof range and + /// handled separately. This includes types > 1000 (experimental range) + /// which were previously allowed through. + #[test] + fn test_include_type_rejects_signature_types() { + // Test the type validation logic directly. 
+ fn check_include_type(tlv_type: u64) -> Result<(), PayerProofError> { + if tlv_type == PAYER_METADATA_TYPE { + return Err(PayerProofError::InvreqMetadataNotAllowed); + } + if tlv_type >= TLV_SIGNATURE { + return Err(PayerProofError::SignatureTypeNotAllowed); + } + Ok(()) + } + + // All types >= 240 must be rejected + assert!(matches!(check_include_type(240), Err(PayerProofError::SignatureTypeNotAllowed))); + assert!(matches!(check_include_type(250), Err(PayerProofError::SignatureTypeNotAllowed))); + assert!(matches!(check_include_type(1000), Err(PayerProofError::SignatureTypeNotAllowed))); + // Types > 1000 (experimental) must also be rejected + assert!(matches!(check_include_type(1001), Err(PayerProofError::SignatureTypeNotAllowed))); + assert!(matches!( + check_include_type(u64::MAX), + Err(PayerProofError::SignatureTypeNotAllowed) + )); + // Just below the boundary + assert!(check_include_type(239).is_ok()); + // Payer metadata still rejected + assert!(matches!(check_include_type(0), Err(PayerProofError::InvreqMetadataNotAllowed))); + } + + /// Test that unknown even TLV types >= 240 are rejected during parsing. + /// + /// Per BOLT convention, even types are mandatory-to-understand. The parser + /// must reject unknown even types in the signature range to prevent + /// accepting malformed proofs. 
+ #[test] + fn test_parsing_rejects_unknown_even_signature_range_types() { + use core::convert::TryFrom; + + // Craft a payer proof with an unknown even type 252 (in signature range, + // but not one of the known payer proof TLVs) + let mut bytes = Vec::new(); + // Some included invoice TLV first (type 10) + bytes.extend_from_slice(&[0x0a, 0x02, 0x00, 0x00]); + // Unknown even type 252 (in signature range 240-1000) + bytes.push(0xfc); // type 252 + bytes.push(0x02); // length 2 + bytes.extend_from_slice(&[0x00, 0x00]); + + let result = PayerProof::try_from(bytes); + assert!(result.is_err(), "Unknown even type 252 should be rejected"); + } + + /// Test that malformed TLV framing is rejected without panicking. + /// + /// TlvStream::new() panics on malformed BigSize values or out-of-bounds + /// lengths. The parser must validate framing before constructing TlvStream. + #[test] + fn test_parsing_rejects_malformed_tlv_framing() { + use core::convert::TryFrom; + + // Truncated BigSize type (0xFD prefix requires 2 more bytes) + let result = PayerProof::try_from(vec![0xFD, 0x01]); + assert!(result.is_err(), "Truncated BigSize type should be rejected"); + + // Valid type but truncated length + let result = PayerProof::try_from(vec![0x0a]); + assert!(result.is_err(), "Missing length should be rejected"); + + // Length exceeds remaining bytes + let result = PayerProof::try_from(vec![0x0a, 0x04, 0x00, 0x00]); + assert!(result.is_err(), "Length exceeding data should be rejected"); + + // Empty input should not panic + let result = PayerProof::try_from(vec![]); + assert!(result.is_err(), "Empty input should be rejected"); + + // Completely invalid bytes + let result = PayerProof::try_from(vec![0xFF, 0xFF]); + assert!(result.is_err(), "Invalid bytes should be rejected"); + } + + /// Test that duplicate type-0 TLVs are rejected. 
+ /// + /// Previously the ordering check used `u64` initialized to 0, which + /// skipped the check for the first TLV if its type was 0, allowing + /// duplicate type-0 records. + #[test] + fn test_parsing_rejects_duplicate_type_zero() { + use core::convert::TryFrom; + + // Two TLV records both with type 0 + let mut bytes = Vec::new(); + bytes.extend_from_slice(&[0x00, 0x02, 0x00, 0x00]); // type 0, len 2 + bytes.extend_from_slice(&[0x00, 0x02, 0x00, 0x00]); // type 0 again (DUPLICATE!) + + let result = PayerProof::try_from(bytes); + assert!(result.is_err(), "Duplicate type-0 TLVs should be rejected"); + } + + /// Test that payer_signature TLV with length < 64 is rejected. + /// + /// The payer_signature value contains a 64-byte schnorr signature + /// followed by an optional note. A length < 64 is always invalid. + #[test] + fn test_parsing_rejects_short_payer_signature() { + use core::convert::TryFrom; + + // Craft a TLV with type 250 (payer_signature) but only 32 bytes of value + let mut bytes = Vec::new(); + bytes.push(0xfa); // type 250 + bytes.push(0x20); // length 32 (too short for 64-byte signature) + bytes.extend_from_slice(&[0x00; 32]); + + let result = PayerProof::try_from(bytes); + assert!(result.is_err(), "payer_signature with len < 64 should be rejected"); + } + + #[test] + fn test_round_trip_with_trailing_experimental_tlvs() { + use core::convert::TryFrom; + + let preimage = PaymentPreimage([1; 32]); + let payment_hash = PaymentHash(*sha256::Hash::hash(&preimage.0).as_byte_array()); + let invoice = RefundBuilder::new(vec![1; 32], payer_pubkey(), 1000) + .unwrap() + .experimental_foo(42) + .experimental_bar(43) + .build() + .unwrap() + .respond_with_no_std(payment_paths(), payment_hash, recipient_pubkey(), now()) + .unwrap() + .experimental_baz(44) + .build() + .unwrap() + .sign(recipient_sign) + .unwrap(); + + let secp_ctx = Secp256k1::signing_only(); + let payer_keys = payer_keys(); + let proof = invoice + .payer_proof_builder(preimage) + .unwrap() 
+ .build(|message| Ok(secp_ctx.sign_schnorr_no_aux_rand(message, &payer_keys)), None) + .unwrap(); + let parsed = PayerProof::try_from(proof.bytes().to_vec()).unwrap(); + + assert_eq!(parsed.bytes(), proof.bytes()); + assert_eq!(parsed.preimage(), preimage); + assert_eq!(parsed.payment_hash(), payment_hash); + } + + #[test] + fn test_build_with_derived_key_for_refund_invoice() { + use core::convert::TryFrom; + + let expanded_key = ExpandedKey::new([42; 32]); + let entropy = FixedEntropy {}; + let nonce = Nonce::from_entropy_source(&entropy); + let secp_ctx = Secp256k1::new(); + let payment_id = PaymentId([1; 32]); + let preimage = PaymentPreimage([2; 32]); + let payment_hash = PaymentHash(*sha256::Hash::hash(&preimage.0).as_byte_array()); + + let invoice = RefundBuilder::deriving_signing_pubkey( + payer_pubkey(), + &expanded_key, + nonce, + &secp_ctx, + 1000, + payment_id, + ) + .unwrap() + .path(blinded_path()) + .experimental_foo(42) + .experimental_bar(43) + .build() + .unwrap() + .respond_with_no_std(payment_paths(), payment_hash, recipient_pubkey(), now()) + .unwrap() + .experimental_baz(44) + .build() + .unwrap() + .sign(recipient_sign) + .unwrap(); + + let proof = invoice + .payer_proof_builder(preimage) + .unwrap() + .build_with_derived_key(&expanded_key, nonce, payment_id, Some("refund")) + .unwrap(); + let parsed = PayerProof::try_from(proof.bytes().to_vec()).unwrap(); + + assert_eq!(parsed.preimage(), preimage); + assert_eq!(parsed.payment_hash(), payment_hash); + assert_eq!(parsed.payer_note().map(|note| note.to_string()), Some("refund".to_string())); + } +} From a1beb26ef6e0797ef0fdbeb7dafde10ca9f2ac65 Mon Sep 17 00:00:00 2001 From: Vincenzo Palazzo Date: Mon, 23 Mar 2026 12:37:45 +0100 Subject: [PATCH 3/9] test: cover experimental payer proof tlvs Add regression coverage for payer proofs that disclose experimental TLVs above the reserved signature range. 
The test builds a proof that includes an experimental invoice TLV and asserts that the proof survives a full byte round-trip. This captures the spec-compliance issue found during review against BOLTs PR #1295, where only 240..=1000 is reserved for signature-related TLVs. --- lightning/src/offers/payer_proof.rs | 98 ++++++++++++++++++++++++++--- 1 file changed, 90 insertions(+), 8 deletions(-) diff --git a/lightning/src/offers/payer_proof.rs b/lightning/src/offers/payer_proof.rs index 39962125b35..5233c79e899 100644 --- a/lightning/src/offers/payer_proof.rs +++ b/lightning/src/offers/payer_proof.rs @@ -767,6 +767,80 @@ mod tests { #[cfg(c_bindings)] use crate::offers::refund::RefundMaybeWithDerivedMetadataBuilder as RefundBuilder; use crate::offers::test_utils::*; + use bitcoin::hashes::Hash; + use bitcoin::secp256k1::{Keypair, Secp256k1, SecretKey}; + + const EXPERIMENTAL_TEST_TLV_TYPE: u64 = 1_000_000_001; + + fn write_tlv_record(bytes: &mut Vec, tlv_type: u64, value: &T) { + let mut value_bytes = Vec::new(); + value.write(&mut value_bytes).expect("Vec write should not fail"); + + BigSize(tlv_type).write(bytes).expect("Vec write should not fail"); + BigSize(value_bytes.len() as u64).write(bytes).expect("Vec write should not fail"); + bytes.extend_from_slice(&value_bytes); + } + + fn write_tlv_record_bytes(bytes: &mut Vec, tlv_type: u64, value_bytes: &[u8]) { + BigSize(tlv_type).write(bytes).expect("Vec write should not fail"); + BigSize(value_bytes.len() as u64).write(bytes).expect("Vec write should not fail"); + bytes.extend_from_slice(value_bytes); + } + + fn build_round_trip_proof_with_included_experimental_tlv() -> PayerProof { + let secp_ctx = Secp256k1::new(); + + let payer_secret = SecretKey::from_slice(&[42; 32]).unwrap(); + let payer_keys = Keypair::from_secret_key(&secp_ctx, &payer_secret); + let payer_id = payer_keys.public_key(); + + let issuer_secret = SecretKey::from_slice(&[43; 32]).unwrap(); + let issuer_keys = Keypair::from_secret_key(&secp_ctx, 
&issuer_secret); + let issuer_signing_pubkey = issuer_keys.public_key(); + + let preimage = PaymentPreimage([44; 32]); + let payment_hash = PaymentHash(sha256::Hash::hash(&preimage.0).to_byte_array()); + + let mut invoice_bytes = Vec::new(); + write_tlv_record_bytes(&mut invoice_bytes, PAYER_METADATA_TYPE, &[45; 32]); + write_tlv_record(&mut invoice_bytes, INVOICE_REQUEST_PAYER_ID_TYPE, &payer_id); + write_tlv_record(&mut invoice_bytes, INVOICE_PAYMENT_HASH_TYPE, &payment_hash); + write_tlv_record(&mut invoice_bytes, INVOICE_NODE_ID_TYPE, &issuer_signing_pubkey); + write_tlv_record_bytes( + &mut invoice_bytes, + EXPERIMENTAL_TEST_TLV_TYPE, + b"experimental-payer-proof-field", + ); + + let invoice_message = TaggedHash::from_valid_tlv_stream_bytes(SIGNATURE_TAG, &invoice_bytes); + let invoice_signature = + secp_ctx.sign_schnorr_no_aux_rand(invoice_message.as_digest(), &issuer_keys); + + let included_types: BTreeSet = [ + INVOICE_REQUEST_PAYER_ID_TYPE, + INVOICE_PAYMENT_HASH_TYPE, + INVOICE_NODE_ID_TYPE, + EXPERIMENTAL_TEST_TLV_TYPE, + ] + .into_iter() + .collect(); + let disclosure = compute_selective_disclosure(&invoice_bytes, &included_types).unwrap(); + + let unsigned = UnsignedPayerProof { + invoice_signature, + preimage, + payer_id, + payment_hash, + issuer_signing_pubkey, + invoice_bytes, + included_types, + disclosure, + }; + + unsigned + .sign(|message| Ok(secp_ctx.sign_schnorr_no_aux_rand(message, &payer_keys)), None) + .unwrap() + } #[test] fn test_selective_disclosure_computation() { @@ -1074,28 +1148,36 @@ mod tests { if tlv_type == PAYER_METADATA_TYPE { return Err(PayerProofError::InvreqMetadataNotAllowed); } - if tlv_type >= TLV_SIGNATURE { + if SIGNATURE_TYPES.contains(&tlv_type) { return Err(PayerProofError::SignatureTypeNotAllowed); } Ok(()) } - // All types >= 240 must be rejected + // Signature-range types 240..=1000 must be rejected. 
assert!(matches!(check_include_type(240), Err(PayerProofError::SignatureTypeNotAllowed))); assert!(matches!(check_include_type(250), Err(PayerProofError::SignatureTypeNotAllowed))); assert!(matches!(check_include_type(1000), Err(PayerProofError::SignatureTypeNotAllowed))); - // Types > 1000 (experimental) must also be rejected - assert!(matches!(check_include_type(1001), Err(PayerProofError::SignatureTypeNotAllowed))); - assert!(matches!( - check_include_type(u64::MAX), - Err(PayerProofError::SignatureTypeNotAllowed) - )); + // Types above 1000 are experimental/non-signature TLVs and should remain includable. + assert!(check_include_type(1001).is_ok()); + assert!(check_include_type(u64::MAX).is_ok()); // Just below the boundary assert!(check_include_type(239).is_ok()); // Payer metadata still rejected assert!(matches!(check_include_type(0), Err(PayerProofError::InvreqMetadataNotAllowed))); } + #[test] + fn test_round_trip_accepts_included_experimental_tlv() { + let proof = build_round_trip_proof_with_included_experimental_tlv(); + let result = PayerProof::try_from(proof.bytes().to_vec()); + assert!( + result.is_ok(), + "Included experimental TLVs should survive payer proof parsing: {:?}", + result + ); + } + /// Test that unknown even TLV types >= 240 are rejected during parsing. /// /// Per BOLT convention, even types are mandatory-to-understand. The parser From 6702c8f4ab9844d570464708fc35099eb1a5a119 Mon Sep 17 00:00:00 2001 From: Vincenzo Palazzo Date: Mon, 23 Mar 2026 12:37:49 +0100 Subject: [PATCH 4/9] fix: allow experimental tlvs in payer proofs Treat only the 240..=1000 range as reserved for payer-proof and signature TLVs. This updates the inclusion gate, preserves TLV ordering when serializing proofs that contain experimental records, and teaches the parser to accept non-signature invoice TLVs above 1000. The change makes the implementation match the spec text reviewed in BOLTs PR #1295 while keeping the experimental round-trip test green. 
--- lightning/src/offers/payer_proof.rs | 24 ++++++++++++++++-------- 1 file changed, 16 insertions(+), 8 deletions(-) diff --git a/lightning/src/offers/payer_proof.rs b/lightning/src/offers/payer_proof.rs index 5233c79e899..b6613589ca6 100644 --- a/lightning/src/offers/payer_proof.rs +++ b/lightning/src/offers/payer_proof.rs @@ -168,13 +168,13 @@ impl<'a> PayerProofBuilder<'a> { /// Include a specific TLV type in the proof. /// /// Returns an error if the type is not allowed (e.g., invreq_metadata or - /// types >= 240 which are in the signature/payer-proof range and handled - /// separately). + /// types in the signature/payer-proof range (240..=1000), which are handled + /// separately). pub fn include_type(mut self, tlv_type: u64) -> Result { if tlv_type == PAYER_METADATA_TYPE { return Err(PayerProofError::InvreqMetadataNotAllowed); } - if tlv_type >= TLV_SIGNATURE { + if SIGNATURE_TYPES.contains(&tlv_type) { return Err(PayerProofError::SignatureTypeNotAllowed); } self.included_types.insert(tlv_type); @@ -327,11 +327,11 @@ impl UnsignedPayerProof { fn serialize_payer_proof(&self, payer_signature: &Signature, note: Option<&str>) -> Vec { let mut bytes = Vec::new(); - // Filter out SIGNATURE_TYPES defensively: the invoice bytes contain the - // invoice's own signature (type 240) which must not appear as an included - // invoice record — the payer proof writes its own signature TLV below. + // Preserve TLV ordering by emitting included invoice records below the + // payer-proof range first, then payer-proof TLVs (240..=250), then any + // disclosed experimental invoice records above the reserved range.
for record in TlvStream::new(&self.invoice_bytes).filter(|r| { - self.included_types.contains(&r.r#type) && !SIGNATURE_TYPES.contains(&r.r#type) + self.included_types.contains(&r.r#type) && r.r#type < TLV_SIGNATURE }) { bytes.extend_from_slice(record.record_bytes); } @@ -383,6 +383,14 @@ impl UnsignedPayerProof { payer_signature.write(&mut bytes).expect("Vec write should not fail"); bytes.extend_from_slice(note_bytes); + for record in TlvStream::new(&self.invoice_bytes).filter(|r| { + self.included_types.contains(&r.r#type) + && !SIGNATURE_TYPES.contains(&r.r#type) + && r.r#type > *SIGNATURE_TYPES.end() + }) { + bytes.extend_from_slice(record.record_bytes); + } + bytes } } @@ -592,7 +600,7 @@ impl TryFrom> for PayerProof { if tlv_type == PAYER_METADATA_TYPE { return Err(Bolt12ParseError::Decode(DecodeError::InvalidValue)); } - if tlv_type < TLV_SIGNATURE { + if !SIGNATURE_TYPES.contains(&tlv_type) { // Included invoice TLV record (passthrough for merkle // reconstruction). included_types.insert(tlv_type); From 446435281915191a525a6089d6b66890b505cc65 Mon Sep 17 00:00:00 2001 From: Vincenzo Palazzo Date: Mon, 23 Mar 2026 12:37:53 +0100 Subject: [PATCH 5/9] test: cover trailing omitted payer proof markers Add a round-trip test for proofs that omit multiple TLVs after the last disclosed field. The writer already emits this shape when trailing fields are withheld, so the new test documents that the parser must accept it as well. This locks in the review finding from the BOLTs PR discussion before the parser change lands. 
--- lightning/src/offers/payer_proof.rs | 60 +++++++++++++++++++++++++++++ 1 file changed, 60 insertions(+) diff --git a/lightning/src/offers/payer_proof.rs b/lightning/src/offers/payer_proof.rs index b6613589ca6..8385af2a2f2 100644 --- a/lightning/src/offers/payer_proof.rs +++ b/lightning/src/offers/payer_proof.rs @@ -850,6 +850,55 @@ mod tests { .unwrap() } + fn build_round_trip_proof_with_multiple_trailing_omitted_tlvs() -> PayerProof { + let secp_ctx = Secp256k1::new(); + + let payer_secret = SecretKey::from_slice(&[52; 32]).unwrap(); + let payer_keys = Keypair::from_secret_key(&secp_ctx, &payer_secret); + let payer_id = payer_keys.public_key(); + + let issuer_secret = SecretKey::from_slice(&[53; 32]).unwrap(); + let issuer_keys = Keypair::from_secret_key(&secp_ctx, &issuer_secret); + let issuer_signing_pubkey = issuer_keys.public_key(); + + let preimage = PaymentPreimage([54; 32]); + let payment_hash = PaymentHash(sha256::Hash::hash(&preimage.0).to_byte_array()); + + let mut invoice_bytes = Vec::new(); + write_tlv_record_bytes(&mut invoice_bytes, PAYER_METADATA_TYPE, &[55; 32]); + write_tlv_record(&mut invoice_bytes, INVOICE_REQUEST_PAYER_ID_TYPE, &payer_id); + write_tlv_record(&mut invoice_bytes, INVOICE_PAYMENT_HASH_TYPE, &payment_hash); + write_tlv_record(&mut invoice_bytes, INVOICE_NODE_ID_TYPE, &issuer_signing_pubkey); + write_tlv_record_bytes(&mut invoice_bytes, 1_000_000_001, b"first-omitted-experimental"); + write_tlv_record_bytes(&mut invoice_bytes, 1_000_000_003, b"second-omitted-experimental"); + + let invoice_message = TaggedHash::from_valid_tlv_stream_bytes(SIGNATURE_TAG, &invoice_bytes); + let invoice_signature = + secp_ctx.sign_schnorr_no_aux_rand(invoice_message.as_digest(), &issuer_keys); + + let included_types: BTreeSet = + [INVOICE_REQUEST_PAYER_ID_TYPE, INVOICE_PAYMENT_HASH_TYPE, INVOICE_NODE_ID_TYPE] + .into_iter() + .collect(); + let disclosure = compute_selective_disclosure(&invoice_bytes, &included_types).unwrap(); + 
assert_eq!(disclosure.omitted_markers, vec![177, 178]); + + let unsigned = UnsignedPayerProof { + invoice_signature, + preimage, + payer_id, + payment_hash, + issuer_signing_pubkey, + invoice_bytes, + included_types, + disclosure, + }; + + unsigned + .sign(|message| Ok(secp_ctx.sign_schnorr_no_aux_rand(message, &payer_keys)), None) + .unwrap() + } + #[test] fn test_selective_disclosure_computation() { // Test that the merkle selective disclosure works correctly @@ -1186,6 +1235,17 @@ mod tests { ); } + #[test] + fn test_round_trip_accepts_multiple_trailing_omitted_tlvs() { + let proof = build_round_trip_proof_with_multiple_trailing_omitted_tlvs(); + let result = PayerProof::try_from(proof.bytes().to_vec()); + assert!( + result.is_ok(), + "Multiple trailing omitted TLVs should survive payer proof parsing: {:?}", + result + ); + } + /// Test that unknown even TLV types >= 240 are rejected during parsing. /// /// Per BOLT convention, even types are mandatory-to-understand. The parser From d93176233820555bc1eb6e923fc54c01604af174 Mon Sep 17 00:00:00 2001 From: Vincenzo Palazzo Date: Mon, 23 Mar 2026 12:38:05 +0100 Subject: [PATCH 6/9] test: expose disclosed payer proof fields Add a compile-time and runtime regression test for selectively disclosed fields. The new test constructs a proof that reveals description, issuer, invoice amount, and creation time, then asserts that a parsed proof can expose those values. This captures the API gap found during review: the proof carried the bytes needed for verification, but discarded the disclosed values themselves. 
--- lightning/src/offers/payer_proof.rs | 78 +++++++++++++++++++++++++++++ 1 file changed, 78 insertions(+) diff --git a/lightning/src/offers/payer_proof.rs b/lightning/src/offers/payer_proof.rs index 8385af2a2f2..d0f79f324a7 100644 --- a/lightning/src/offers/payer_proof.rs +++ b/lightning/src/offers/payer_proof.rs @@ -775,8 +775,10 @@ mod tests { #[cfg(c_bindings)] use crate::offers::refund::RefundMaybeWithDerivedMetadataBuilder as RefundBuilder; use crate::offers::test_utils::*; + use crate::util::ser::HighZeroBytesDroppedBigSize; use bitcoin::hashes::Hash; use bitcoin::secp256k1::{Keypair, Secp256k1, SecretKey}; + use core::time::Duration; const EXPERIMENTAL_TEST_TLV_TYPE: u64 = 1_000_000_001; @@ -899,6 +901,71 @@ mod tests { .unwrap() } + fn build_round_trip_proof_with_disclosed_fields() -> PayerProof { + let secp_ctx = Secp256k1::new(); + + let payer_secret = SecretKey::from_slice(&[62; 32]).unwrap(); + let payer_keys = Keypair::from_secret_key(&secp_ctx, &payer_secret); + let payer_id = payer_keys.public_key(); + + let issuer_secret = SecretKey::from_slice(&[63; 32]).unwrap(); + let issuer_keys = Keypair::from_secret_key(&secp_ctx, &issuer_secret); + let issuer_signing_pubkey = issuer_keys.public_key(); + + let preimage = PaymentPreimage([64; 32]); + let payment_hash = PaymentHash(sha256::Hash::hash(&preimage.0).to_byte_array()); + + let mut invoice_bytes = Vec::new(); + write_tlv_record_bytes(&mut invoice_bytes, PAYER_METADATA_TYPE, &[65; 32]); + write_tlv_record_bytes(&mut invoice_bytes, OFFER_DESCRIPTION_TYPE, b"coffee beans"); + write_tlv_record_bytes(&mut invoice_bytes, OFFER_ISSUER_TYPE, b"LDK Roastery"); + write_tlv_record(&mut invoice_bytes, INVOICE_REQUEST_PAYER_ID_TYPE, &payer_id); + write_tlv_record( + &mut invoice_bytes, + INVOICE_CREATED_AT_TYPE, + &HighZeroBytesDroppedBigSize(1_700_000_000u64), + ); + write_tlv_record(&mut invoice_bytes, INVOICE_PAYMENT_HASH_TYPE, &payment_hash); + write_tlv_record( + &mut invoice_bytes, + INVOICE_AMOUNT_TYPE, + 
&HighZeroBytesDroppedBigSize(42_000u64), + ); + write_tlv_record(&mut invoice_bytes, INVOICE_NODE_ID_TYPE, &issuer_signing_pubkey); + + let invoice_message = TaggedHash::from_valid_tlv_stream_bytes(SIGNATURE_TAG, &invoice_bytes); + let invoice_signature = + secp_ctx.sign_schnorr_no_aux_rand(invoice_message.as_digest(), &issuer_keys); + + let included_types: BTreeSet = [ + OFFER_DESCRIPTION_TYPE, + OFFER_ISSUER_TYPE, + INVOICE_REQUEST_PAYER_ID_TYPE, + INVOICE_CREATED_AT_TYPE, + INVOICE_PAYMENT_HASH_TYPE, + INVOICE_AMOUNT_TYPE, + INVOICE_NODE_ID_TYPE, + ] + .into_iter() + .collect(); + let disclosure = compute_selective_disclosure(&invoice_bytes, &included_types).unwrap(); + + let unsigned = UnsignedPayerProof { + invoice_signature, + preimage, + payer_id, + payment_hash, + issuer_signing_pubkey, + invoice_bytes, + included_types, + disclosure, + }; + + unsigned + .sign(|message| Ok(secp_ctx.sign_schnorr_no_aux_rand(message, &payer_keys)), None) + .unwrap() + } + #[test] fn test_selective_disclosure_computation() { // Test that the merkle selective disclosure works correctly @@ -1246,6 +1313,17 @@ mod tests { ); } + #[test] + fn test_parsed_proof_exposes_disclosed_fields() { + let proof = build_round_trip_proof_with_disclosed_fields(); + let parsed = PayerProof::try_from(proof.bytes().to_vec()).unwrap(); + + assert_eq!(parsed.offer_description().map(|s| s.0), Some("coffee beans")); + assert_eq!(parsed.offer_issuer().map(|s| s.0), Some("LDK Roastery")); + assert_eq!(parsed.invoice_amount_msats(), Some(42_000)); + assert_eq!(parsed.invoice_created_at(), Some(Duration::from_secs(1_700_000_000))); + } + /// Test that unknown even TLV types >= 240 are rejected during parsing. /// /// Per BOLT convention, even types are mandatory-to-understand. 
The parser From 51c77e1b41653191bf010478fb7b51d41ad4224f Mon Sep 17 00:00:00 2001 From: Vincenzo Palazzo Date: Mon, 23 Mar 2026 12:38:14 +0100 Subject: [PATCH 7/9] fix: expose disclosed payer proof fields Preserve selected invoice fields inside PayerProof and add accessors for verifiers. The parser and builder now both populate a shared disclosed-field structure so locally produced proofs and parsed proofs expose the same API surface. This keeps selective disclosure useful to callers instead of limiting the proof to merkle reconstruction and signature verification only. --- lightning/src/offers/payer_proof.rs | 120 +++++++++++++++++++++++++--- 1 file changed, 108 insertions(+), 12 deletions(-) diff --git a/lightning/src/offers/payer_proof.rs b/lightning/src/offers/payer_proof.rs index d0f79f324a7..0f7e62b92c4 100644 --- a/lightning/src/offers/payer_proof.rs +++ b/lightning/src/offers/payer_proof.rs @@ -35,7 +35,7 @@ use crate::offers::offer::{OFFER_DESCRIPTION_TYPE, OFFER_ISSUER_TYPE}; use crate::offers::parse::Bech32Encode; use crate::offers::payer::PAYER_METADATA_TYPE; use crate::types::payment::{PaymentHash, PaymentPreimage}; -use crate::util::ser::{BigSize, Readable, Writeable}; +use crate::util::ser::{BigSize, HighZeroBytesDroppedBigSize, Readable, Writeable}; use lightning_types::string::PrintableString; use bitcoin::hashes::{sha256, Hash, HashEngine}; @@ -43,6 +43,7 @@ use bitcoin::secp256k1::schnorr::Signature; use bitcoin::secp256k1::{Message, PublicKey, Secp256k1}; use core::convert::TryFrom; +use core::time::Duration; #[allow(unused_imports)] use crate::prelude::*; @@ -118,6 +119,15 @@ struct PayerProofContents { invoice_signature: Signature, payer_signature: Signature, payer_note: Option, + disclosed_fields: DisclosedFields, +} + +#[derive(Clone, Debug, Default)] +struct DisclosedFields { + offer_description: Option, + offer_issuer: Option, + invoice_amount_msats: Option, + invoice_created_at: Option, } /// Builds a [`PayerProof`] from a paid invoice 
and its preimage. @@ -243,6 +253,10 @@ impl<'a> PayerProofBuilder<'a> { for r in TlvStream::new(&invoice_bytes).filter(|r| !SIGNATURE_TYPES.contains(&r.r#type)) { bytes_without_sig.extend_from_slice(r.record_bytes); } + let disclosed_fields = extract_disclosed_fields( + TlvStream::new(&invoice_bytes) + .filter(|r| self.included_types.contains(&r.r#type) && !SIGNATURE_TYPES.contains(&r.r#type)), + )?; let disclosure = merkle::compute_selective_disclosure(&bytes_without_sig, &self.included_types)?; @@ -257,6 +271,7 @@ impl<'a> PayerProofBuilder<'a> { issuer_signing_pubkey: self.invoice.signing_pubkey(), invoice_bytes, included_types: self.included_types, + disclosed_fields, disclosure, }) } @@ -271,6 +286,7 @@ struct UnsignedPayerProof { issuer_signing_pubkey: PublicKey, invoice_bytes: Vec, included_types: BTreeSet, + disclosed_fields: DisclosedFields, disclosure: SelectiveDisclosure, } @@ -299,6 +315,7 @@ impl UnsignedPayerProof { invoice_signature: self.invoice_signature, payer_signature, payer_note: note.map(String::from), + disclosed_fields: self.disclosed_fields, }, merkle_root: self.disclosure.merkle_root, }) @@ -426,6 +443,26 @@ impl PayerProof { self.contents.payer_signature } + /// The disclosed offer description, if included in the proof. + pub fn offer_description(&self) -> Option> { + self.contents.disclosed_fields.offer_description.as_deref().map(PrintableString) + } + + /// The disclosed offer issuer, if included in the proof. + pub fn offer_issuer(&self) -> Option> { + self.contents.disclosed_fields.offer_issuer.as_deref().map(PrintableString) + } + + /// The disclosed invoice amount, if included in the proof. + pub fn invoice_amount_msats(&self) -> Option { + self.contents.disclosed_fields.invoice_amount_msats + } + + /// The disclosed invoice creation time, if included in the proof. + pub fn invoice_created_at(&self) -> Option { + self.contents.disclosed_fields.invoice_created_at + } + /// The payer's note, if any. 
pub fn payer_note(&self) -> Option> { self.contents.payer_note.as_deref().map(PrintableString) @@ -473,6 +510,47 @@ fn validate_tlv_framing(bytes: &[u8]) -> Result<(), crate::ln::msgs::DecodeError Ok(()) } +fn update_disclosed_fields( + record: &crate::offers::merkle::TlvRecord<'_>, disclosed_fields: &mut DisclosedFields, +) -> Result<(), crate::ln::msgs::DecodeError> { + use crate::ln::msgs::DecodeError; + + match record.r#type { + OFFER_DESCRIPTION_TYPE => { + disclosed_fields.offer_description = Some( + String::from_utf8(record.value_bytes.to_vec()).map_err(|_| DecodeError::InvalidValue)?, + ); + }, + OFFER_ISSUER_TYPE => { + disclosed_fields.offer_issuer = Some( + String::from_utf8(record.value_bytes.to_vec()).map_err(|_| DecodeError::InvalidValue)?, + ); + }, + INVOICE_CREATED_AT_TYPE => { + disclosed_fields.invoice_created_at = Some(Duration::from_secs( + record.read_value::>()?.0, + )); + }, + INVOICE_AMOUNT_TYPE => { + disclosed_fields.invoice_amount_msats = + Some(record.read_value::>()?.0); + }, + _ => {}, + } + + Ok(()) +} + +fn extract_disclosed_fields<'a>( + records: impl core::iter::Iterator>, +) -> Result { + let mut disclosed_fields = DisclosedFields::default(); + for record in records { + update_disclosed_fields(&record, &mut disclosed_fields)?; + } + Ok(disclosed_fields) +} + // Payer proofs use manual TLV parsing rather than `ParsedMessage` / `tlv_stream!` // because of their hybrid structure: a dynamic, variable set of included invoice // TLV records (types 0-239, preserved as raw bytes for merkle reconstruction) plus @@ -502,6 +580,7 @@ impl TryFrom> for PayerProof { let mut preimage: Option = None; let mut payer_signature: Option = None; let mut payer_note: Option = None; + let mut disclosed_fields = DisclosedFields::default(); let mut leaf_hashes: Vec = Vec::new(); let mut omitted_markers: Vec = Vec::new(); @@ -522,6 +601,7 @@ impl TryFrom> for PayerProof { } } prev_tlv_type = Some(tlv_type); + update_disclosed_fields(&record, &mut 
disclosed_fields)?; match tlv_type { INVOICE_REQUEST_PAYER_ID_TYPE => { @@ -677,17 +757,18 @@ impl TryFrom> for PayerProof { Ok(PayerProof { bytes, - contents: PayerProofContents { - payer_id, - payment_hash, - issuer_signing_pubkey, - preimage, - invoice_signature, - payer_signature, - payer_note, - }, - merkle_root, - }) + contents: PayerProofContents { + payer_id, + payment_hash, + issuer_signing_pubkey, + preimage, + invoice_signature, + payer_signature, + payer_note, + disclosed_fields, + }, + merkle_root, + }) } } @@ -834,6 +915,10 @@ mod tests { ] .into_iter() .collect(); + let disclosed_fields = extract_disclosed_fields( + TlvStream::new(&invoice_bytes).filter(|r| included_types.contains(&r.r#type)), + ) + .unwrap(); let disclosure = compute_selective_disclosure(&invoice_bytes, &included_types).unwrap(); let unsigned = UnsignedPayerProof { @@ -844,6 +929,7 @@ mod tests { issuer_signing_pubkey, invoice_bytes, included_types, + disclosed_fields, disclosure, }; @@ -882,6 +968,10 @@ mod tests { [INVOICE_REQUEST_PAYER_ID_TYPE, INVOICE_PAYMENT_HASH_TYPE, INVOICE_NODE_ID_TYPE] .into_iter() .collect(); + let disclosed_fields = extract_disclosed_fields( + TlvStream::new(&invoice_bytes).filter(|r| included_types.contains(&r.r#type)), + ) + .unwrap(); let disclosure = compute_selective_disclosure(&invoice_bytes, &included_types).unwrap(); assert_eq!(disclosure.omitted_markers, vec![177, 178]); @@ -893,6 +983,7 @@ mod tests { issuer_signing_pubkey, invoice_bytes, included_types, + disclosed_fields, disclosure, }; @@ -948,6 +1039,10 @@ mod tests { ] .into_iter() .collect(); + let disclosed_fields = extract_disclosed_fields( + TlvStream::new(&invoice_bytes).filter(|r| included_types.contains(&r.r#type)), + ) + .unwrap(); let disclosure = compute_selective_disclosure(&invoice_bytes, &included_types).unwrap(); let unsigned = UnsignedPayerProof { @@ -958,6 +1053,7 @@ mod tests { issuer_signing_pubkey, invoice_bytes, included_types, + disclosed_fields, disclosure, }; From 
849e01ec0ab8cc339988b5a95575c0be2730333c Mon Sep 17 00:00:00 2001 From: Vincenzo Palazzo Date: Wed, 25 Mar 2026 16:47:18 +0100 Subject: [PATCH 8/9] fix: resolve payer proof CI failures Fix the formatting drift introduced by the payer proof follow-up cherry-picks and update the stale splicing test to use the current RecipientOnionFields::secret_only signature so the CI build matrix compiles again. --- lightning/src/offers/payer_proof.rs | 53 ++++++++++++++++------------- 1 file changed, 29 insertions(+), 24 deletions(-) diff --git a/lightning/src/offers/payer_proof.rs b/lightning/src/offers/payer_proof.rs index 0f7e62b92c4..bee1c8217bd 100644 --- a/lightning/src/offers/payer_proof.rs +++ b/lightning/src/offers/payer_proof.rs @@ -253,10 +253,10 @@ impl<'a> PayerProofBuilder<'a> { for r in TlvStream::new(&invoice_bytes).filter(|r| !SIGNATURE_TYPES.contains(&r.r#type)) { bytes_without_sig.extend_from_slice(r.record_bytes); } - let disclosed_fields = extract_disclosed_fields( - TlvStream::new(&invoice_bytes) - .filter(|r| self.included_types.contains(&r.r#type) && !SIGNATURE_TYPES.contains(&r.r#type)), - )?; + let disclosed_fields = + extract_disclosed_fields(TlvStream::new(&invoice_bytes).filter(|r| { + self.included_types.contains(&r.r#type) && !SIGNATURE_TYPES.contains(&r.r#type) + }))?; let disclosure = merkle::compute_selective_disclosure(&bytes_without_sig, &self.included_types)?; @@ -347,9 +347,9 @@ impl UnsignedPayerProof { // Preserve TLV ordering by emitting included invoice records below the // payer-proof range first, then payer-proof TLVs (240..=250), then any // disclosed experimental invoice records above the reserved range. 
- for record in TlvStream::new(&self.invoice_bytes).filter(|r| { - self.included_types.contains(&r.r#type) && r.r#type < TLV_SIGNATURE - }) { + for record in TlvStream::new(&self.invoice_bytes) + .filter(|r| self.included_types.contains(&r.r#type) && r.r#type < TLV_SIGNATURE) + { bytes.extend_from_slice(record.record_bytes); } @@ -518,12 +518,14 @@ fn update_disclosed_fields( match record.r#type { OFFER_DESCRIPTION_TYPE => { disclosed_fields.offer_description = Some( - String::from_utf8(record.value_bytes.to_vec()).map_err(|_| DecodeError::InvalidValue)?, + String::from_utf8(record.value_bytes.to_vec()) + .map_err(|_| DecodeError::InvalidValue)?, ); }, OFFER_ISSUER_TYPE => { disclosed_fields.offer_issuer = Some( - String::from_utf8(record.value_bytes.to_vec()).map_err(|_| DecodeError::InvalidValue)?, + String::from_utf8(record.value_bytes.to_vec()) + .map_err(|_| DecodeError::InvalidValue)?, ); }, INVOICE_CREATED_AT_TYPE => { @@ -757,18 +759,18 @@ impl TryFrom> for PayerProof { Ok(PayerProof { bytes, - contents: PayerProofContents { - payer_id, - payment_hash, - issuer_signing_pubkey, - preimage, - invoice_signature, - payer_signature, - payer_note, - disclosed_fields, - }, - merkle_root, - }) + contents: PayerProofContents { + payer_id, + payment_hash, + issuer_signing_pubkey, + preimage, + invoice_signature, + payer_signature, + payer_note, + disclosed_fields, + }, + merkle_root, + }) } } @@ -903,7 +905,8 @@ mod tests { b"experimental-payer-proof-field", ); - let invoice_message = TaggedHash::from_valid_tlv_stream_bytes(SIGNATURE_TAG, &invoice_bytes); + let invoice_message = + TaggedHash::from_valid_tlv_stream_bytes(SIGNATURE_TAG, &invoice_bytes); let invoice_signature = secp_ctx.sign_schnorr_no_aux_rand(invoice_message.as_digest(), &issuer_keys); @@ -960,7 +963,8 @@ mod tests { write_tlv_record_bytes(&mut invoice_bytes, 1_000_000_001, b"first-omitted-experimental"); write_tlv_record_bytes(&mut invoice_bytes, 1_000_000_003, b"second-omitted-experimental"); - let 
invoice_message = TaggedHash::from_valid_tlv_stream_bytes(SIGNATURE_TAG, &invoice_bytes); + let invoice_message = + TaggedHash::from_valid_tlv_stream_bytes(SIGNATURE_TAG, &invoice_bytes); let invoice_signature = secp_ctx.sign_schnorr_no_aux_rand(invoice_message.as_digest(), &issuer_keys); @@ -1024,7 +1028,8 @@ mod tests { ); write_tlv_record(&mut invoice_bytes, INVOICE_NODE_ID_TYPE, &issuer_signing_pubkey); - let invoice_message = TaggedHash::from_valid_tlv_stream_bytes(SIGNATURE_TAG, &invoice_bytes); + let invoice_message = + TaggedHash::from_valid_tlv_stream_bytes(SIGNATURE_TAG, &invoice_bytes); let invoice_signature = secp_ctx.sign_schnorr_no_aux_rand(invoice_message.as_digest(), &issuer_keys); From d5baee83aee0e2674020700f77918f46b3925ab6 Mon Sep 17 00:00:00 2001 From: Vincenzo Palazzo Date: Wed, 25 Mar 2026 17:18:23 +0100 Subject: [PATCH 9/9] fix: reconstruct omitted payer proof subtrees Allow payer proof merkle reconstruction to reuse hashes placed at omitted subtree roots instead of descending into already materialized descendants. --- lightning/src/offers/merkle.rs | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/lightning/src/offers/merkle.rs b/lightning/src/offers/merkle.rs index 76170feb042..85bbc6e53d7 100644 --- a/lightning/src/offers/merkle.rs +++ b/lightning/src/offers/merkle.rs @@ -645,9 +645,7 @@ pub(super) fn reconstruct_merkle_root<'a>( Some(tagged_branch_hash_from_engine(branch_tag.clone(), l, r)); }, (Some(_), None) => {}, - (None, _) => { - return Err(SelectiveDisclosureError::InsufficientMissingHashes); - }, + (None, _) => {}, }; } }