@@ -50,7 +50,7 @@ use crate::events::{self, Event, EventHandler, EventsProvider, InboundChannelFun
 use crate::ln::inbound_payment;
 use crate::ln::types::ChannelId;
 use crate::types::payment::{PaymentHash, PaymentPreimage, PaymentSecret};
-use crate::ln::channel::{self, Channel, ChannelError, ChannelUpdateStatus, FundedChannel, ShutdownResult, UpdateFulfillCommitFetch, OutboundV1Channel, ReconnectionMsg, InboundV1Channel, WithChannelContext};
+use crate::ln::channel::{self, duration_since_epoch, Channel, ChannelError, ChannelUpdateStatus, FundedChannel, InboundV1Channel, OutboundV1Channel, ReconnectionMsg, ShutdownResult, UpdateFulfillCommitFetch, WithChannelContext};
 use crate::ln::channel::PendingV2Channel;
 use crate::ln::channel_state::ChannelDetails;
 use crate::types::features::{Bolt12InvoiceFeatures, ChannelFeatures, ChannelTypeFeatures, InitFeatures, NodeFeatures};
@@ -59,7 +59,7 @@ use crate::types::features::Bolt11InvoiceFeatures;
 use crate::routing::router::{BlindedTail, InFlightHtlcs, Path, Payee, PaymentParameters, RouteParameters, RouteParametersConfig, Router, FixedRouter, Route};
 use crate::ln::onion_payment::{check_incoming_htlc_cltv, create_recv_pending_htlc_info, create_fwd_pending_htlc_info, decode_incoming_update_add_htlc_onion, HopConnector, InboundHTLCErr, NextPacketDetails, invalid_payment_err_data};
 use crate::ln::msgs;
-use crate::ln::onion_utils::{self};
+use crate::ln::onion_utils::{self, process_onion_success, HOLD_TIME_LEN};
 use crate::ln::onion_utils::{HTLCFailReason, LocalHTLCFailureReason};
 use crate::ln::msgs::{BaseMessageHandler, ChannelMessageHandler, CommitmentUpdate, DecodeError, LightningError, MessageSendEvent};
 #[cfg(test)]
@@ -88,6 +88,8 @@ use crate::util::ser::{BigSize, FixedLengthReader, LengthReadable, Readable, Rea
 use crate::util::logger::{Level, Logger, WithContext};
 use crate::util::errors::APIError;
 
+use crate::ln::onion_utils::AttributionData;
+
 #[cfg(async_payments)] use {
 crate::offers::offer::Amount,
 crate::offers::static_invoice::{DEFAULT_RELATIVE_EXPIRY as STATIC_INVOICE_DEFAULT_RELATIVE_EXPIRY, StaticInvoice, StaticInvoiceBuilder},
@@ -7239,8 +7241,14 @@ where
 pending_claim: PendingMPPClaimPointer(Arc::clone(pending_claim)),
 }
 });
+
+let mut attribution_data = AttributionData::new();
+attribution_data.update(&[], &htlc.prev_hop.incoming_packet_shared_secret, 0);
+attribution_data.crypt(&htlc.prev_hop.incoming_packet_shared_secret);
+
 self.claim_funds_from_hop(
 htlc.prev_hop, payment_preimage, payment_info.clone(),
+attribution_data,
 |_, definitely_duplicate| {
 debug_assert!(!definitely_duplicate, "We shouldn't claim duplicatively from a payment");
 (Some(MonitorUpdateCompletionAction::PaymentClaimed { payment_hash, pending_mpp_claim: this_mpp_claim }), raa_blocker)
@@ -7269,7 +7277,7 @@ where
 ComplFunc: FnOnce(Option<u64>, bool) -> (Option<MonitorUpdateCompletionAction>, Option<RAAMonitorUpdateBlockingAction>)
 >(
 &self, prev_hop: HTLCPreviousHopData, payment_preimage: PaymentPreimage,
-payment_info: Option<PaymentClaimDetails>, completion_action: ComplFunc,
+payment_info: Option<PaymentClaimDetails>, attribution_data: AttributionData, completion_action: ComplFunc,
 ) {
 let counterparty_node_id = prev_hop.counterparty_node_id.or_else(|| {
 let short_to_chan_info = self.short_to_chan_info.read().unwrap();
@@ -7282,15 +7290,17 @@ where
 channel_id: prev_hop.channel_id,
 htlc_id: prev_hop.htlc_id,
 };
-self.claim_mpp_part(htlc_source, payment_preimage, payment_info, completion_action)
+self.claim_mpp_part(htlc_source, payment_preimage, payment_info, attribution_data, completion_action)
 }
 
 fn claim_mpp_part<
 ComplFunc: FnOnce(Option<u64>, bool) -> (Option<MonitorUpdateCompletionAction>, Option<RAAMonitorUpdateBlockingAction>)
 >(
 &self, prev_hop: HTLCClaimSource, payment_preimage: PaymentPreimage,
-payment_info: Option<PaymentClaimDetails>, completion_action: ComplFunc,
+payment_info: Option<PaymentClaimDetails>, attribution_data: AttributionData, completion_action: ComplFunc,
 ) {
+log_info!(self.logger, "claim_mpp_part called");
+
 //TODO: Delay the claimed_funds relaying just like we do outbound relay!
 
 // If we haven't yet run background events assume we're still deserializing and shouldn't
@@ -7322,7 +7332,7 @@ where
 if let hash_map::Entry::Occupied(mut chan_entry) = peer_state.channel_by_id.entry(chan_id) {
 if let Some(chan) = chan_entry.get_mut().as_funded_mut() {
 let logger = WithChannelContext::from(&self.logger, &chan.context, None);
-let fulfill_res = chan.get_update_fulfill_htlc_and_commit(prev_hop.htlc_id, payment_preimage, payment_info, &&logger);
+let fulfill_res = chan.get_update_fulfill_htlc_and_commit(prev_hop.htlc_id, payment_preimage, payment_info, attribution_data, &&logger);
 
 match fulfill_res {
 UpdateFulfillCommitFetch::NewClaim { htlc_value_msat, monitor_update } => {
@@ -7474,9 +7484,16 @@ This indicates a bug inside LDK. Please report this error at https://github.com/
 forwarded_htlc_value_msat: Option<u64>, skimmed_fee_msat: Option<u64>, from_onchain: bool,
 startup_replay: bool, next_channel_counterparty_node_id: PublicKey,
 next_channel_outpoint: OutPoint, next_channel_id: ChannelId, next_user_channel_id: Option<u128>,
+attribution_data: Option<&AttributionData>, send_timestamp: Option<Duration>,
 ) {
+log_info!(self.logger, "claim_funds_internal - ONLY NON FINAL");
 match source {
 HTLCSource::OutboundRoute { session_priv, payment_id, path, bolt12_invoice, .. } => {
+if let Some(attribution_data) = attribution_data {
+process_onion_success(&self.secp_ctx, &self.logger, &path,
+&session_priv, attribution_data.clone());
+}
+
 debug_assert!(self.background_events_processed_since_startup.load(Ordering::Acquire),
 "We don't support claim_htlc claims during startup - monitors may not be available yet");
 debug_assert_eq!(next_channel_counterparty_node_id, path.hops[0].pubkey);
@@ -7493,7 +7510,27 @@ This indicates a bug inside LDK. Please report this error at https://github.com/
 let prev_user_channel_id = hop_data.user_channel_id;
 let prev_node_id = hop_data.counterparty_node_id;
 let completed_blocker = RAAMonitorUpdateBlockingAction::from_prev_hop_data(&hop_data);
-self.claim_funds_from_hop(hop_data, payment_preimage, None,
+
+let mut attribution_data = attribution_data
+.map_or(AttributionData::new(), |attribution_data| {
+let mut attribution_data = attribution_data.clone();
+
+attribution_data.shift_right();
+
+attribution_data
+});
+
+let now = duration_since_epoch();
+let hold_time = if let (Some(timestamp), Some(now)) = (send_timestamp, now) {
+u32::try_from(now.saturating_sub(timestamp).as_millis()).unwrap_or(u32::MAX)
+} else {
+0
+};
+
+attribution_data.update(&[], &hop_data.incoming_packet_shared_secret, hold_time);
+attribution_data.crypt(&hop_data.incoming_packet_shared_secret);
+
+self.claim_funds_from_hop(hop_data, payment_preimage, None, attribution_data,
 |htlc_claim_value_msat, definitely_duplicate| {
 let chan_to_release = Some(EventUnblockedChannel {
 counterparty_node_id: next_channel_counterparty_node_id,
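The hold time added in the hunk above is simply the elapsed time between the HTLC's send timestamp and the current wall-clock time, expressed in milliseconds, clamped to u32::MAX, and defaulting to zero when either reading is unavailable. Below is a minimal, self-contained Rust sketch of that arithmetic; the helper name hold_time_msecs and the std-based stand-in for duration_since_epoch are illustrative assumptions, not LDK API.

use std::time::{Duration, SystemTime, UNIX_EPOCH};

// Stand-in for duration_since_epoch() as imported above; assumed to return None
// when no wall clock is available.
fn duration_since_epoch() -> Option<Duration> {
    SystemTime::now().duration_since(UNIX_EPOCH).ok()
}

// Elapsed hold time in milliseconds, saturating at u32::MAX and falling back to 0
// when either timestamp is missing -- mirrors the hold_time expression in the diff.
fn hold_time_msecs(send_timestamp: Option<Duration>, now: Option<Duration>) -> u32 {
    if let (Some(timestamp), Some(now)) = (send_timestamp, now) {
        u32::try_from(now.saturating_sub(timestamp).as_millis()).unwrap_or(u32::MAX)
    } else {
        0
    }
}

fn main() {
    // An HTLC sent at t = 1000.000s and fulfilled at t = 1000.250s was held ~250 ms.
    let sent = Some(Duration::from_millis(1_000_000));
    let now = Some(Duration::from_millis(1_000_250));
    assert_eq!(hold_time_msecs(sent, now), 250);
    let _ = duration_since_epoch();
}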
@@ -8896,7 +8933,7 @@ This indicates a bug inside LDK. Please report this error at https://github.com/
 fn internal_update_fulfill_htlc(&self, counterparty_node_id: &PublicKey, msg: &msgs::UpdateFulfillHTLC) -> Result<(), MsgHandleErrInternal> {
 let funding_txo;
 let next_user_channel_id;
-let (htlc_source, forwarded_htlc_value, skimmed_fee_msat) = {
+let (htlc_source, forwarded_htlc_value, skimmed_fee_msat, send_timestamp) = {
 let per_peer_state = self.per_peer_state.read().unwrap();
 let peer_state_mutex = per_peer_state.get(counterparty_node_id)
 .ok_or_else(|| {
@@ -8937,7 +8974,8 @@ This indicates a bug inside LDK. Please report this error at https://github.com/
 };
 self.claim_funds_internal(htlc_source, msg.payment_preimage.clone(),
 Some(forwarded_htlc_value), skimmed_fee_msat, false, false, *counterparty_node_id,
-funding_txo, msg.channel_id, Some(next_user_channel_id),
+funding_txo, msg.channel_id, Some(next_user_channel_id), msg.attribution_data.as_ref(),
+send_timestamp,
 );
 
 Ok(())
@@ -9638,6 +9676,7 @@ This indicates a bug inside LDK. Please report this error at https://github.com/
 htlc_update.source, preimage,
 htlc_update.htlc_value_satoshis.map(|v| v * 1000), None, true,
 false, counterparty_node_id, funding_outpoint, channel_id, None,
+None, None,
 );
 } else {
 log_trace!(logger, "Failing HTLC with hash {} from our monitor", &htlc_update.payment_hash);
@@ -12154,6 +12193,7 @@ where
 }
 
 fn handle_update_fulfill_htlc(&self, counterparty_node_id: PublicKey, msg: &msgs::UpdateFulfillHTLC) {
+log_info!(self.logger, "Received update_fulfill_htlc: {:?}", msg);
 let _persistence_guard = PersistenceNotifierGuard::notify_on_drop(self);
 let _ = handle_error!(self, self.internal_update_fulfill_htlc(&counterparty_node_id, msg), counterparty_node_id);
 }
@@ -14905,7 +14945,7 @@ where
 // already (clearly) durably on disk in the `ChannelMonitor` so there's
 // no need to worry about getting it into others.
 channel_manager.claim_mpp_part(
-part.into(), payment_preimage, None,
+part.into(), payment_preimage, None, AttributionData::new(),
 |_, _|
 (Some(MonitorUpdateCompletionAction::PaymentClaimed { payment_hash, pending_mpp_claim }), pending_claim_ptr)
 );
@@ -15011,7 +15051,7 @@ where
 // channel is closed we just assume that it probably came from an on-chain claim.
 channel_manager.claim_funds_internal(source, preimage, Some(downstream_value), None,
 downstream_closed, true, downstream_node_id, downstream_funding,
-downstream_channel_id, None
+downstream_channel_id, None, None, None,
 );
 }
 