13 changes: 7 additions & 6 deletions fuzz/src/process_onion_failure.rs
@@ -9,10 +9,12 @@ use lightning::{
ln::{
channelmanager::{HTLCSource, PaymentId},
msgs::OnionErrorPacket,
onion_utils,
},
routing::router::{BlindedTail, Path, RouteHop, TrampolineHop},
types::features::{ChannelFeatures, NodeFeatures},
util::logger::Logger,
util::ser::Readable,
};

// Imports that need to be added manually
@@ -126,19 +128,18 @@ fn do_test<Out: test_logger::Output>(data: &[u8], out: Out) {
let failure_data = get_slice!(failure_len);

let attribution_data = if get_bool!() {
Some(lightning::ln::AttributionData {
hold_times: get_slice!(80).try_into().unwrap(),
hmacs: get_slice!(840).try_into().unwrap(),
})
let mut bytes = get_slice!(80 + 840);
let data: onion_utils::AttributionData = Readable::read(&mut bytes).unwrap();
Some(data)
} else {
None
};
let encrypted_packet =
OnionErrorPacket { data: failure_data.into(), attribution_data: attribution_data.clone() };
lightning::ln::process_onion_failure(&secp_ctx, &logger, &htlc_source, encrypted_packet);
onion_utils::process_onion_failure(&secp_ctx, &logger, &htlc_source, encrypted_packet);

if let Some(attribution_data) = attribution_data {
lightning::ln::decode_fulfill_attribution_data(
onion_utils::decode_fulfill_attribution_data(
&secp_ctx,
&logger,
&path,
2 changes: 1 addition & 1 deletion lightning/src/events/mod.rs
@@ -25,8 +25,8 @@ use crate::blinded_path::payment::{
use crate::chain::transaction;
use crate::ln::channel::FUNDING_CONF_DEADLINE_BLOCKS;
use crate::ln::channelmanager::{InterceptId, PaymentId, RecipientOnionFields};
use crate::ln::types::ChannelId;
use crate::ln::onion_utils::LocalHTLCFailureReason;
use crate::ln::types::ChannelId;
use crate::ln::msgs;
use crate::offers::invoice::Bolt12Invoice;
use crate::offers::invoice_request::InvoiceRequest;
8 changes: 0 additions & 8 deletions lightning/src/ln/mod.rs
@@ -52,14 +52,6 @@ pub(crate) mod interactivetxs;
// without the node parameter being mut. This is incorrect, and thus newer rustcs will complain
// about an unnecessary mut. Thus, we silence the unused_mut warning in two test modules below.

#[cfg(fuzzing)]
pub use onion_utils::decode_fulfill_attribution_data;
#[cfg(fuzzing)]
pub use onion_utils::process_onion_failure;

#[cfg(fuzzing)]
pub use onion_utils::AttributionData;

#[cfg(test)]
#[allow(unused_mut)]
mod async_payments_tests;
9 changes: 3 additions & 6 deletions lightning/src/ln/msgs.rs
@@ -4354,7 +4354,7 @@ mod tests {
InboundOnionForwardPayload, InboundOnionReceivePayload, OutboundTrampolinePayload,
TrampolineOnionPacket,
};
use crate::ln::onion_utils::{AttributionData, HMAC_COUNT, HMAC_LEN, HOLD_TIME_LEN, MAX_HOPS};
use crate::ln::onion_utils::AttributionData;
use crate::ln::types::ChannelId;
use crate::routing::gossip::{NodeAlias, NodeId};
use crate::types::features::{
@@ -5887,13 +5887,10 @@ mod tests {
channel_id: ChannelId::from_bytes([2; 32]),
htlc_id: 2316138423780173,
reason: [1; 32].to_vec(),
attribution_data: Some(AttributionData {
hold_times: [3; MAX_HOPS * HOLD_TIME_LEN],
hmacs: [3; HMAC_LEN * HMAC_COUNT],
}),
attribution_data: Some(AttributionData::new()),
};
let encoded_value = update_fail_htlc.encode();
let target_value = <Vec<u8>>::from_hex("020202020202020202020202020202020202020202020202020202020202020200083a840000034d0020010101010101010101010101010101010101010101010101010101010101010101fd03980303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303030303").unwrap();
let target_value = <Vec<u8>>::from_hex("020202020202020202020202020202020202020202020202020202020202020200083a840000034d0020010101010101010101010101010101010101010101010101010101010101010101fd03980000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000").unwrap();
assert_eq!(encoded_value, target_value);
}
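
One detail worth noting in the new target vector: the attribution data length prefix is unchanged at `fd0398`, a BigSize encoding of 0x398 = 920 bytes (the 80 hold-time bytes plus 840 HMAC bytes the fuzz harness reads); only the payload changes from the old all-0x03 fill to the all-zero contents of `AttributionData::new()`. A trivial, standalone sanity check of that arithmetic (not part of the PR):

```rust
fn main() {
	// 0x398 == 920 == 80 hold-time bytes + 840 HMAC bytes.
	assert_eq!(0x0398, 80 + 840);
}
```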

194 changes: 101 additions & 93 deletions lightning/src/ln/onion_utils.rs
@@ -7,7 +7,7 @@
// You may not use this file except in accordance with one or both of these
// licenses.

//! Utilities for handling and manipulating onions
//! Low-level onion manipulation logic and fields

use super::msgs::OnionErrorPacket;
use crate::blinded_path::BlindedHop;
@@ -981,49 +981,100 @@ mod fuzzy_onion_utils {
#[cfg(test)]
pub(crate) attribution_failed_channel: Option<u64>,
}

pub fn process_onion_failure<T: secp256k1::Signing, L: Deref>(
secp_ctx: &Secp256k1<T>, logger: &L, htlc_source: &HTLCSource,
encrypted_packet: OnionErrorPacket,
) -> DecodedOnionFailure
where
L::Target: Logger,
{
let (path, primary_session_priv) = match htlc_source {
HTLCSource::OutboundRoute { ref path, ref session_priv, .. } => (path, session_priv),
_ => unreachable!(),
};

if path.has_trampoline_hops() {
// If we have Trampoline hops, the outer onion session_priv is a hash of the inner one.
let session_priv_hash =
Sha256::hash(&primary_session_priv.secret_bytes()).to_byte_array();
let outer_session_priv =
SecretKey::from_slice(&session_priv_hash[..]).expect("You broke SHA-256!");
process_onion_failure_inner(
secp_ctx,
logger,
path,
&outer_session_priv,
Some(primary_session_priv),
encrypted_packet,
)
} else {
process_onion_failure_inner(
secp_ctx,
logger,
path,
primary_session_priv,
None,
encrypted_packet,
)
}
}

/// Decodes the attribution data that we got back from upstream on a payment we sent.
pub fn decode_fulfill_attribution_data<T: secp256k1::Signing, L: Deref>(
secp_ctx: &Secp256k1<T>, logger: &L, path: &Path, outer_session_priv: &SecretKey,
mut attribution_data: AttributionData,
) -> Vec<u32>
where
L::Target: Logger,
{
let mut hold_times = Vec::new();

// Only consider hops in the regular path for attribution data. Blinded path attribution data isn't accessible.
let shared_secrets =
construct_onion_keys_generic(secp_ctx, &path.hops, None, outer_session_priv)
.map(|(shared_secret, _, _, _, _)| shared_secret);

// Path length can reach 27 hops, but attribution data can only be conveyed back to the sender from the first 20
// hops. Determine the number of hops to be used for attribution data.
let attributable_hop_count = usize::min(path.hops.len(), MAX_HOPS);

for (route_hop_idx, shared_secret) in shared_secrets.enumerate().take(attributable_hop_count) {
attribution_data.crypt(shared_secret.as_ref());

// Calculate position relative to the last attributable hop. The last attributable hop is at position 0. We need
// to look at the chain of HMACs that does include all data up to the last attributable hop. Hold times beyond
// the last attributable hop will not be available.
let position = attributable_hop_count - route_hop_idx - 1;
let res = attribution_data.verify(&Vec::new(), shared_secret.as_ref(), position);
match res {
Ok(hold_time) => {
hold_times.push(hold_time);

// Shift attribution data to prepare for processing the next hop.
attribution_data.shift_left();
},
Err(()) => {
// We will hit this if there is a node on the path that does not support fulfill attribution data.
log_debug!(
logger,
"Invalid fulfill HMAC in attribution data for node at pos {}",
route_hop_idx
);

break;
},
}
}

hold_times
}
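
To make the position bookkeeping above concrete: `route_hop_idx` counts forward from the hop closest to the sender, while `position` counts back from the last attributable hop, mirroring `attributable_hop_count - route_hop_idx - 1`. A small standalone sketch (illustrative only, not part of this PR):

```rust
// Relation between route_hop_idx and position for a fully attributable path.
fn position_for(route_hop_idx: usize, attributable_hop_count: usize) -> usize {
	attributable_hop_count - route_hop_idx - 1
}

fn main() {
	// A 5-hop path: the first hop (closest to the sender) is at position 4;
	// the final hop is at position 0 and its HMAC chain is checked first.
	assert_eq!(position_for(0, 5), 4);
	assert_eq!(position_for(4, 5), 0);
}
```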
}
#[cfg(fuzzing)]
pub use self::fuzzy_onion_utils::*;
#[cfg(not(fuzzing))]
pub(crate) use self::fuzzy_onion_utils::*;
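
The two `use` statements above are the visibility trick this PR relies on: the items live in a private module and are re-exported as `pub` only when building for fuzzing, which is what allows the fuzz-only re-exports in `ln/mod.rs` to be dropped. A minimal, generic sketch of the pattern (hypothetical names, not LDK code):

```rust
mod fuzzy_internals {
	// Helpers that fuzz targets need to drive directly.
	pub fn internal_helper(x: u8) -> u8 {
		x.wrapping_add(1)
	}
}

// Fuzz builds see the items as `pub`; normal builds keep them crate-private.
#[cfg(fuzzing)]
pub use self::fuzzy_internals::*;
#[cfg(not(fuzzing))]
pub(crate) use self::fuzzy_internals::*;
```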

pub(crate) fn process_onion_failure<T: secp256k1::Signing, L: Deref>(
secp_ctx: &Secp256k1<T>, logger: &L, htlc_source: &HTLCSource,
encrypted_packet: OnionErrorPacket,
) -> DecodedOnionFailure
where
L::Target: Logger,
{
let (path, primary_session_priv) = match htlc_source {
HTLCSource::OutboundRoute { ref path, ref session_priv, .. } => (path, session_priv),
_ => unreachable!(),
};

if path.has_trampoline_hops() {
// If we have Trampoline hops, the outer onion session_priv is a hash of the inner one.
let session_priv_hash = Sha256::hash(&primary_session_priv.secret_bytes()).to_byte_array();
let outer_session_priv =
SecretKey::from_slice(&session_priv_hash[..]).expect("You broke SHA-256!");
process_onion_failure_inner(
secp_ctx,
logger,
path,
&outer_session_priv,
Some(primary_session_priv),
encrypted_packet,
)
} else {
process_onion_failure_inner(
secp_ctx,
logger,
path,
primary_session_priv,
None,
encrypted_packet,
)
}
}

/// Process failure we got back from upstream on a payment we sent (implying htlc_source is an
/// OutboundRoute).
fn process_onion_failure_inner<T: secp256k1::Signing, L: Deref>(
@@ -1468,56 +1519,6 @@
}
}

/// Decodes the attribution data that we got back from upstream on a payment we sent.
pub(crate) fn decode_fulfill_attribution_data<T: secp256k1::Signing, L: Deref>(
secp_ctx: &Secp256k1<T>, logger: &L, path: &Path, outer_session_priv: &SecretKey,
mut attribution_data: AttributionData,
) -> Vec<u32>
where
L::Target: Logger,
{
let mut hold_times = Vec::new();

// Only consider hops in the regular path for attribution data. Blinded path attribution data isn't accessible.
let shared_secrets =
construct_onion_keys_generic(secp_ctx, &path.hops, None, outer_session_priv)
.map(|(shared_secret, _, _, _, _)| shared_secret);

// Path length can reach 27 hops, but attribution data can only be conveyed back to the sender from the first 20
// hops. Determine the number of hops to be used for attribution data.
let attributable_hop_count = usize::min(path.hops.len(), MAX_HOPS);

for (route_hop_idx, shared_secret) in shared_secrets.enumerate().take(attributable_hop_count) {
attribution_data.crypt(shared_secret.as_ref());

// Calculate position relative to the last attributable hop. The last attributable hop is at position 0. We need
// to look at the chain of HMACs that does include all data up to the last attributable hop. Hold times beyond
// the last attributable hop will not be available.
let position = attributable_hop_count - route_hop_idx - 1;
let res = attribution_data.verify(&Vec::new(), shared_secret.as_ref(), position);
match res {
Ok(hold_time) => {
hold_times.push(hold_time);

// Shift attribution data to prepare for processing the next hop.
attribution_data.shift_left();
},
Err(()) => {
// We will hit this if there is a node on the path that does not support fulfill attribution data.
log_debug!(
logger,
"Invalid fulfill HMAC in attribution data for node at pos {}",
route_hop_idx
);

break;
},
}
}

hold_times
}

const BADONION: u16 = 0x8000;
const PERM: u16 = 0x4000;
const NODE: u16 = 0x2000;
@@ -2522,6 +2523,7 @@
}

/// Build a payment onion, returning the first hop msat and cltv values as well.
///
/// `cur_block_height` should be set to the best known block height + 1.
pub fn create_payment_onion<T: secp256k1::Signing>(
secp_ctx: &Secp256k1<T>, path: &Path, session_priv: &SecretKey, total_msat: u64,
@@ -2719,13 +2721,19 @@ pub(crate) const HMAC_LEN: usize = 4;
pub(crate) const HMAC_COUNT: usize = MAX_HOPS * (MAX_HOPS + 1) / 2;

#[derive(Clone, Debug, Hash, PartialEq, Eq)]
pub(crate) struct AttributionData {
pub hold_times: [u8; MAX_HOPS * HOLD_TIME_LEN],
pub hmacs: [u8; HMAC_LEN * HMAC_COUNT],
/// Attribution data allows the sender of an HTLC to identify which hop failed an HTLC robustly,
/// preventing earlier hops from corrupting the HTLC failure information (or at least allowing the
/// sender to identify the earliest hop which corrupted HTLC failure information).
///
/// Additionally, it allows a sender to identify how long each hop along a path held an HTLC, with
/// 100ms granularity.
pub struct AttributionData {
hold_times: [u8; MAX_HOPS * HOLD_TIME_LEN],
hmacs: [u8; HMAC_LEN * HMAC_COUNT],
}

impl AttributionData {
pub fn new() -> Self {
pub(crate) fn new() -> Self {
Self { hold_times: [0; MAX_HOPS * HOLD_TIME_LEN], hmacs: [0; HMAC_LEN * HMAC_COUNT] }
}
}
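
For a sense of the sizes involved: assuming `MAX_HOPS` is 20 and `HOLD_TIME_LEN` is 4 (values consistent with the 80- and 840-byte slices the fuzz harness now deserializes, and with `HMAC_LEN = 4` above), the two arrays work out as follows. A standalone sketch under those assumptions, not code from the crate:

```rust
const MAX_HOPS: usize = 20; // assumed
const HOLD_TIME_LEN: usize = 4; // assumed; hold times decode as u32 values in 100ms units
const HMAC_LEN: usize = 4;
const HMAC_COUNT: usize = MAX_HOPS * (MAX_HOPS + 1) / 2; // 210

fn main() {
	assert_eq!(MAX_HOPS * HOLD_TIME_LEN, 80); // hold_times bytes
	assert_eq!(HMAC_LEN * HMAC_COUNT, 840); // hmacs bytes
	// 920 bytes total, matching `get_slice!(80 + 840)` in the fuzz target and
	// the 0x398-byte attribution_data length in the msgs.rs test vector.
	assert_eq!(MAX_HOPS * HOLD_TIME_LEN + HMAC_LEN * HMAC_COUNT, 920);
}
```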
@@ -2774,7 +2782,7 @@ impl AttributionData {

/// Writes the HMACs corresponding to the given position that have been added already by downstream hops. Position is
/// relative to the final node. The final node is at position 0.
pub fn write_downstream_hmacs(&self, position: usize, w: &mut HmacEngine<Sha256>) {
pub(crate) fn write_downstream_hmacs(&self, position: usize, w: &mut HmacEngine<Sha256>) {
// Set the index to the first downstream HMAC that we need to include. Note that we skip the first MAX_HOPS HMACs
// because this is space reserved for the HMACs that we are producing for the current node.
let mut hmac_idx = MAX_HOPS + MAX_HOPS - position - 1;
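
As a quick worked check of the starting index above (again assuming `MAX_HOPS` is 20; illustrative only, not part of the PR):

```rust
const MAX_HOPS: usize = 20; // assumed

// First downstream HMAC index for a node at `position` (final node = 0).
fn first_downstream_hmac_idx(position: usize) -> usize {
	MAX_HOPS + MAX_HOPS - position - 1
}

fn main() {
	// The final node starts reading at index 39; the hop nearest the sender
	// (position 19) starts at index 20, immediately after the MAX_HOPS slots
	// reserved for the current node's own HMACs.
	assert_eq!(first_downstream_hmac_idx(0), 39);
	assert_eq!(first_downstream_hmac_idx(MAX_HOPS - 1), 20);
}
```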