From 66b1a883f48d70faecd946cc522d803c21b811d7 Mon Sep 17 00:00:00 2001
From: Elias Rohrer
Date: Thu, 31 Jul 2025 08:54:54 +0200
Subject: [PATCH 01/27] Stop migrating spendable outputs from pre-v0.3 format

As part of the v0.3 release we switched to the upstreamed `OutputSweeper`,
which slightly changed our serialization format and has had us run a
migration step on startup for backwards compatibility ever since.

Here we drop the migration code that runs on startup, for simplicity's sake,
but also because it's going to be async going forward and we currently don't
have a runtime available on startup (which might change soon, but still).

As the v0.3 release is now well over a year in the past, it's very unlikely
that there are any v0.2 (or even v0.3) users left. If there are any affected
users left, they'll first have to upgrade to any version pre-v0.7, start it
up once, and then upgrade to v0.7 or later.
---
 src/balance.rs  |  13 +++++--
 src/builder.rs  |  16 +-------
 src/io/mod.rs   |   5 ---
 src/io/utils.rs | 100 +-----------------------------------------------
 src/lib.rs      |   1 -
 src/sweep.rs    |  47 -----------------------
 6 files changed, 12 insertions(+), 170 deletions(-)
 delete mode 100644 src/sweep.rs

diff --git a/src/balance.rs b/src/balance.rs
index b5e2f5eb7..d0ebc310b 100644
--- a/src/balance.rs
+++ b/src/balance.rs
@@ -5,17 +5,16 @@
 // http://opensource.org/licenses/MIT>, at your option. You may not use this file except in
 // accordance with one or both of these licenses.
 
-use crate::sweep::value_from_descriptor;
-
 use lightning::chain::channelmonitor::Balance as LdkBalance;
 use lightning::chain::channelmonitor::BalanceSource;
 use lightning::ln::types::ChannelId;
+use lightning::sign::SpendableOutputDescriptor;
 use lightning::util::sweep::{OutputSpendStatus, TrackedSpendableOutput};
 
 use lightning_types::payment::{PaymentHash, PaymentPreimage};
 
 use bitcoin::secp256k1::PublicKey;
-use bitcoin::{BlockHash, Txid};
+use bitcoin::{Amount, BlockHash, Txid};
 
 /// Details of the known available balances returned by [`Node::list_balances`].
 ///
@@ -385,3 +384,11 @@ impl PendingSweepBalance {
 		}
 	}
 }
+
+fn value_from_descriptor(descriptor: &SpendableOutputDescriptor) -> Amount {
+	match &descriptor {
+		SpendableOutputDescriptor::StaticOutput { output, ..
} => output.value, + SpendableOutputDescriptor::DelayedPaymentOutput(output) => output.output.value, + SpendableOutputDescriptor::StaticPaymentOutput(output) => output.output.value, + } +} diff --git a/src/builder.rs b/src/builder.rs index e160d1f6e..289c2954c 100644 --- a/src/builder.rs +++ b/src/builder.rs @@ -25,7 +25,7 @@ use crate::io::{ use crate::liquidity::{ LSPS1ClientConfig, LSPS2ClientConfig, LSPS2ServiceConfig, LiquiditySourceBuilder, }; -use crate::logger::{log_error, log_info, LdkLogger, LogLevel, LogWriter, Logger}; +use crate::logger::{log_error, LdkLogger, LogLevel, LogWriter, Logger}; use crate::message_handler::NodeCustomMessageHandler; use crate::peer_store::PeerStore; use crate::runtime::Runtime; @@ -1627,20 +1627,6 @@ fn build_with_store_internal( }, }; - match io::utils::migrate_deprecated_spendable_outputs( - Arc::clone(&output_sweeper), - Arc::clone(&kv_store), - Arc::clone(&logger), - ) { - Ok(()) => { - log_info!(logger, "Successfully migrated OutputSweeper data."); - }, - Err(e) => { - log_error!(logger, "Failed to migrate OutputSweeper data: {}", e); - return Err(BuildError::ReadFailed); - }, - } - let event_queue = match io::utils::read_event_queue(Arc::clone(&kv_store), Arc::clone(&logger)) { Ok(event_queue) => Arc::new(event_queue), diff --git a/src/io/mod.rs b/src/io/mod.rs index 3192dbb86..7a52a5c98 100644 --- a/src/io/mod.rs +++ b/src/io/mod.rs @@ -27,11 +27,6 @@ pub(crate) const PEER_INFO_PERSISTENCE_KEY: &str = "peers"; pub(crate) const PAYMENT_INFO_PERSISTENCE_PRIMARY_NAMESPACE: &str = "payments"; pub(crate) const PAYMENT_INFO_PERSISTENCE_SECONDARY_NAMESPACE: &str = ""; -/// The spendable output information used to persisted under this prefix until LDK Node v0.3.0. -pub(crate) const DEPRECATED_SPENDABLE_OUTPUT_INFO_PERSISTENCE_PRIMARY_NAMESPACE: &str = - "spendable_outputs"; -pub(crate) const DEPRECATED_SPENDABLE_OUTPUT_INFO_PERSISTENCE_SECONDARY_NAMESPACE: &str = ""; - /// The node metrics will be persisted under this key. pub(crate) const NODE_METRICS_PRIMARY_NAMESPACE: &str = ""; pub(crate) const NODE_METRICS_SECONDARY_NAMESPACE: &str = ""; diff --git a/src/io/utils.rs b/src/io/utils.rs index b5537ed7d..06a1017ba 100644 --- a/src/io/utils.rs +++ b/src/io/utils.rs @@ -15,7 +15,6 @@ use crate::io::{ }; use crate::logger::{log_error, LdkLogger, Logger}; use crate::peer_store::PeerStore; -use crate::sweep::DeprecatedSpendableOutputInfo; use crate::types::{Broadcaster, DynStore, KeysManager, Sweeper}; use crate::wallet::ser::{ChangeSetDeserWrapper, ChangeSetSerWrapper}; use crate::{Error, EventQueue, NodeMetrics, PaymentDetails}; @@ -33,7 +32,7 @@ use lightning::util::persist::{ }; use lightning::util::ser::{Readable, ReadableArgs, Writeable}; use lightning::util::string::PrintableString; -use lightning::util::sweep::{OutputSpendStatus, OutputSweeper}; +use lightning::util::sweep::OutputSweeper; use bdk_chain::indexer::keychain_txout::ChangeSet as BdkIndexerChangeSet; use bdk_chain::local_chain::ChangeSet as BdkLocalChainChangeSet; @@ -258,103 +257,6 @@ pub(crate) fn read_output_sweeper( }) } -/// Read previously persisted spendable output information from the store and migrate to the -/// upstreamed `OutputSweeper`. -/// -/// We first iterate all `DeprecatedSpendableOutputInfo`s and have them tracked by the new -/// `OutputSweeper`. In order to be certain the initial output spends will happen in a single -/// transaction (and safe on-chain fees), we batch them to happen at current height plus two -/// blocks. 
Lastly, we remove the previously persisted data once we checked they are tracked and -/// awaiting their initial spend at the correct height. -/// -/// Note that this migration will be run in the `Builder`, i.e., at the time when the migration is -/// happening no background sync is ongoing, so we shouldn't have a risk of interleaving block -/// connections during the migration. -pub(crate) fn migrate_deprecated_spendable_outputs( - sweeper: Arc, kv_store: Arc, logger: L, -) -> Result<(), std::io::Error> -where - L::Target: LdkLogger, -{ - let best_block = sweeper.current_best_block(); - - for stored_key in kv_store.list( - DEPRECATED_SPENDABLE_OUTPUT_INFO_PERSISTENCE_PRIMARY_NAMESPACE, - DEPRECATED_SPENDABLE_OUTPUT_INFO_PERSISTENCE_SECONDARY_NAMESPACE, - )? { - let mut reader = Cursor::new(kv_store.read( - DEPRECATED_SPENDABLE_OUTPUT_INFO_PERSISTENCE_PRIMARY_NAMESPACE, - DEPRECATED_SPENDABLE_OUTPUT_INFO_PERSISTENCE_SECONDARY_NAMESPACE, - &stored_key, - )?); - let output = DeprecatedSpendableOutputInfo::read(&mut reader).map_err(|e| { - log_error!(logger, "Failed to deserialize SpendableOutputInfo: {}", e); - std::io::Error::new( - std::io::ErrorKind::InvalidData, - "Failed to deserialize SpendableOutputInfo", - ) - })?; - let descriptors = vec![output.descriptor.clone()]; - let spend_delay = Some(best_block.height + 2); - sweeper - .track_spendable_outputs(descriptors, output.channel_id, true, spend_delay) - .map_err(|_| { - log_error!(logger, "Failed to track spendable outputs. Aborting migration, will retry in the future."); - std::io::Error::new( - std::io::ErrorKind::InvalidData, - "Failed to track spendable outputs. Aborting migration, will retry in the future.", - ) - })?; - - if let Some(tracked_spendable_output) = - sweeper.tracked_spendable_outputs().iter().find(|o| o.descriptor == output.descriptor) - { - match tracked_spendable_output.status { - OutputSpendStatus::PendingInitialBroadcast { delayed_until_height } => { - if delayed_until_height == spend_delay { - kv_store.remove( - DEPRECATED_SPENDABLE_OUTPUT_INFO_PERSISTENCE_PRIMARY_NAMESPACE, - DEPRECATED_SPENDABLE_OUTPUT_INFO_PERSISTENCE_SECONDARY_NAMESPACE, - &stored_key, - false, - )?; - } else { - debug_assert!(false, "Unexpected status in OutputSweeper migration."); - log_error!(logger, "Unexpected status in OutputSweeper migration."); - return Err(std::io::Error::new( - std::io::ErrorKind::Other, - "Failed to migrate OutputSweeper state.", - )); - } - }, - _ => { - debug_assert!(false, "Unexpected status in OutputSweeper migration."); - log_error!(logger, "Unexpected status in OutputSweeper migration."); - return Err(std::io::Error::new( - std::io::ErrorKind::Other, - "Failed to migrate OutputSweeper state.", - )); - }, - } - } else { - debug_assert!( - false, - "OutputSweeper failed to track and persist outputs during migration." - ); - log_error!( - logger, - "OutputSweeper failed to track and persist outputs during migration." 
- ); - return Err(std::io::Error::new( - std::io::ErrorKind::Other, - "Failed to migrate OutputSweeper state.", - )); - } - } - - Ok(()) -} - pub(crate) fn read_node_metrics( kv_store: Arc, logger: L, ) -> Result diff --git a/src/lib.rs b/src/lib.rs index da86fce73..9035d5361 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -95,7 +95,6 @@ mod message_handler; pub mod payment; mod peer_store; mod runtime; -mod sweep; mod tx_broadcaster; mod types; mod wallet; diff --git a/src/sweep.rs b/src/sweep.rs deleted file mode 100644 index ba10869b8..000000000 --- a/src/sweep.rs +++ /dev/null @@ -1,47 +0,0 @@ -// This file is Copyright its original authors, visible in version control history. -// -// This file is licensed under the Apache License, Version 2.0 or the MIT license , at your option. You may not use this file except in -// accordance with one or both of these licenses. - -//! The output sweeper used to live here before we upstreamed it to `rust-lightning` and migrated -//! to the upstreamed version with LDK Node v0.3.0 (May 2024). We should drop this module entirely -//! once sufficient time has passed for us to be confident any users completed the migration. - -use lightning::impl_writeable_tlv_based; -use lightning::ln::types::ChannelId; -use lightning::sign::SpendableOutputDescriptor; - -use bitcoin::{Amount, BlockHash, Transaction}; - -#[derive(Clone, Debug, PartialEq, Eq)] -pub(crate) struct DeprecatedSpendableOutputInfo { - pub(crate) id: [u8; 32], - pub(crate) descriptor: SpendableOutputDescriptor, - pub(crate) channel_id: Option, - pub(crate) first_broadcast_hash: Option, - pub(crate) latest_broadcast_height: Option, - pub(crate) latest_spending_tx: Option, - pub(crate) confirmation_height: Option, - pub(crate) confirmation_hash: Option, -} - -impl_writeable_tlv_based!(DeprecatedSpendableOutputInfo, { - (0, id, required), - (2, descriptor, required), - (4, channel_id, option), - (6, first_broadcast_hash, option), - (8, latest_broadcast_height, option), - (10, latest_spending_tx, option), - (12, confirmation_height, option), - (14, confirmation_hash, option), -}); - -pub(crate) fn value_from_descriptor(descriptor: &SpendableOutputDescriptor) -> Amount { - match &descriptor { - SpendableOutputDescriptor::StaticOutput { output, .. } => output.value, - SpendableOutputDescriptor::DelayedPaymentOutput(output) => output.output.value, - SpendableOutputDescriptor::StaticPaymentOutput(output) => output.output.value, - } -} From ed12e65591813d22da5be51b0af8d92e8e7ba5cd Mon Sep 17 00:00:00 2001 From: Elias Rohrer Date: Thu, 6 Feb 2025 16:14:34 +0100 Subject: [PATCH 02/27] Bump to LDK main (4e32d852) We bump our LDK dependency to 0.2-devel, up to commit `4e32d85249359d8ef8ece97d89848e40154363ab`. 
--- Cargo.toml | 51 +++-- bindings/ldk_node.udl | 81 ++++---- src/builder.rs | 49 +++-- src/chain/bitcoind.rs | 12 +- src/chain/electrum.rs | 9 +- src/chain/esplora.rs | 15 +- src/config.rs | 12 +- src/data_store.rs | 22 +-- src/event.rs | 82 +++++--- src/ffi/types.rs | 94 +++++++-- src/io/sqlite_store/migrations.rs | 2 +- src/io/sqlite_store/mod.rs | 11 +- src/io/test_utils.rs | 24 +-- src/io/utils.rs | 9 +- src/io/vss_store.rs | 10 +- src/lib.rs | 65 +++---- src/liquidity.rs | 154 +++++---------- src/message_handler.rs | 3 +- src/payment/bolt11.rs | 189 +++++++++--------- src/payment/bolt12.rs | 55 +++--- src/payment/mod.rs | 84 -------- src/payment/spontaneous.rs | 58 +++--- src/payment/store.rs | 2 +- src/peer_store.rs | 2 +- src/types.rs | 29 +-- src/wallet/mod.rs | 311 ++++++++++++++---------------- tests/common/mod.rs | 14 +- tests/integration_tests_rust.rs | 19 +- 28 files changed, 690 insertions(+), 778 deletions(-) diff --git a/Cargo.toml b/Cargo.toml index 96a9eea53..aaaa55f39 100755 --- a/Cargo.toml +++ b/Cargo.toml @@ -28,38 +28,53 @@ panic = 'abort' # Abort on panic default = [] [dependencies] -lightning = { version = "0.1.0", features = ["std"] } -lightning-types = { version = "0.2.0" } -lightning-invoice = { version = "0.33.0", features = ["std"] } -lightning-net-tokio = { version = "0.1.0" } -lightning-persister = { version = "0.1.0" } -lightning-background-processor = { version = "0.1.0", features = ["futures"] } -lightning-rapid-gossip-sync = { version = "0.1.0" } -lightning-block-sync = { version = "0.1.0", features = ["rpc-client", "rest-client", "tokio"] } -lightning-transaction-sync = { version = "0.1.0", features = ["esplora-async-https", "time", "electrum"] } -lightning-liquidity = { version = "0.1.0", features = ["std"] } +#lightning = { version = "0.1.0", features = ["std"] } +#lightning-types = { version = "0.2.0" } +#lightning-invoice = { version = "0.33.0", features = ["std"] } +#lightning-net-tokio = { version = "0.1.0" } +#lightning-persister = { version = "0.1.0" } +#lightning-background-processor = { version = "0.1.0" } +#lightning-rapid-gossip-sync = { version = "0.1.0" } +#lightning-block-sync = { version = "0.1.0", features = ["rest-client", "rpc-client", "tokio"] } +#lightning-transaction-sync = { version = "0.1.0", features = ["esplora-async-https", "time", "electrum"] } +#lightning-liquidity = { version = "0.1.0", features = ["std"] } +#lightning-macros = { version = "0.1.0" } #lightning = { git = "https://github.com/lightningdevkit/rust-lightning", branch = "main", features = ["std"] } #lightning-types = { git = "https://github.com/lightningdevkit/rust-lightning", branch = "main" } #lightning-invoice = { git = "https://github.com/lightningdevkit/rust-lightning", branch = "main", features = ["std"] } #lightning-net-tokio = { git = "https://github.com/lightningdevkit/rust-lightning", branch = "main" } #lightning-persister = { git = "https://github.com/lightningdevkit/rust-lightning", branch = "main" } -#lightning-background-processor = { git = "https://github.com/lightningdevkit/rust-lightning", branch = "main", features = ["futures"] } +#lightning-background-processor = { git = "https://github.com/lightningdevkit/rust-lightning", branch = "main" } #lightning-rapid-gossip-sync = { git = "https://github.com/lightningdevkit/rust-lightning", branch = "main" } -#lightning-block-sync = { git = "https://github.com/lightningdevkit/rust-lightning", branch = "main", features = ["rpc-client", "tokio"] } +#lightning-block-sync = { git = 
"https://github.com/lightningdevkit/rust-lightning", branch = "main", features = ["rest-client", "rpc-client", "tokio"] } #lightning-transaction-sync = { git = "https://github.com/lightningdevkit/rust-lightning", branch = "main", features = ["esplora-async-https", "electrum", "time"] } #lightning-liquidity = { git = "https://github.com/lightningdevkit/rust-lightning", branch = "main" } +#lightning-macros = { git = "https://github.com/lightningdevkit/rust-lightning", branch = "main" } + +lightning = { git = "https://github.com/lightningdevkit/rust-lightning", rev = "4e32d85249359d8ef8ece97d89848e40154363ab", features = ["std"] } +lightning-types = { git = "https://github.com/lightningdevkit/rust-lightning", rev = "4e32d85249359d8ef8ece97d89848e40154363ab" } +lightning-invoice = { git = "https://github.com/lightningdevkit/rust-lightning", rev = "4e32d85249359d8ef8ece97d89848e40154363ab", features = ["std"] } +lightning-net-tokio = { git = "https://github.com/lightningdevkit/rust-lightning", rev = "4e32d85249359d8ef8ece97d89848e40154363ab" } +lightning-persister = { git = "https://github.com/lightningdevkit/rust-lightning", rev = "4e32d85249359d8ef8ece97d89848e40154363ab" } +lightning-background-processor = { git = "https://github.com/lightningdevkit/rust-lightning", rev = "4e32d85249359d8ef8ece97d89848e40154363ab" } +lightning-rapid-gossip-sync = { git = "https://github.com/lightningdevkit/rust-lightning", rev = "4e32d85249359d8ef8ece97d89848e40154363ab" } +lightning-block-sync = { git = "https://github.com/lightningdevkit/rust-lightning", rev = "4e32d85249359d8ef8ece97d89848e40154363ab", features = ["rest-client", "rpc-client", "tokio"] } +lightning-transaction-sync = { git = "https://github.com/lightningdevkit/rust-lightning", rev = "4e32d85249359d8ef8ece97d89848e40154363ab", features = ["esplora-async-https", "electrum", "time"] } +lightning-liquidity = { git = "https://github.com/lightningdevkit/rust-lightning", rev = "4e32d85249359d8ef8ece97d89848e40154363ab" } +lightning-macros = { git = "https://github.com/lightningdevkit/rust-lightning", rev = "4e32d85249359d8ef8ece97d89848e40154363ab" } #lightning = { path = "../rust-lightning/lightning", features = ["std"] } #lightning-types = { path = "../rust-lightning/lightning-types" } #lightning-invoice = { path = "../rust-lightning/lightning-invoice", features = ["std"] } #lightning-net-tokio = { path = "../rust-lightning/lightning-net-tokio" } #lightning-persister = { path = "../rust-lightning/lightning-persister" } -#lightning-background-processor = { path = "../rust-lightning/lightning-background-processor", features = ["futures"] } +#lightning-background-processor = { path = "../rust-lightning/lightning-background-processor" } #lightning-rapid-gossip-sync = { path = "../rust-lightning/lightning-rapid-gossip-sync" } -#lightning-block-sync = { path = "../rust-lightning/lightning-block-sync", features = ["rpc-client", "tokio"] } +#lightning-block-sync = { path = "../rust-lightning/lightning-block-sync", features = ["rest-client", "rpc-client", "tokio"] } #lightning-transaction-sync = { path = "../rust-lightning/lightning-transaction-sync", features = ["esplora-async-https", "electrum", "time"] } #lightning-liquidity = { path = "../rust-lightning/lightning-liquidity", features = ["std"] } +#lightning-macros = { path = "../rust-lightning/lightning-macros" } bdk_chain = { version = "0.23.0", default-features = false, features = ["std"] } bdk_esplora = { version = "0.22.0", default-features = false, features = ["async-https-rustls", "tokio"]} @@ 
-78,11 +93,6 @@ rand = "0.8.5" chrono = { version = "0.4", default-features = false, features = ["clock"] } tokio = { version = "1.37", default-features = false, features = [ "rt-multi-thread", "time", "sync", "macros" ] } esplora-client = { version = "0.12", default-features = false, features = ["tokio", "async-https-rustls"] } - -# FIXME: This was introduced to decouple the `bdk_esplora` and -# `lightning-transaction-sync` APIs. We should drop it as part of the upgrade -# to LDK 0.2. -esplora-client_0_11 = { package = "esplora-client", version = "0.11", default-features = false, features = ["tokio", "async-https-rustls"] } electrum-client = { version = "0.24.0", default-features = true } libc = "0.2" uniffi = { version = "0.28.3", features = ["build"], optional = true } @@ -97,8 +107,9 @@ prost = { version = "0.11.6", default-features = false} winapi = { version = "0.3", features = ["winbase"] } [dev-dependencies] -lightning = { version = "0.1.0", features = ["std", "_test_utils"] } +#lightning = { version = "0.1.0", features = ["std", "_test_utils"] } #lightning = { git = "https://github.com/lightningdevkit/rust-lightning", branch="main", features = ["std", "_test_utils"] } +lightning = { git = "https://github.com/lightningdevkit/rust-lightning", rev = "4e32d85249359d8ef8ece97d89848e40154363ab", features = ["std", "_test_utils"] } #lightning = { path = "../rust-lightning/lightning", features = ["std", "_test_utils"] } proptest = "1.0.0" regex = "1.5.6" diff --git a/bindings/ldk_node.udl b/bindings/ldk_node.udl index 076d7fc9b..b9bab61e8 100644 --- a/bindings/ldk_node.udl +++ b/bindings/ldk_node.udl @@ -12,7 +12,7 @@ dictionary Config { sequence trusted_peers_0conf; u64 probing_liquidity_limit_multiplier; AnchorChannelsConfig? anchor_channels_config; - SendingParameters? sending_parameters; + RouteParametersConfig? route_parameters; }; dictionary AnchorChannelsConfig { @@ -167,13 +167,13 @@ interface Bolt11InvoiceDescription { interface Bolt11Payment { [Throws=NodeError] - PaymentId send([ByRef]Bolt11Invoice invoice, SendingParameters? sending_parameters); + PaymentId send([ByRef]Bolt11Invoice invoice, RouteParametersConfig? route_parameters); [Throws=NodeError] - PaymentId send_using_amount([ByRef]Bolt11Invoice invoice, u64 amount_msat, SendingParameters? sending_parameters); + PaymentId send_using_amount([ByRef]Bolt11Invoice invoice, u64 amount_msat, RouteParametersConfig? route_parameters); [Throws=NodeError] - void send_probes([ByRef]Bolt11Invoice invoice); + void send_probes([ByRef]Bolt11Invoice invoice, RouteParametersConfig? route_parameters); [Throws=NodeError] - void send_probes_using_amount([ByRef]Bolt11Invoice invoice, u64 amount_msat); + void send_probes_using_amount([ByRef]Bolt11Invoice invoice, u64 amount_msat, RouteParametersConfig? route_parameters); [Throws=NodeError] void claim_for_hash(PaymentHash payment_hash, u64 claimable_amount_msat, PaymentPreimage preimage); [Throws=NodeError] @@ -213,13 +213,13 @@ interface Bolt12Payment { interface SpontaneousPayment { [Throws=NodeError] - PaymentId send(u64 amount_msat, PublicKey node_id, SendingParameters? sending_parameters); + PaymentId send(u64 amount_msat, PublicKey node_id, RouteParametersConfig? route_parameters); [Throws=NodeError] - PaymentId send_with_custom_tlvs(u64 amount_msat, PublicKey node_id, SendingParameters? sending_parameters, sequence custom_tlvs); + PaymentId send_with_custom_tlvs(u64 amount_msat, PublicKey node_id, RouteParametersConfig? 
route_parameters, sequence custom_tlvs); [Throws=NodeError] - PaymentId send_with_preimage(u64 amount_msat, PublicKey node_id, PaymentPreimage preimage, SendingParameters? sending_parameters); + PaymentId send_with_preimage(u64 amount_msat, PublicKey node_id, PaymentPreimage preimage, RouteParametersConfig? route_parameters); [Throws=NodeError] - PaymentId send_with_preimage_and_custom_tlvs(u64 amount_msat, PublicKey node_id, sequence custom_tlvs, PaymentPreimage preimage, SendingParameters? sending_parameters); + PaymentId send_with_preimage_and_custom_tlvs(u64 amount_msat, PublicKey node_id, sequence custom_tlvs, PaymentPreimage preimage, RouteParametersConfig? route_parameters); [Throws=NodeError] void send_probes(u64 amount_msat, PublicKey node_id); }; @@ -254,7 +254,7 @@ interface LSPS1Liquidity { [Throws=NodeError] LSPS1OrderStatus request_channel(u64 lsp_balance_sat, u64 client_balance_sat, u32 channel_expiry_blocks, boolean announce_channel); [Throws=NodeError] - LSPS1OrderStatus check_order_status(OrderId order_id); + LSPS1OrderStatus check_order_status(LSPS1OrderId order_id); }; [Error] @@ -392,7 +392,7 @@ enum PaymentFailureReason { [Enum] interface ClosureReason { CounterpartyForceClosed(UntrustedString peer_msg); - HolderForceClosed(boolean? broadcasted_latest_txn); + HolderForceClosed(boolean? broadcasted_latest_txn, string message); LegacyCooperativeClosure(); CounterpartyInitiatedCooperativeClosure(); LocallyInitiatedCooperativeClosure(); @@ -402,8 +402,9 @@ interface ClosureReason { DisconnectedPeer(); OutdatedChannelManager(); CounterpartyCoopClosedUnfundedChannel(); + LocallyCoopClosedUnfundedChannel(); FundingBatchClosure(); - HTLCsTimedOut(); + HTLCsTimedOut( PaymentHash? payment_hash ); PeerFeerateTooLow(u32 peer_feerate_sat_per_kw, u32 required_feerate_sat_per_kw); }; @@ -456,11 +457,11 @@ dictionary PaymentDetails { u64 latest_update_timestamp; }; -dictionary SendingParameters { - MaxTotalRoutingFeeLimit? max_total_routing_fee_msat; - u32? max_total_cltv_expiry_delta; - u8? max_path_count; - u8? max_channel_saturation_power_of_half; +dictionary RouteParametersConfig { + u64? max_total_routing_fee_msat; + u32 max_total_cltv_expiry_delta; + u8 max_path_count; + u8 max_channel_saturation_power_of_half; }; dictionary CustomTlvRecord { @@ -469,13 +470,13 @@ dictionary CustomTlvRecord { }; dictionary LSPS1OrderStatus { - OrderId order_id; - OrderParameters order_params; - PaymentInfo payment_options; - ChannelOrderInfo? channel_state; + LSPS1OrderId order_id; + LSPS1OrderParams order_params; + LSPS1PaymentInfo payment_options; + LSPS1ChannelInfo? channel_state; }; -dictionary OrderParameters { +dictionary LSPS1OrderParams { u64 lsp_balance_sat; u64 client_balance_sat; u16 required_channel_confirmations; @@ -485,22 +486,22 @@ dictionary OrderParameters { boolean announce_channel; }; -dictionary PaymentInfo { - Bolt11PaymentInfo? bolt11; - OnchainPaymentInfo? onchain; +dictionary LSPS1PaymentInfo { + LSPS1Bolt11PaymentInfo? bolt11; + LSPS1OnchainPaymentInfo? 
onchain; }; -dictionary Bolt11PaymentInfo { - PaymentState state; - DateTime expires_at; +dictionary LSPS1Bolt11PaymentInfo { + LSPS1PaymentState state; + LSPSDateTime expires_at; u64 fee_total_sat; u64 order_total_sat; Bolt11Invoice invoice; }; -dictionary OnchainPaymentInfo { - PaymentState state; - DateTime expires_at; +dictionary LSPS1OnchainPaymentInfo { + LSPS1PaymentState state; + LSPSDateTime expires_at; u64 fee_total_sat; u64 order_total_sat; Address address; @@ -509,24 +510,18 @@ dictionary OnchainPaymentInfo { Address? refund_onchain_address; }; -dictionary ChannelOrderInfo { - DateTime funded_at; +dictionary LSPS1ChannelInfo { + LSPSDateTime funded_at; OutPoint funding_outpoint; - DateTime expires_at; + LSPSDateTime expires_at; }; -enum PaymentState { +enum LSPS1PaymentState { "ExpectPayment", "Paid", "Refunded", }; -[Enum] -interface MaxTotalRoutingFeeLimit { - None (); - Some ( u64 amount_msat ); -}; - [NonExhaustive] enum Network { "Bitcoin", @@ -861,7 +856,7 @@ typedef string UntrustedString; typedef string NodeAlias; [Custom] -typedef string OrderId; +typedef string LSPS1OrderId; [Custom] -typedef string DateTime; +typedef string LSPSDateTime; diff --git a/src/builder.rs b/src/builder.rs index 289c2954c..094c21e72 100644 --- a/src/builder.rs +++ b/src/builder.rs @@ -48,7 +48,7 @@ use lightning::routing::router::DefaultRouter; use lightning::routing::scoring::{ ProbabilisticScorer, ProbabilisticScoringDecayParameters, ProbabilisticScoringFeeParameters, }; -use lightning::sign::EntropySource; +use lightning::sign::{EntropySource, NodeSigner}; use lightning::util::persist::{ read_channel_monitors, CHANNEL_MANAGER_PERSISTENCE_KEY, @@ -173,17 +173,17 @@ pub enum BuildError { RuntimeSetupFailed, /// We failed to read data from the [`KVStore`]. /// - /// [`KVStore`]: lightning::util::persist::KVStore + /// [`KVStore`]: lightning::util::persist::KVStoreSync ReadFailed, /// We failed to write data to the [`KVStore`]. /// - /// [`KVStore`]: lightning::util::persist::KVStore + /// [`KVStore`]: lightning::util::persist::KVStoreSync WriteFailed, /// We failed to access the given `storage_dir_path`. StoragePathAccessFailed, /// We failed to setup our [`KVStore`]. /// - /// [`KVStore`]: lightning::util::persist::KVStore + /// [`KVStore`]: lightning::util::persist::KVStoreSync KVStoreSetupFailed, /// We failed to setup the onchain wallet. 
WalletSetupFailed, @@ -1275,15 +1275,6 @@ fn build_with_store_internal( }, }; - // Initialize the ChainMonitor - let chain_monitor: Arc = Arc::new(chainmonitor::ChainMonitor::new( - Some(Arc::clone(&chain_source)), - Arc::clone(&tx_broadcaster), - Arc::clone(&logger), - Arc::clone(&fee_estimator), - Arc::clone(&kv_store), - )); - // Initialize the KeysManager let cur_time = SystemTime::now().duration_since(SystemTime::UNIX_EPOCH).map_err(|e| { log_error!(logger, "Failed to get current time: {}", e); @@ -1299,6 +1290,19 @@ fn build_with_store_internal( Arc::clone(&logger), )); + let peer_storage_key = keys_manager.get_peer_storage_key(); + + // Initialize the ChainMonitor + let chain_monitor: Arc = Arc::new(chainmonitor::ChainMonitor::new( + Some(Arc::clone(&chain_source)), + Arc::clone(&tx_broadcaster), + Arc::clone(&logger), + Arc::clone(&fee_estimator), + Arc::clone(&kv_store), + Arc::clone(&keys_manager), + peer_storage_key, + )); + // Initialize the network graph, scorer, and router let network_graph = match io::utils::read_network_graph(Arc::clone(&kv_store), Arc::clone(&logger)) { @@ -1359,17 +1363,6 @@ fn build_with_store_internal( }; let mut user_config = default_user_config(&config); - if liquidity_source_config.and_then(|lsc| lsc.lsps2_client.as_ref()).is_some() { - // Generally allow claiming underpaying HTLCs as the LSP will skim off some fee. We'll - // check that they don't take too much before claiming. - user_config.channel_config.accept_underpaying_htlcs = true; - - // FIXME: When we're an LSPS2 client, set maximum allowed inbound HTLC value in flight - // to 100%. We should eventually be able to set this on a per-channel basis, but for - // now we just bump the default for all channels. - user_config.channel_handshake_config.max_inbound_htlc_value_in_flight_percent_of_channel = - 100; - } if liquidity_source_config.and_then(|lsc| lsc.lsps2_service.as_ref()).is_some() { // If we act as an LSPS2 service, we need to to be able to intercept HTLCs and forward the @@ -1447,8 +1440,8 @@ fn build_with_store_internal( // Give ChannelMonitors to ChainMonitor for (_blockhash, channel_monitor) in channel_monitors.into_iter() { - let funding_outpoint = channel_monitor.get_funding_txo().0; - chain_monitor.watch_channel(funding_outpoint, channel_monitor).map_err(|e| { + let channel_id = channel_monitor.channel_id(); + chain_monitor.watch_channel(channel_id, channel_monitor).map_err(|e| { log_error!(logger, "Failed to watch channel monitor: {:?}", e); BuildError::InvalidChannelMonitor })?; @@ -1560,6 +1553,7 @@ fn build_with_store_internal( as Arc, onion_message_handler: Arc::clone(&onion_messenger), custom_message_handler, + send_only_message_handler: Arc::clone(&chain_monitor), }, GossipSync::Rapid(_) => MessageHandler { chan_handler: Arc::clone(&channel_manager), @@ -1567,6 +1561,7 @@ fn build_with_store_internal( as Arc, onion_message_handler: Arc::clone(&onion_messenger), custom_message_handler, + send_only_message_handler: Arc::clone(&chain_monitor), }, GossipSync::None => { unreachable!("We must always have a gossip sync!"); @@ -1611,7 +1606,7 @@ fn build_with_store_internal( Ok(output_sweeper) => Arc::new(output_sweeper), Err(e) => { if e.kind() == std::io::ErrorKind::NotFound { - Arc::new(OutputSweeper::new( + Arc::new(OutputSweeper::new_with_kv_store_sync( channel_manager.current_best_block(), Arc::clone(&tx_broadcaster), Arc::clone(&fee_estimator), diff --git a/src/chain/bitcoind.rs b/src/chain/bitcoind.rs index c282a6141..7157e5a4f 100644 --- a/src/chain/bitcoind.rs +++ 
b/src/chain/bitcoind.rs @@ -173,7 +173,7 @@ impl BitcoindChainSource { if let Some(worst_channel_monitor_block_hash) = chain_monitor .list_monitors() .iter() - .flat_map(|(txo, _)| chain_monitor.get_monitor(*txo)) + .flat_map(|channel_id| chain_monitor.get_monitor(*channel_id)) .map(|m| m.current_best_block()) .min_by_key(|b| b.height) .map(|b| b.block_hash) @@ -1381,11 +1381,11 @@ impl Listen for ChainListener { self.output_sweeper.block_connected(block, height); } - fn block_disconnected(&self, header: &bitcoin::block::Header, height: u32) { - self.onchain_wallet.block_disconnected(header, height); - self.channel_manager.block_disconnected(header, height); - self.chain_monitor.block_disconnected(header, height); - self.output_sweeper.block_disconnected(header, height); + fn blocks_disconnected(&self, fork_point_block: lightning::chain::BestBlock) { + self.onchain_wallet.blocks_disconnected(fork_point_block); + self.channel_manager.blocks_disconnected(fork_point_block); + self.chain_monitor.blocks_disconnected(fork_point_block); + self.output_sweeper.blocks_disconnected(fork_point_block); } } diff --git a/src/chain/electrum.rs b/src/chain/electrum.rs index b6d37409b..40d929ce7 100644 --- a/src/chain/electrum.rs +++ b/src/chain/electrum.rs @@ -402,7 +402,7 @@ impl ElectrumRuntimeStatus { struct ElectrumRuntimeClient { electrum_client: Arc, - bdk_electrum_client: Arc>, + bdk_electrum_client: Arc>>, tx_sync: Arc>>, runtime: Arc, config: Arc, @@ -424,12 +424,7 @@ impl ElectrumRuntimeClient { Error::ConnectionFailed })?, ); - let electrum_client_2 = - ElectrumClient::from_config(&server_url, electrum_config).map_err(|e| { - log_error!(logger, "Failed to connect to electrum server: {}", e); - Error::ConnectionFailed - })?; - let bdk_electrum_client = Arc::new(BdkElectrumClient::new(electrum_client_2)); + let bdk_electrum_client = Arc::new(BdkElectrumClient::new(Arc::clone(&electrum_client))); let tx_sync = Arc::new( ElectrumSyncClient::new(server_url.clone(), Arc::clone(&logger)).map_err(|e| { log_error!(logger, "Failed to connect to electrum server: {}", e); diff --git a/src/chain/esplora.rs b/src/chain/esplora.rs index a8806a413..8e9a4dbd4 100644 --- a/src/chain/esplora.rs +++ b/src/chain/esplora.rs @@ -57,19 +57,6 @@ impl EsploraChainSource { kv_store: Arc, config: Arc, logger: Arc, node_metrics: Arc>, ) -> Self { - // FIXME / TODO: We introduced this to make `bdk_esplora` work separately without updating - // `lightning-transaction-sync`. We should revert this as part of of the upgrade to LDK 0.2. 
- let mut client_builder_0_11 = esplora_client_0_11::Builder::new(&server_url); - client_builder_0_11 = client_builder_0_11.timeout(DEFAULT_ESPLORA_CLIENT_TIMEOUT_SECS); - - for (header_name, header_value) in &headers { - client_builder_0_11 = client_builder_0_11.header(header_name, header_value); - } - - let esplora_client_0_11 = client_builder_0_11.build_async().unwrap(); - let tx_sync = - Arc::new(EsploraSyncClient::from_client(esplora_client_0_11, Arc::clone(&logger))); - let mut client_builder = esplora_client::Builder::new(&server_url); client_builder = client_builder.timeout(DEFAULT_ESPLORA_CLIENT_TIMEOUT_SECS); @@ -78,6 +65,8 @@ impl EsploraChainSource { } let esplora_client = client_builder.build_async().unwrap(); + let tx_sync = + Arc::new(EsploraSyncClient::from_client(esplora_client.clone(), Arc::clone(&logger))); let onchain_wallet_sync_status = Mutex::new(WalletSyncStatus::Completed); let lightning_wallet_sync_status = Mutex::new(WalletSyncStatus::Completed); diff --git a/src/config.rs b/src/config.rs index 02df8bbc7..84f62d220 100644 --- a/src/config.rs +++ b/src/config.rs @@ -8,10 +8,10 @@ //! Objects for configuring the node. use crate::logger::LogLevel; -use crate::payment::SendingParameters; use lightning::ln::msgs::SocketAddress; use lightning::routing::gossip::NodeAlias; +use lightning::routing::router::RouteParametersConfig; use lightning::util::config::ChannelConfig as LdkChannelConfig; use lightning::util::config::MaxDustHTLCExposure as LdkMaxDustHTLCExposure; use lightning::util::config::UserConfig; @@ -114,9 +114,9 @@ pub const WALLET_KEYS_SEED_LEN: usize = 64; /// | `probing_liquidity_limit_multiplier` | 3 | /// | `log_level` | Debug | /// | `anchor_channels_config` | Some(..) | -/// | `sending_parameters` | None | +/// | `route_parameters` | None | /// -/// See [`AnchorChannelsConfig`] and [`SendingParameters`] for more information regarding their +/// See [`AnchorChannelsConfig`] and [`RouteParametersConfig`] for more information regarding their /// respective default values. /// /// [`Node`]: crate::Node @@ -173,12 +173,12 @@ pub struct Config { pub anchor_channels_config: Option, /// Configuration options for payment routing and pathfinding. /// - /// Setting the `SendingParameters` provides flexibility to customize how payments are routed, + /// Setting the [`RouteParametersConfig`] provides flexibility to customize how payments are routed, /// including setting limits on routing fees, CLTV expiry, and channel utilization. /// /// **Note:** If unset, default parameters will be used, and you will be able to override the /// parameters on a per-payment basis in the corresponding method calls. 
- pub sending_parameters: Option, + pub route_parameters: Option, } impl Default for Config { @@ -191,7 +191,7 @@ impl Default for Config { trusted_peers_0conf: Vec::new(), probing_liquidity_limit_multiplier: DEFAULT_PROBING_LIQUIDITY_LIMIT_MULTIPLIER, anchor_channels_config: Some(AnchorChannelsConfig::default()), - sending_parameters: None, + route_parameters: None, node_alias: None, } } diff --git a/src/data_store.rs b/src/data_store.rs index 78e3e7870..45802c272 100644 --- a/src/data_store.rs +++ b/src/data_store.rs @@ -143,18 +143,18 @@ where let store_key = object.id().encode_to_hex_str(); let data = object.encode(); self.kv_store - .write(&self.primary_namespace, &self.secondary_namespace, &store_key, &data) + .write(&self.primary_namespace, &self.secondary_namespace, &store_key, data) .map_err(|e| { - log_error!( - self.logger, - "Write for key {}/{}/{} failed due to: {}", - &self.primary_namespace, - &self.secondary_namespace, - store_key, - e - ); - Error::PersistenceFailed - })?; + log_error!( + self.logger, + "Write for key {}/{}/{} failed due to: {}", + &self.primary_namespace, + &self.secondary_namespace, + store_key, + e + ); + Error::PersistenceFailed + })?; Ok(()) } } diff --git a/src/event.rs b/src/event.rs index ff94d51d1..bad1b84ab 100644 --- a/src/event.rs +++ b/src/event.rs @@ -38,6 +38,9 @@ use lightning::impl_writeable_tlv_based_enum; use lightning::ln::channelmanager::PaymentId; use lightning::ln::types::ChannelId; use lightning::routing::gossip::NodeId; +use lightning::util::config::{ + ChannelConfigOverrides, ChannelConfigUpdate, ChannelHandshakeConfigUpdate, +}; use lightning::util::errors::APIError; use lightning::util::ser::{Readable, ReadableArgs, Writeable, Writer}; @@ -56,7 +59,6 @@ use core::task::{Poll, Waker}; use std::collections::VecDeque; use std::ops::Deref; use std::sync::{Arc, Condvar, Mutex}; -use std::time::Duration; /// An event emitted by [`Node`], which should be handled by the user. /// @@ -358,7 +360,7 @@ where EVENT_QUEUE_PERSISTENCE_PRIMARY_NAMESPACE, EVENT_QUEUE_PERSISTENCE_SECONDARY_NAMESPACE, EVENT_QUEUE_PERSISTENCE_KEY, - &data, + data, ) .map_err(|e| { log_error!( @@ -544,7 +546,7 @@ where Err(err) => { log_error!(self.logger, "Failed to create funding transaction: {}", err); self.channel_manager - .force_close_without_broadcasting_txn( + .force_close_broadcasting_latest_txn( &temporary_channel_id, &counterparty_node_id, "Failed to create funding transaction".to_string(), @@ -565,13 +567,10 @@ where payment_hash, purpose, amount_msat, - receiver_node_id: _, - via_channel_id: _, - via_user_channel_id: _, claim_deadline, onion_fields, counterparty_skimmed_fee_msat, - payment_id: _, + .. } => { let payment_id = PaymentId(payment_hash.0); if let Some(info) = self.payment_store.get(&payment_id) { @@ -1043,26 +1042,17 @@ where LdkEvent::PaymentPathFailed { .. } => {}, LdkEvent::ProbeSuccessful { .. } => {}, LdkEvent::ProbeFailed { .. } => {}, - LdkEvent::HTLCHandlingFailed { failed_next_destination, .. } => { + LdkEvent::HTLCHandlingFailed { failure_type, .. 
} => { if let Some(liquidity_source) = self.liquidity_source.as_ref() { - liquidity_source.handle_htlc_handling_failed(failed_next_destination); + liquidity_source.handle_htlc_handling_failed(failure_type); } }, - LdkEvent::PendingHTLCsForwardable { time_forwardable } => { - let forwarding_channel_manager = self.channel_manager.clone(); - let min = time_forwardable.as_millis() as u64; - - let future = async move { - let millis_to_sleep = thread_rng().gen_range(min..min * 5) as u64; - tokio::time::sleep(Duration::from_millis(millis_to_sleep)).await; - - forwarding_channel_manager.process_pending_htlc_forwards(); - }; - - self.runtime.spawn_cancellable_background_task(future); - }, LdkEvent::SpendableOutputs { outputs, channel_id } => { - match self.output_sweeper.track_spendable_outputs(outputs, channel_id, true, None) { + match self + .output_sweeper + .track_spendable_outputs(outputs, channel_id, true, None) + .await + { Ok(_) => return Ok(()), Err(_) => { log_error!(self.logger, "Failed to track spendable outputs"); @@ -1084,7 +1074,7 @@ where log_error!(self.logger, "Rejecting inbound announced channel from peer {} due to missing configuration: {}", counterparty_node_id, err); self.channel_manager - .force_close_without_broadcasting_txn( + .force_close_broadcasting_latest_txn( &temporary_channel_id, &counterparty_node_id, "Channel request rejected".to_string(), @@ -1128,7 +1118,7 @@ where required_amount_sats, ); self.channel_manager - .force_close_without_broadcasting_txn( + .force_close_broadcasting_latest_txn( &temporary_channel_id, &counterparty_node_id, "Channel request rejected".to_string(), @@ -1145,7 +1135,7 @@ where counterparty_node_id, ); self.channel_manager - .force_close_without_broadcasting_txn( + .force_close_broadcasting_latest_txn( &temporary_channel_id, &counterparty_node_id, "Channel request rejected".to_string(), @@ -1157,19 +1147,46 @@ where } } - let user_channel_id: u128 = rand::thread_rng().gen::(); + let user_channel_id: u128 = thread_rng().gen::(); let allow_0conf = self.config.trusted_peers_0conf.contains(&counterparty_node_id); + let mut channel_override_config = None; + if let Some((lsp_node_id, _)) = self + .liquidity_source + .as_ref() + .and_then(|ls| ls.as_ref().get_lsps2_lsp_details()) + { + if lsp_node_id == counterparty_node_id { + // When we're an LSPS2 client, allow claiming underpaying HTLCs as the LSP will skim off some fee. We'll + // check that they don't take too much before claiming. + // + // We also set maximum allowed inbound HTLC value in flight + // to 100%. We should eventually be able to set this on a per-channel basis, but for + // now we just bump the default for all channels. + channel_override_config = Some(ChannelConfigOverrides { + handshake_overrides: Some(ChannelHandshakeConfigUpdate { + max_inbound_htlc_value_in_flight_percent_of_channel: Some(100), + ..Default::default() + }), + update_overrides: Some(ChannelConfigUpdate { + accept_underpaying_htlcs: Some(true), + ..Default::default() + }), + }); + } + } let res = if allow_0conf { self.channel_manager.accept_inbound_channel_from_trusted_peer_0conf( &temporary_channel_id, &counterparty_node_id, user_channel_id, + channel_override_config, ) } else { self.channel_manager.accept_inbound_channel( &temporary_channel_id, &counterparty_node_id, user_channel_id, + channel_override_config, ) }; @@ -1469,7 +1486,7 @@ where BumpTransactionEvent::HTLCResolution { .. 
} => {}, } - self.bump_tx_event_handler.handle_event(&bte); + self.bump_tx_event_handler.handle_event(&bte).await; }, LdkEvent::OnionMessageIntercepted { .. } => { debug_assert!(false, "We currently don't support onion message interception, so this event should never be emitted."); @@ -1477,6 +1494,15 @@ where LdkEvent::OnionMessagePeerConnected { .. } => { debug_assert!(false, "We currently don't support onion message interception, so this event should never be emitted."); }, + LdkEvent::PersistStaticInvoice { .. } => { + debug_assert!(false, "We currently don't support static invoice persistence, so this event should never be emitted."); + }, + LdkEvent::StaticInvoiceRequested { .. } => { + debug_assert!(false, "We currently don't support static invoice persistence, so this event should never be emitted."); + }, + LdkEvent::FundingTransactionReadyForSigning { .. } => { + debug_assert!(false, "We currently don't support interactive-tx, so this event should never be emitted."); + }, } Ok(()) } diff --git a/src/ffi/types.rs b/src/ffi/types.rs index 984e4da8f..02d321787 100644 --- a/src/ffi/types.rs +++ b/src/ffi/types.rs @@ -15,26 +15,29 @@ pub use crate::config::{ EsploraSyncConfig, MaxDustHTLCExposure, }; pub use crate::graph::{ChannelInfo, ChannelUpdateInfo, NodeAnnouncementInfo, NodeInfo}; -pub use crate::liquidity::{LSPS1OrderStatus, LSPS2ServiceConfig, OnchainPaymentInfo, PaymentInfo}; +pub use crate::liquidity::{LSPS1OrderStatus, LSPS2ServiceConfig}; pub use crate::logger::{LogLevel, LogRecord, LogWriter}; pub use crate::payment::store::{ ConfirmationStatus, LSPFeeLimits, PaymentDirection, PaymentKind, PaymentStatus, }; -pub use crate::payment::{MaxTotalRoutingFeeLimit, QrPaymentResult, SendingParameters}; +pub use crate::payment::QrPaymentResult; pub use lightning::chain::channelmonitor::BalanceSource; pub use lightning::events::{ClosureReason, PaymentFailureReason}; pub use lightning::ln::types::ChannelId; pub use lightning::offers::offer::OfferId; pub use lightning::routing::gossip::{NodeAlias, NodeId, RoutingFees}; -pub use lightning::util::string::UntrustedString; +pub use lightning::routing::router::RouteParametersConfig; +pub use lightning_types::string::UntrustedString; pub use lightning_types::payment::{PaymentHash, PaymentPreimage, PaymentSecret}; pub use lightning_invoice::{Description, SignedRawBolt11Invoice}; -pub use lightning_liquidity::lsps1::msgs::ChannelInfo as ChannelOrderInfo; -pub use lightning_liquidity::lsps1::msgs::{OrderId, OrderParameters, PaymentState}; +pub use lightning_liquidity::lsps0::ser::LSPSDateTime; +pub use lightning_liquidity::lsps1::msgs::{ + LSPS1ChannelInfo, LSPS1OrderId, LSPS1OrderParams, LSPS1PaymentState, +}; pub use bitcoin::{Address, BlockHash, FeeRate, Network, OutPoint, Txid}; @@ -42,8 +45,6 @@ pub use bip39::Mnemonic; pub use vss_client::headers::{VssHeaderProvider, VssHeaderProviderError}; -pub type DateTime = chrono::DateTime; - use crate::UniffiCustomTypeConverter; use crate::builder::sanitize_alias; @@ -125,9 +126,8 @@ impl From for OfferAmount { fn from(ldk_amount: LdkAmount) -> Self { match ldk_amount { LdkAmount::Bitcoin { amount_msats } => OfferAmount::Bitcoin { amount_msats }, - LdkAmount::Currency { iso4217_code, amount } => OfferAmount::Currency { - iso4217_code: iso4217_code.iter().map(|&b| b as char).collect(), - amount, + LdkAmount::Currency { iso4217_code, amount } => { + OfferAmount::Currency { iso4217_code: iso4217_code.as_str().to_owned(), amount } }, } } @@ -1066,13 +1066,71 @@ impl std::fmt::Display for 
Bolt11Invoice { } } +#[derive(Clone, Debug, PartialEq, Eq)] +pub struct LSPS1PaymentInfo { + /// A Lightning payment using BOLT 11. + pub bolt11: Option, + /// An onchain payment. + pub onchain: Option, +} + +#[cfg(feature = "uniffi")] +impl From for LSPS1PaymentInfo { + fn from(value: lightning_liquidity::lsps1::msgs::LSPS1PaymentInfo) -> Self { + LSPS1PaymentInfo { + bolt11: value.bolt11.map(|b| b.into()), + onchain: value.onchain.map(|o| o.into()), + } + } +} + +/// An onchain payment. +#[cfg(feature = "uniffi")] +#[derive(Clone, Debug, PartialEq, Eq)] +pub struct LSPS1OnchainPaymentInfo { + /// Indicates the current state of the payment. + pub state: lightning_liquidity::lsps1::msgs::LSPS1PaymentState, + /// The datetime when the payment option expires. + pub expires_at: LSPSDateTime, + /// The total fee the LSP will charge to open this channel in satoshi. + pub fee_total_sat: u64, + /// The amount the client needs to pay to have the requested channel openend. + pub order_total_sat: u64, + /// An on-chain address the client can send [`Self::order_total_sat`] to to have the channel + /// opened. + pub address: bitcoin::Address, + /// The minimum number of block confirmations that are required for the on-chain payment to be + /// considered confirmed. + pub min_onchain_payment_confirmations: Option, + /// The minimum fee rate for the on-chain payment in case the client wants the payment to be + /// confirmed without a confirmation. + pub min_fee_for_0conf: Arc, + /// The address where the LSP will send the funds if the order fails. + pub refund_onchain_address: Option, +} + +#[cfg(feature = "uniffi")] +impl From for LSPS1OnchainPaymentInfo { + fn from(value: lightning_liquidity::lsps1::msgs::LSPS1OnchainPaymentInfo) -> Self { + Self { + state: value.state, + expires_at: value.expires_at, + fee_total_sat: value.fee_total_sat, + order_total_sat: value.order_total_sat, + address: value.address, + min_onchain_payment_confirmations: value.min_onchain_payment_confirmations, + min_fee_for_0conf: Arc::new(value.min_fee_for_0conf), + refund_onchain_address: value.refund_onchain_address, + } + } +} /// A Lightning payment using BOLT 11. #[derive(Clone, Debug, PartialEq, Eq)] -pub struct Bolt11PaymentInfo { +pub struct LSPS1Bolt11PaymentInfo { /// Indicates the current state of the payment. - pub state: PaymentState, + pub state: LSPS1PaymentState, /// The datetime when the payment option expires. - pub expires_at: chrono::DateTime, + pub expires_at: LSPSDateTime, /// The total fee the LSP will charge to open this channel in satoshi. pub fee_total_sat: u64, /// The amount the client needs to pay to have the requested channel openend. 
@@ -1081,8 +1139,8 @@ pub struct Bolt11PaymentInfo { pub invoice: Arc, } -impl From for Bolt11PaymentInfo { - fn from(info: lightning_liquidity::lsps1::msgs::Bolt11PaymentInfo) -> Self { +impl From for LSPS1Bolt11PaymentInfo { + fn from(info: lightning_liquidity::lsps1::msgs::LSPS1Bolt11PaymentInfo) -> Self { Self { state: info.state, expires_at: info.expires_at, @@ -1093,7 +1151,7 @@ impl From for Bolt11Payment } } -impl UniffiCustomTypeConverter for OrderId { +impl UniffiCustomTypeConverter for LSPS1OrderId { type Builtin = String; fn into_custom(val: Self::Builtin) -> uniffi::Result { @@ -1105,11 +1163,11 @@ impl UniffiCustomTypeConverter for OrderId { } } -impl UniffiCustomTypeConverter for DateTime { +impl UniffiCustomTypeConverter for LSPSDateTime { type Builtin = String; fn into_custom(val: Self::Builtin) -> uniffi::Result { - Ok(DateTime::from_str(&val).map_err(|_| Error::InvalidDateTime)?) + Ok(LSPSDateTime::from_str(&val).map_err(|_| Error::InvalidDateTime)?) } fn from_custom(obj: Self) -> Self::Builtin { diff --git a/src/io/sqlite_store/migrations.rs b/src/io/sqlite_store/migrations.rs index 0486b8a4f..15e60bcc2 100644 --- a/src/io/sqlite_store/migrations.rs +++ b/src/io/sqlite_store/migrations.rs @@ -78,7 +78,7 @@ mod tests { use crate::io::sqlite_store::SqliteStore; use crate::io::test_utils::{do_read_write_remove_list_persist, random_storage_path}; - use lightning::util::persist::KVStore; + use lightning::util::persist::KVStoreSync; use rusqlite::{named_params, Connection}; diff --git a/src/io/sqlite_store/mod.rs b/src/io/sqlite_store/mod.rs index b72db5a2b..4006ab2cc 100644 --- a/src/io/sqlite_store/mod.rs +++ b/src/io/sqlite_store/mod.rs @@ -9,8 +9,9 @@ use crate::io::utils::check_namespace_key_validity; use lightning::io; -use lightning::util::persist::KVStore; -use lightning::util::string::PrintableString; +use lightning::util::persist::KVStoreSync; + +use lightning_types::string::PrintableString; use rusqlite::{named_params, Connection}; @@ -34,7 +35,7 @@ pub const DEFAULT_KV_TABLE_NAME: &str = "ldk_data"; // The current SQLite `user_version`, which we can use if we'd ever need to do a schema migration. const SCHEMA_USER_VERSION: u16 = 2; -/// A [`KVStore`] implementation that writes to and reads from an [SQLite] database. +/// A [`KVStoreSync`] implementation that writes to and reads from an [SQLite] database. 
/// /// [SQLite]: https://sqlite.org pub struct SqliteStore { @@ -129,7 +130,7 @@ impl SqliteStore { } } -impl KVStore for SqliteStore { +impl KVStoreSync for SqliteStore { fn read( &self, primary_namespace: &str, secondary_namespace: &str, key: &str, ) -> io::Result> { @@ -179,7 +180,7 @@ impl KVStore for SqliteStore { } fn write( - &self, primary_namespace: &str, secondary_namespace: &str, key: &str, buf: &[u8], + &self, primary_namespace: &str, secondary_namespace: &str, key: &str, buf: Vec, ) -> io::Result<()> { check_namespace_key_validity(primary_namespace, secondary_namespace, Some(key), "write")?; diff --git a/src/io/test_utils.rs b/src/io/test_utils.rs index df806779e..244dd9cdc 100644 --- a/src/io/test_utils.rs +++ b/src/io/test_utils.rs @@ -9,7 +9,7 @@ use lightning::ln::functional_test_utils::{ connect_block, create_announced_chan_between_nodes, create_chanmon_cfgs, create_dummy_block, create_network, create_node_cfgs, create_node_chanmgrs, send_payment, }; -use lightning::util::persist::{read_channel_monitors, KVStore, KVSTORE_NAMESPACE_KEY_MAX_LEN}; +use lightning::util::persist::{read_channel_monitors, KVStoreSync, KVSTORE_NAMESPACE_KEY_MAX_LEN}; use lightning::events::ClosureReason; use lightning::util::test_utils; @@ -29,23 +29,24 @@ pub(crate) fn random_storage_path() -> PathBuf { temp_path } -pub(crate) fn do_read_write_remove_list_persist(kv_store: &K) { - let data = [42u8; 32]; +pub(crate) fn do_read_write_remove_list_persist(kv_store: &K) { + let data = vec![42u8; 32]; let primary_namespace = "testspace"; let secondary_namespace = "testsubspace"; let key = "testkey"; // Test the basic KVStore operations. - kv_store.write(primary_namespace, secondary_namespace, key, &data).unwrap(); + kv_store.write(primary_namespace, secondary_namespace, key, data.clone()).unwrap(); // Test empty primary/secondary namespaces are allowed, but not empty primary namespace and non-empty // secondary primary_namespace, and not empty key. - kv_store.write("", "", key, &data).unwrap(); - let res = std::panic::catch_unwind(|| kv_store.write("", secondary_namespace, key, &data)); + kv_store.write("", "", key, data.clone()).unwrap(); + let res = + std::panic::catch_unwind(|| kv_store.write("", secondary_namespace, key, data.clone())); assert!(res.is_err()); let res = std::panic::catch_unwind(|| { - kv_store.write(primary_namespace, secondary_namespace, "", &data) + kv_store.write(primary_namespace, secondary_namespace, "", data.clone()) }); assert!(res.is_err()); @@ -63,7 +64,7 @@ pub(crate) fn do_read_write_remove_list_persist(kv_s // Ensure we have no issue operating with primary_namespace/secondary_namespace/key being KVSTORE_NAMESPACE_KEY_MAX_LEN let max_chars: String = std::iter::repeat('A').take(KVSTORE_NAMESPACE_KEY_MAX_LEN).collect(); - kv_store.write(&max_chars, &max_chars, &max_chars, &data).unwrap(); + kv_store.write(&max_chars, &max_chars, &max_chars, data.clone()).unwrap(); let listed_keys = kv_store.list(&max_chars, &max_chars).unwrap(); assert_eq!(listed_keys.len(), 1); @@ -80,7 +81,7 @@ pub(crate) fn do_read_write_remove_list_persist(kv_s // Integration-test the given KVStore implementation. Test relaying a few payments and check that // the persisted data is updated the appropriate number of times. 
-pub(crate) fn do_test_store(store_0: &K, store_1: &K) { +pub(crate) fn do_test_store(store_0: &K, store_1: &K) { let chanmon_cfgs = create_chanmon_cfgs(2); let mut node_cfgs = create_node_cfgs(2, &chanmon_cfgs); let chain_mon_0 = test_utils::TestChainMonitor::new( @@ -145,18 +146,19 @@ pub(crate) fn do_test_store(store_0: &K, store_1: &K) { // Force close because cooperative close doesn't result in any persisted // updates. + let message = "Channel force-closed".to_owned(); nodes[0] .node .force_close_broadcasting_latest_txn( &nodes[0].node.list_channels()[0].channel_id, &nodes[1].node.get_our_node_id(), - "whoops".to_string(), + message.clone(), ) .unwrap(); check_closed_event!( nodes[0], 1, - ClosureReason::HolderForceClosed { broadcasted_latest_txn: Some(true) }, + ClosureReason::HolderForceClosed { broadcasted_latest_txn: Some(true), message }, [nodes[1].node.get_our_node_id()], 100000 ); diff --git a/src/io/utils.rs b/src/io/utils.rs index 06a1017ba..51e7be505 100644 --- a/src/io/utils.rs +++ b/src/io/utils.rs @@ -31,9 +31,10 @@ use lightning::util::persist::{ SCORER_PERSISTENCE_PRIMARY_NAMESPACE, SCORER_PERSISTENCE_SECONDARY_NAMESPACE, }; use lightning::util::ser::{Readable, ReadableArgs, Writeable}; -use lightning::util::string::PrintableString; use lightning::util::sweep::OutputSweeper; +use lightning_types::string::PrintableString; + use bdk_chain::indexer::keychain_txout::ChangeSet as BdkIndexerChangeSet; use bdk_chain::local_chain::ChangeSet as BdkLocalChainChangeSet; use bdk_chain::miniscript::{Descriptor, DescriptorPublicKey}; @@ -251,7 +252,7 @@ pub(crate) fn read_output_sweeper( kv_store, logger.clone(), ); - OutputSweeper::read(&mut reader, args).map_err(|e| { + OutputSweeper::read_with_kv_store_sync(&mut reader, args).map_err(|e| { log_error!(logger, "Failed to deserialize OutputSweeper: {}", e); std::io::Error::new(std::io::ErrorKind::InvalidData, "Failed to deserialize OutputSweeper") }) @@ -286,7 +287,7 @@ where NODE_METRICS_PRIMARY_NAMESPACE, NODE_METRICS_SECONDARY_NAMESPACE, NODE_METRICS_KEY, - &data, + data, ) .map_err(|e| { log_error!( @@ -441,7 +442,7 @@ macro_rules! impl_read_write_change_set_type { L::Target: LdkLogger, { let data = ChangeSetSerWrapper(value).encode(); - kv_store.write($primary_namespace, $secondary_namespace, $key, &data).map_err(|e| { + kv_store.write($primary_namespace, $secondary_namespace, $key, data).map_err(|e| { log_error!( logger, "Writing data to key {}/{}/{} failed due to: {}", diff --git a/src/io/vss_store.rs b/src/io/vss_store.rs index e2cfc3c7b..87f966a9b 100644 --- a/src/io/vss_store.rs +++ b/src/io/vss_store.rs @@ -10,7 +10,7 @@ use crate::runtime::Runtime; use bitcoin::hashes::{sha256, Hash, HashEngine, Hmac, HmacEngine}; use lightning::io::{self, Error, ErrorKind}; -use lightning::util::persist::KVStore; +use lightning::util::persist::KVStoreSync; use prost::Message; use rand::RngCore; #[cfg(test)] @@ -38,7 +38,7 @@ type CustomRetryPolicy = FilteredRetryPolicy< Box bool + 'static + Send + Sync>, >; -/// A [`KVStore`] implementation that writes to and reads from a [VSS](https://github.com/lightningdevkit/vss-server/blob/main/README.md) backend. +/// A [`KVStoreSync`] implementation that writes to and reads from a [VSS](https://github.com/lightningdevkit/vss-server/blob/main/README.md) backend. 
pub struct VssStore { client: VssClient, store_id: String, @@ -127,7 +127,7 @@ impl VssStore { } } -impl KVStore for VssStore { +impl KVStoreSync for VssStore { fn read( &self, primary_namespace: &str, secondary_namespace: &str, key: &str, ) -> io::Result> { @@ -160,11 +160,11 @@ impl KVStore for VssStore { } fn write( - &self, primary_namespace: &str, secondary_namespace: &str, key: &str, buf: &[u8], + &self, primary_namespace: &str, secondary_namespace: &str, key: &str, buf: Vec, ) -> io::Result<()> { check_namespace_key_validity(primary_namespace, secondary_namespace, Some(key), "write")?; let version = -1; - let storable = self.storable_builder.build(buf.to_vec(), version); + let storable = self.storable_builder.build(buf, version); let request = PutObjectRequest { store_id: self.store_id.clone(), global_version: None, diff --git a/src/lib.rs b/src/lib.rs index 9035d5361..160762dd2 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -158,7 +158,7 @@ use lightning::ln::channelmanager::PaymentId; use lightning::ln::msgs::SocketAddress; use lightning::routing::gossip::NodeAlias; -use lightning_background_processor::process_events_async; +use lightning_background_processor::process_events_async_with_kv_store_sync; use bitcoin::secp256k1::PublicKey; @@ -521,6 +521,9 @@ impl Node { let background_chan_man = Arc::clone(&self.channel_manager); let background_gossip_sync = self.gossip_source.as_gossip_sync(); let background_peer_man = Arc::clone(&self.peer_manager); + let background_liquidity_man_opt = + self.liquidity_source.as_ref().map(|ls| ls.liquidity_manager()); + let background_sweeper = Arc::clone(&self.output_sweeper); let background_onion_messenger = Arc::clone(&self.onion_messenger); let background_logger = Arc::clone(&self.logger); let background_error_logger = Arc::clone(&self.logger); @@ -547,7 +550,7 @@ impl Node { }; self.runtime.spawn_background_processor_task(async move { - process_events_async( + process_events_async_with_kv_store_sync( background_persister, |e| background_event_handler.handle_event(e), background_chain_mon, @@ -555,6 +558,8 @@ impl Node { Some(background_onion_messenger), background_gossip_sync, background_peer_man, + background_liquidity_man_opt, + Some(background_sweeper), background_logger, Some(background_scorer), sleeper, @@ -1193,12 +1198,17 @@ impl Node { self.runtime.block_on(async move { if chain_source.is_transaction_based() { chain_source.update_fee_rate_estimates().await?; - chain_source.sync_lightning_wallet(sync_cman, sync_cmon, sync_sweeper).await?; + chain_source + .sync_lightning_wallet(sync_cman, sync_cmon, Arc::clone(&sync_sweeper)) + .await?; chain_source.sync_onchain_wallet().await?; } else { chain_source.update_fee_rate_estimates().await?; - chain_source.poll_and_update_listeners(sync_cman, sync_cmon, sync_sweeper).await?; + chain_source + .poll_and_update_listeners(sync_cman, sync_cmon, Arc::clone(&sync_sweeper)) + .await?; } + let _ = sync_sweeper.regenerate_and_broadcast_spend_if_necessary().await; Ok(()) }) } @@ -1247,35 +1257,16 @@ impl Node { open_channels.iter().find(|c| c.user_channel_id == user_channel_id.0) { if force { - if self.config.anchor_channels_config.as_ref().map_or(false, |acc| { - acc.trusted_peers_no_reserve.contains(&counterparty_node_id) - }) { - self.channel_manager - .force_close_without_broadcasting_txn( - &channel_details.channel_id, - &counterparty_node_id, - force_close_reason.unwrap_or_default(), - ) - .map_err(|e| { - log_error!( - self.logger, - "Failed to force-close channel to trusted peer: {:?}", - e - ); - 
Error::ChannelClosingFailed - })?; - } else { - self.channel_manager - .force_close_broadcasting_latest_txn( - &channel_details.channel_id, - &counterparty_node_id, - force_close_reason.unwrap_or_default(), - ) - .map_err(|e| { - log_error!(self.logger, "Failed to force-close channel: {:?}", e); - Error::ChannelClosingFailed - })?; - } + self.channel_manager + .force_close_broadcasting_latest_txn( + &channel_details.channel_id, + &counterparty_node_id, + force_close_reason.unwrap_or_default(), + ) + .map_err(|e| { + log_error!(self.logger, "Failed to force-close channel: {:?}", e); + Error::ChannelClosingFailed + })?; } else { self.channel_manager .close_channel(&channel_details.channel_id, &counterparty_node_id) @@ -1340,12 +1331,10 @@ impl Node { let mut total_lightning_balance_sats = 0; let mut lightning_balances = Vec::new(); - for (funding_txo, channel_id) in self.chain_monitor.list_monitors() { - match self.chain_monitor.get_monitor(funding_txo) { + for channel_id in self.chain_monitor.list_monitors() { + match self.chain_monitor.get_monitor(channel_id) { Ok(monitor) => { - // unwrap safety: `get_counterparty_node_id` will always be `Some` after 0.0.110 and - // LDK Node 0.1 depended on 0.0.115 already. - let counterparty_node_id = monitor.get_counterparty_node_id().unwrap(); + let counterparty_node_id = monitor.get_counterparty_node_id(); for ldk_balance in monitor.get_claimable_balances() { total_lightning_balance_sats += ldk_balance.claimable_amount_satoshis(); lightning_balances.push(LightningBalance::from_ldk_balance( diff --git a/src/liquidity.rs b/src/liquidity.rs index 6ee8066c1..5d0bf5afe 100644 --- a/src/liquidity.rs +++ b/src/liquidity.rs @@ -14,7 +14,7 @@ use crate::runtime::Runtime; use crate::types::{ChannelManager, KeysManager, LiquidityManager, PeerManager, Wallet}; use crate::{total_anchor_channels_reserve_sats, Config, Error}; -use lightning::events::HTLCDestination; +use lightning::events::HTLCHandlingFailureType; use lightning::ln::channelmanager::{InterceptId, MIN_FINAL_CLTV_EXPIRY_DELTA}; use lightning::ln::msgs::SocketAddress; use lightning::ln::types::ChannelId; @@ -22,14 +22,16 @@ use lightning::routing::router::{RouteHint, RouteHintHop}; use lightning_invoice::{Bolt11Invoice, Bolt11InvoiceDescription, InvoiceBuilder, RoutingFees}; -use lightning_liquidity::events::Event; -use lightning_liquidity::lsps0::ser::RequestId; +use lightning_liquidity::events::LiquidityEvent; +use lightning_liquidity::lsps0::ser::{LSPSDateTime, LSPSRequestId}; use lightning_liquidity::lsps1::client::LSPS1ClientConfig as LdkLSPS1ClientConfig; use lightning_liquidity::lsps1::event::LSPS1ClientEvent; -use lightning_liquidity::lsps1::msgs::{ChannelInfo, LSPS1Options, OrderId, OrderParameters}; +use lightning_liquidity::lsps1::msgs::{ + LSPS1ChannelInfo, LSPS1Options, LSPS1OrderId, LSPS1OrderParams, +}; use lightning_liquidity::lsps2::client::LSPS2ClientConfig as LdkLSPS2ClientConfig; use lightning_liquidity::lsps2::event::{LSPS2ClientEvent, LSPS2ServiceEvent}; -use lightning_liquidity::lsps2::msgs::{OpeningFeeParams, RawOpeningFeeParams}; +use lightning_liquidity::lsps2::msgs::{LSPS2OpeningFeeParams, LSPS2RawOpeningFeeParams}; use lightning_liquidity::lsps2::service::LSPS2ServiceConfig as LdkLSPS2ServiceConfig; use lightning_liquidity::lsps2::utils::compute_opening_fee; use lightning_liquidity::{LiquidityClientConfig, LiquidityServiceConfig}; @@ -41,7 +43,7 @@ use bitcoin::secp256k1::{PublicKey, Secp256k1}; use tokio::sync::oneshot; -use chrono::{DateTime, Utc}; +use chrono::Utc; use 
rand::Rng; @@ -62,10 +64,10 @@ struct LSPS1Client { token: Option, ldk_client_config: LdkLSPS1ClientConfig, pending_opening_params_requests: - Mutex>>, - pending_create_order_requests: Mutex>>, + Mutex>>, + pending_create_order_requests: Mutex>>, pending_check_order_status_requests: - Mutex>>, + Mutex>>, } #[derive(Debug, Clone)] @@ -80,8 +82,8 @@ struct LSPS2Client { lsp_address: SocketAddress, token: Option, ldk_client_config: LdkLSPS2ClientConfig, - pending_fee_requests: Mutex>>, - pending_buy_requests: Mutex>>, + pending_fee_requests: Mutex>>, + pending_buy_requests: Mutex>>, } #[derive(Debug, Clone)] @@ -221,16 +223,22 @@ where pub(crate) fn build(self) -> LiquiditySource { let liquidity_service_config = self.lsps2_service.as_ref().map(|s| { let lsps2_service_config = Some(s.ldk_service_config.clone()); + let lsps5_service_config = None; let advertise_service = s.service_config.advertise_service; - LiquidityServiceConfig { lsps2_service_config, advertise_service } + LiquidityServiceConfig { lsps2_service_config, lsps5_service_config, advertise_service } }); let lsps1_client_config = self.lsps1_client.as_ref().map(|s| s.ldk_client_config.clone()); let lsps2_client_config = self.lsps2_client.as_ref().map(|s| s.ldk_client_config.clone()); - let liquidity_client_config = - Some(LiquidityClientConfig { lsps1_client_config, lsps2_client_config }); + let lsps5_client_config = None; + let liquidity_client_config = Some(LiquidityClientConfig { + lsps1_client_config, + lsps2_client_config, + lsps5_client_config, + }); let liquidity_manager = Arc::new(LiquidityManager::new( + Arc::clone(&self.keys_manager), Arc::clone(&self.keys_manager), Arc::clone(&self.channel_manager), Some(Arc::clone(&self.chain_source)), @@ -275,13 +283,11 @@ where L::Target: LdkLogger, { pub(crate) fn set_peer_manager(&self, peer_manager: Arc) { - *self.peer_manager.write().unwrap() = Some(Arc::clone(&peer_manager)); - let process_msgs_callback = move || peer_manager.process_events(); - self.liquidity_manager.set_process_msgs_callback(process_msgs_callback); + *self.peer_manager.write().unwrap() = Some(peer_manager); } - pub(crate) fn liquidity_manager(&self) -> &LiquidityManager { - self.liquidity_manager.as_ref() + pub(crate) fn liquidity_manager(&self) -> Arc { + Arc::clone(&self.liquidity_manager) } pub(crate) fn get_lsps1_lsp_details(&self) -> Option<(PublicKey, SocketAddress)> { @@ -294,7 +300,7 @@ where pub(crate) async fn handle_next_event(&self) { match self.liquidity_manager.next_event_async().await { - Event::LSPS1Client(LSPS1ClientEvent::SupportedOptionsReady { + LiquidityEvent::LSPS1Client(LSPS1ClientEvent::SupportedOptionsReady { request_id, counterparty_node_id, supported_options, @@ -347,7 +353,7 @@ where ); } }, - Event::LSPS1Client(LSPS1ClientEvent::OrderCreated { + LiquidityEvent::LSPS1Client(LSPS1ClientEvent::OrderCreated { request_id, counterparty_node_id, order_id, @@ -405,7 +411,7 @@ where log_error!(self.logger, "Received unexpected LSPS1Client::OrderCreated event!"); } }, - Event::LSPS1Client(LSPS1ClientEvent::OrderStatus { + LiquidityEvent::LSPS1Client(LSPS1ClientEvent::OrderStatus { request_id, counterparty_node_id, order_id, @@ -463,7 +469,7 @@ where log_error!(self.logger, "Received unexpected LSPS1Client::OrderStatus event!"); } }, - Event::LSPS2Service(LSPS2ServiceEvent::GetInfo { + LiquidityEvent::LSPS2Service(LSPS2ServiceEvent::GetInfo { request_id, counterparty_node_id, token, @@ -484,7 +490,7 @@ where if token != Some(required) { log_error!( self.logger, - "Rejecting LSPS2 request {:?} 
from counterparty {} as the client provided an invalid token.", + "Rejecting LSPS2 request {:?} from counterparty {} as the client provided an invalid token.", request_id, counterparty_node_id ); @@ -502,10 +508,8 @@ where } } - let mut valid_until: DateTime = Utc::now(); - valid_until += LSPS2_GETINFO_REQUEST_EXPIRY; - - let opening_fee_params = RawOpeningFeeParams { + let valid_until = LSPSDateTime(Utc::now() + LSPS2_GETINFO_REQUEST_EXPIRY); + let opening_fee_params = LSPS2RawOpeningFeeParams { min_fee_msat: service_config.min_channel_opening_fee_msat, proportional: service_config.channel_opening_fee_ppm, valid_until, @@ -533,7 +537,7 @@ where return; } }, - Event::LSPS2Service(LSPS2ServiceEvent::BuyRequest { + LiquidityEvent::LSPS2Service(LSPS2ServiceEvent::BuyRequest { request_id, counterparty_node_id, opening_fee_params: _, @@ -600,7 +604,7 @@ where return; } }, - Event::LSPS2Service(LSPS2ServiceEvent::OpenChannel { + LiquidityEvent::LSPS2Service(LSPS2ServiceEvent::OpenChannel { their_network_key, amt_to_forward_msat, opening_fee_msat: _, @@ -674,7 +678,7 @@ where return; } - let mut config = *self.channel_manager.get_current_default_configuration(); + let mut config = self.channel_manager.get_current_config().clone(); // We set these LSP-specific values during Node building, here we're making sure it's actually set. debug_assert_eq!( @@ -714,7 +718,7 @@ where }, } }, - Event::LSPS2Client(LSPS2ClientEvent::OpeningParametersReady { + LiquidityEvent::LSPS2Client(LSPS2ClientEvent::OpeningParametersReady { request_id, counterparty_node_id, opening_fee_params_menu, @@ -764,7 +768,7 @@ where ); } }, - Event::LSPS2Client(LSPS2ClientEvent::InvoiceParametersReady { + LiquidityEvent::LSPS2Client(LSPS2ClientEvent::InvoiceParametersReady { request_id, counterparty_node_id, intercept_scid, @@ -904,7 +908,7 @@ where return Err(Error::LiquidityRequestFailed); } - let order_params = OrderParameters { + let order_params = LSPS1OrderParams { lsp_balance_sat, client_balance_sat, required_channel_confirmations: lsp_limits.min_required_channel_confirmations, @@ -953,7 +957,7 @@ where } pub(crate) async fn lsps1_check_order_status( - &self, order_id: OrderId, + &self, order_id: LSPS1OrderId, ) -> Result { let lsps1_client = self.lsps1_client.as_ref().ok_or(Error::LiquiditySourceUnavailable)?; let client_handler = self.liquidity_manager.lsps1_client_handler().ok_or_else(|| { @@ -1127,7 +1131,7 @@ where } async fn lsps2_send_buy_request( - &self, amount_msat: Option, opening_fee_params: OpeningFeeParams, + &self, amount_msat: Option, opening_fee_params: LSPS2OpeningFeeParams, ) -> Result { let lsps2_client = self.lsps2_client.as_ref().ok_or(Error::LiquiditySourceUnavailable)?; @@ -1280,9 +1284,9 @@ where } } - pub(crate) fn handle_htlc_handling_failed(&self, failed_next_destination: HTLCDestination) { + pub(crate) fn handle_htlc_handling_failed(&self, failure_type: HTLCHandlingFailureType) { if let Some(lsps2_service_handler) = self.liquidity_manager.lsps2_service_handler() { - if let Err(e) = lsps2_service_handler.htlc_handling_failed(failed_next_destination) { + if let Err(e) = lsps2_service_handler.htlc_handling_failed(failure_type) { log_error!( self.logger, "LSPS2 service failed to handle HTLCHandlingFailed event: {:?}", @@ -1316,82 +1320,24 @@ pub(crate) struct LSPS1OpeningParamsResponse { #[derive(Debug, Clone)] pub struct LSPS1OrderStatus { /// The id of the channel order. - pub order_id: OrderId, + pub order_id: LSPS1OrderId, /// The parameters of channel order. 
- pub order_params: OrderParameters, + pub order_params: LSPS1OrderParams, /// Contains details about how to pay for the order. - pub payment_options: PaymentInfo, + pub payment_options: LSPS1PaymentInfo, /// Contains information about the channel state. - pub channel_state: Option, + pub channel_state: Option, } #[cfg(not(feature = "uniffi"))] -type PaymentInfo = lightning_liquidity::lsps1::msgs::PaymentInfo; - -/// Details regarding how to pay for an order. -#[cfg(feature = "uniffi")] -#[derive(Clone, Debug, PartialEq, Eq)] -pub struct PaymentInfo { - /// A Lightning payment using BOLT 11. - pub bolt11: Option, - /// An onchain payment. - pub onchain: Option, -} - -#[cfg(feature = "uniffi")] -impl From for PaymentInfo { - fn from(value: lightning_liquidity::lsps1::msgs::PaymentInfo) -> Self { - PaymentInfo { - bolt11: value.bolt11.map(|b| b.into()), - onchain: value.onchain.map(|o| o.into()), - } - } -} - -/// An onchain payment. -#[cfg(feature = "uniffi")] -#[derive(Clone, Debug, PartialEq, Eq)] -pub struct OnchainPaymentInfo { - /// Indicates the current state of the payment. - pub state: lightning_liquidity::lsps1::msgs::PaymentState, - /// The datetime when the payment option expires. - pub expires_at: chrono::DateTime, - /// The total fee the LSP will charge to open this channel in satoshi. - pub fee_total_sat: u64, - /// The amount the client needs to pay to have the requested channel openend. - pub order_total_sat: u64, - /// An on-chain address the client can send [`Self::order_total_sat`] to to have the channel - /// opened. - pub address: bitcoin::Address, - /// The minimum number of block confirmations that are required for the on-chain payment to be - /// considered confirmed. - pub min_onchain_payment_confirmations: Option, - /// The minimum fee rate for the on-chain payment in case the client wants the payment to be - /// confirmed without a confirmation. - pub min_fee_for_0conf: Arc, - /// The address where the LSP will send the funds if the order fails. - pub refund_onchain_address: Option, -} +type LSPS1PaymentInfo = lightning_liquidity::lsps1::msgs::LSPS1PaymentInfo; #[cfg(feature = "uniffi")] -impl From for OnchainPaymentInfo { - fn from(value: lightning_liquidity::lsps1::msgs::OnchainPaymentInfo) -> Self { - Self { - state: value.state, - expires_at: value.expires_at, - fee_total_sat: value.fee_total_sat, - order_total_sat: value.order_total_sat, - address: value.address, - min_onchain_payment_confirmations: value.min_onchain_payment_confirmations, - min_fee_for_0conf: Arc::new(value.min_fee_for_0conf), - refund_onchain_address: value.refund_onchain_address, - } - } -} +type LSPS1PaymentInfo = crate::ffi::LSPS1PaymentInfo; #[derive(Debug, Clone)] pub(crate) struct LSPS2FeeResponse { - opening_fee_params_menu: Vec, + opening_fee_params_menu: Vec, } #[derive(Debug, Clone)] @@ -1474,7 +1420,7 @@ impl LSPS1Liquidity { } /// Connects to the configured LSP and checks for the status of a previously-placed order. 
- pub fn check_order_status(&self, order_id: OrderId) -> Result { + pub fn check_order_status(&self, order_id: LSPS1OrderId) -> Result { let liquidity_source = self.liquidity_source.as_ref().ok_or(Error::LiquiditySourceUnavailable)?; diff --git a/src/message_handler.rs b/src/message_handler.rs index cebd1ea07..25995a481 100644 --- a/src/message_handler.rs +++ b/src/message_handler.rs @@ -10,6 +10,7 @@ use crate::liquidity::LiquiditySource; use lightning::ln::peer_handler::CustomMessageHandler; use lightning::ln::wire::CustomMessageReader; use lightning::util::logger::Logger; +use lightning::util::ser::LengthLimitedRead; use lightning_types::features::{InitFeatures, NodeFeatures}; @@ -47,7 +48,7 @@ where { type CustomMessage = RawLSPSMessage; - fn read( + fn read( &self, message_type: u16, buffer: &mut RD, ) -> Result, lightning::ln::msgs::DecodeError> { match self { diff --git a/src/payment/bolt11.rs b/src/payment/bolt11.rs index 92d7fc948..7dcb2817c 100644 --- a/src/payment/bolt11.rs +++ b/src/payment/bolt11.rs @@ -20,16 +20,14 @@ use crate::payment::store::{ LSPFeeLimits, PaymentDetails, PaymentDetailsUpdate, PaymentDirection, PaymentKind, PaymentStatus, }; -use crate::payment::SendingParameters; use crate::peer_store::{PeerInfo, PeerStore}; use crate::runtime::Runtime; use crate::types::{ChannelManager, PaymentStore}; -use lightning::ln::bolt11_payment; use lightning::ln::channelmanager::{ - Bolt11InvoiceParameters, PaymentId, RecipientOnionFields, Retry, RetryableSendFailure, + Bolt11InvoiceParameters, Bolt11PaymentError, PaymentId, Retry, RetryableSendFailure, }; -use lightning::routing::router::{PaymentParameters, RouteParameters}; +use lightning::routing::router::{PaymentParameters, RouteParameters, RouteParametersConfig}; use lightning_types::payment::{PaymentHash, PaymentPreimage}; @@ -92,22 +90,17 @@ impl Bolt11Payment { /// Send a payment given an invoice. /// - /// If `sending_parameters` are provided they will override the default as well as the - /// node-wide parameters configured via [`Config::sending_parameters`] on a per-field basis. + /// If `route_parameters` are provided they will override the default as well as the + /// node-wide parameters configured via [`Config::route_parameters`] on a per-field basis. pub fn send( - &self, invoice: &Bolt11Invoice, sending_parameters: Option, + &self, invoice: &Bolt11Invoice, route_parameters: Option, ) -> Result { if !*self.is_running.read().unwrap() { return Err(Error::NotRunning); } let invoice = maybe_deref(invoice); - - let (payment_hash, recipient_onion, mut route_params) = bolt11_payment::payment_parameters_from_invoice(&invoice).map_err(|_| { - log_error!(self.logger, "Failed to send payment due to the given invoice being \"zero-amount\". 
Please use send_using_amount instead."); - Error::InvalidInvoice - })?; - + let payment_hash = PaymentHash(invoice.payment_hash().to_byte_array()); let payment_id = PaymentId(invoice.payment_hash().to_byte_array()); if let Some(payment) = self.payment_store.get(&payment_id) { if payment.status == PaymentStatus::Pending @@ -118,29 +111,16 @@ impl Bolt11Payment { } } - let override_params = - sending_parameters.as_ref().or(self.config.sending_parameters.as_ref()); - if let Some(override_params) = override_params { - override_params - .max_total_routing_fee_msat - .map(|f| route_params.max_total_routing_fee_msat = f.into()); - override_params - .max_total_cltv_expiry_delta - .map(|d| route_params.payment_params.max_total_cltv_expiry_delta = d); - override_params.max_path_count.map(|p| route_params.payment_params.max_path_count = p); - override_params - .max_channel_saturation_power_of_half - .map(|s| route_params.payment_params.max_channel_saturation_power_of_half = s); - }; - - let payment_secret = Some(*invoice.payment_secret()); + let route_parameters = + route_parameters.or(self.config.route_parameters).unwrap_or_default(); let retry_strategy = Retry::Timeout(LDK_PAYMENT_RETRY_TIMEOUT); + let payment_secret = Some(*invoice.payment_secret()); - match self.channel_manager.send_payment( - payment_hash, - recipient_onion, + match self.channel_manager.pay_for_bolt11_invoice( + invoice, payment_id, - route_params, + None, + route_parameters, retry_strategy, ) { Ok(()) => { @@ -166,7 +146,13 @@ impl Bolt11Payment { Ok(payment_id) }, - Err(e) => { + Err(Bolt11PaymentError::InvalidAmount) => { + log_error!(self.logger, + "Failed to send payment due to the given invoice being \"zero-amount\". Please use send_using_amount instead." + ); + return Err(Error::InvalidInvoice); + }, + Err(Bolt11PaymentError::SendingFailed(e)) => { log_error!(self.logger, "Failed to send payment: {:?}", e); match e { RetryableSendFailure::DuplicatePayment => Err(Error::DuplicatePayment), @@ -200,18 +186,17 @@ impl Bolt11Payment { /// This can be used to pay a so-called "zero-amount" invoice, i.e., an invoice that leaves the /// amount paid to be determined by the user. /// - /// If `sending_parameters` are provided they will override the default as well as the - /// node-wide parameters configured via [`Config::sending_parameters`] on a per-field basis. + /// If `route_parameters` are provided they will override the default as well as the + /// node-wide parameters configured via [`Config::route_parameters`] on a per-field basis. 
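Caller-facing, the per-payment override is now expressed with LDK's `RouteParametersConfig` instead of the removed `SendingParameters`. A sketch of sending with overrides, assuming the public `Bolt11Payment::send` shape shown in this hunk and a non-generic `ldk_node::Node` handle; the field values are illustrative, mirroring the integration test later in this patch:

use ldk_node::{Node, NodeError};
use lightning::routing::router::RouteParametersConfig;
use lightning_invoice::Bolt11Invoice;

// Assumed bindings: a running ldk-node `Node` and a decoded BOLT11 invoice.
fn pay_with_overrides(node: &Node, invoice: &Bolt11Invoice) -> Result<(), NodeError> {
	let route_params = RouteParametersConfig {
		max_total_routing_fee_msat: Some(50_000),
		max_total_cltv_expiry_delta: 1008,
		max_path_count: 5,
		max_channel_saturation_power_of_half: 2,
	};
	// The per-call parameters take precedence over `Config::route_parameters`.
	let _payment_id = node.bolt11_payment().send(invoice, Some(route_params))?;
	Ok(())
}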
pub fn send_using_amount( &self, invoice: &Bolt11Invoice, amount_msat: u64, - sending_parameters: Option, + route_parameters: Option, ) -> Result { if !*self.is_running.read().unwrap() { return Err(Error::NotRunning); } let invoice = maybe_deref(invoice); - if let Some(invoice_amount_msat) = invoice.amount_milli_satoshis() { if amount_msat < invoice_amount_msat { log_error!( @@ -232,46 +217,16 @@ impl Bolt11Payment { } } - let payment_secret = invoice.payment_secret(); - let expiry_time = invoice.duration_since_epoch().saturating_add(invoice.expiry_time()); - let mut payment_params = PaymentParameters::from_node_id( - invoice.recover_payee_pub_key(), - invoice.min_final_cltv_expiry_delta() as u32, - ) - .with_expiry_time(expiry_time.as_secs()) - .with_route_hints(invoice.route_hints()) - .map_err(|_| Error::InvalidInvoice)?; - if let Some(features) = invoice.features() { - payment_params = payment_params - .with_bolt11_features(features.clone()) - .map_err(|_| Error::InvalidInvoice)?; - } - let mut route_params = - RouteParameters::from_payment_params_and_value(payment_params, amount_msat); - - let override_params = - sending_parameters.as_ref().or(self.config.sending_parameters.as_ref()); - if let Some(override_params) = override_params { - override_params - .max_total_routing_fee_msat - .map(|f| route_params.max_total_routing_fee_msat = f.into()); - override_params - .max_total_cltv_expiry_delta - .map(|d| route_params.payment_params.max_total_cltv_expiry_delta = d); - override_params.max_path_count.map(|p| route_params.payment_params.max_path_count = p); - override_params - .max_channel_saturation_power_of_half - .map(|s| route_params.payment_params.max_channel_saturation_power_of_half = s); - }; - + let route_parameters = + route_parameters.or(self.config.route_parameters).unwrap_or_default(); let retry_strategy = Retry::Timeout(LDK_PAYMENT_RETRY_TIMEOUT); - let recipient_fields = RecipientOnionFields::secret_only(*payment_secret); + let payment_secret = Some(*invoice.payment_secret()); - match self.channel_manager.send_payment( - payment_hash, - recipient_fields, + match self.channel_manager.pay_for_bolt11_invoice( + invoice, payment_id, - route_params, + Some(amount_msat), + route_parameters, retry_strategy, ) { Ok(()) => { @@ -286,7 +241,7 @@ impl Bolt11Payment { let kind = PaymentKind::Bolt11 { hash: payment_hash, preimage: None, - secret: Some(*payment_secret), + secret: payment_secret, }; let payment = PaymentDetails::new( @@ -301,16 +256,22 @@ impl Bolt11Payment { Ok(payment_id) }, - Err(e) => { + Err(Bolt11PaymentError::InvalidAmount) => { + log_error!( + self.logger, + "Failed to send payment due to amount given being insufficient." + ); + return Err(Error::InvalidInvoice); + }, + Err(Bolt11PaymentError::SendingFailed(e)) => { log_error!(self.logger, "Failed to send payment: {:?}", e); - match e { RetryableSendFailure::DuplicatePayment => Err(Error::DuplicatePayment), _ => { let kind = PaymentKind::Bolt11 { hash: payment_hash, preimage: None, - secret: Some(*payment_secret), + secret: payment_secret, }; let payment = PaymentDetails::new( payment_id, @@ -320,8 +281,8 @@ impl Bolt11Payment { PaymentDirection::Outbound, PaymentStatus::Failed, ); - self.payment_store.insert(payment)?; + self.payment_store.insert(payment)?; Err(Error::PaymentSendingFailed) }, } @@ -798,18 +759,41 @@ impl Bolt11Payment { /// payment. 
To mitigate this issue, channels with available liquidity less than the required /// amount times [`Config::probing_liquidity_limit_multiplier`] won't be used to send /// pre-flight probes. - pub fn send_probes(&self, invoice: &Bolt11Invoice) -> Result<(), Error> { + /// + /// If `route_parameters` are provided they will override the default as well as the + /// node-wide parameters configured via [`Config::route_parameters`] on a per-field basis. + pub fn send_probes( + &self, invoice: &Bolt11Invoice, route_parameters: Option, + ) -> Result<(), Error> { if !*self.is_running.read().unwrap() { return Err(Error::NotRunning); } let invoice = maybe_deref(invoice); + let payment_params = PaymentParameters::from_bolt11_invoice(invoice); - let (_payment_hash, _recipient_onion, route_params) = bolt11_payment::payment_parameters_from_invoice(&invoice).map_err(|_| { + let amount_msat = invoice.amount_milli_satoshis().ok_or_else(|| { log_error!(self.logger, "Failed to send probes due to the given invoice being \"zero-amount\". Please use send_probes_using_amount instead."); Error::InvalidInvoice })?; + let mut route_params = + RouteParameters::from_payment_params_and_value(payment_params, amount_msat); + + if let Some(RouteParametersConfig { + max_total_routing_fee_msat, + max_total_cltv_expiry_delta, + max_path_count, + max_channel_saturation_power_of_half, + }) = route_parameters.as_ref().or(self.config.route_parameters.as_ref()) + { + route_params.max_total_routing_fee_msat = *max_total_routing_fee_msat; + route_params.payment_params.max_total_cltv_expiry_delta = *max_total_cltv_expiry_delta; + route_params.payment_params.max_path_count = *max_path_count; + route_params.payment_params.max_channel_saturation_power_of_half = + *max_channel_saturation_power_of_half; + } + let liquidity_limit_multiplier = Some(self.config.probing_liquidity_limit_multiplier); self.channel_manager @@ -828,36 +812,49 @@ impl Bolt11Payment { /// This can be used to send pre-flight probes for a so-called "zero-amount" invoice, i.e., an /// invoice that leaves the amount paid to be determined by the user. /// + /// If `route_parameters` are provided they will override the default as well as the + /// node-wide parameters configured via [`Config::route_parameters`] on a per-field basis. + /// /// See [`Self::send_probes`] for more information. pub fn send_probes_using_amount( &self, invoice: &Bolt11Invoice, amount_msat: u64, + route_parameters: Option, ) -> Result<(), Error> { if !*self.is_running.read().unwrap() { return Err(Error::NotRunning); } let invoice = maybe_deref(invoice); + let payment_params = PaymentParameters::from_bolt11_invoice(invoice); - let (_payment_hash, _recipient_onion, route_params) = if let Some(invoice_amount_msat) = - invoice.amount_milli_satoshis() - { + if let Some(invoice_amount_msat) = invoice.amount_milli_satoshis() { if amount_msat < invoice_amount_msat { log_error!( self.logger, - "Failed to send probes as the given amount needs to be at least the invoice amount: required {}msat, gave {}msat.", invoice_amount_msat, amount_msat); + "Failed to send probes as the given amount needs to be at least the invoice amount: required {}msat, gave {}msat.", + invoice_amount_msat, + amount_msat + ); return Err(Error::InvalidAmount); } + } - bolt11_payment::payment_parameters_from_invoice(&invoice).map_err(|_| { - log_error!(self.logger, "Failed to send probes due to the given invoice unexpectedly being \"zero-amount\"."); - Error::InvalidInvoice - })? 
- } else { - bolt11_payment::payment_parameters_from_variable_amount_invoice(&invoice, amount_msat).map_err(|_| { - log_error!(self.logger, "Failed to send probes due to the given invoice unexpectedly being not \"zero-amount\"."); - Error::InvalidInvoice - })? - }; + let mut route_params = + RouteParameters::from_payment_params_and_value(payment_params, amount_msat); + + if let Some(RouteParametersConfig { + max_total_routing_fee_msat, + max_total_cltv_expiry_delta, + max_path_count, + max_channel_saturation_power_of_half, + }) = route_parameters.as_ref().or(self.config.route_parameters.as_ref()) + { + route_params.max_total_routing_fee_msat = *max_total_routing_fee_msat; + route_params.payment_params.max_total_cltv_expiry_delta = *max_total_cltv_expiry_delta; + route_params.payment_params.max_path_count = *max_path_count; + route_params.payment_params.max_channel_saturation_power_of_half = + *max_channel_saturation_power_of_half; + } let liquidity_limit_multiplier = Some(self.config.probing_liquidity_limit_multiplier); diff --git a/src/payment/bolt12.rs b/src/payment/bolt12.rs index 8e10b9f4f..4e968deb7 100644 --- a/src/payment/bolt12.rs +++ b/src/payment/bolt12.rs @@ -19,7 +19,9 @@ use crate::types::{ChannelManager, PaymentStore}; use lightning::ln::channelmanager::{PaymentId, Retry}; use lightning::offers::offer::{Amount, Offer as LdkOffer, Quantity}; use lightning::offers::parse::Bolt12SemanticError; -use lightning::util::string::UntrustedString; +use lightning::routing::router::RouteParametersConfig; + +use lightning_types::string::UntrustedString; use rand::RngCore; @@ -82,7 +84,7 @@ impl Bolt12Payment { rand::thread_rng().fill_bytes(&mut random_bytes); let payment_id = PaymentId(random_bytes); let retry_strategy = Retry::Timeout(LDK_PAYMENT_RETRY_TIMEOUT); - let max_total_routing_fee_msat = None; + let route_params_config = RouteParametersConfig::default(); let offer_amount_msat = match offer.amount() { Some(Amount::Bitcoin { amount_msats }) => amount_msats, @@ -103,7 +105,7 @@ impl Bolt12Payment { payer_note.clone(), payment_id, retry_strategy, - max_total_routing_fee_msat, + route_params_config, ) { Ok(()) => { let payee_pubkey = offer.issuer_signing_pubkey(); @@ -185,7 +187,7 @@ impl Bolt12Payment { rand::thread_rng().fill_bytes(&mut random_bytes); let payment_id = PaymentId(random_bytes); let retry_strategy = Retry::Timeout(LDK_PAYMENT_RETRY_TIMEOUT); - let max_total_routing_fee_msat = None; + let route_params_config = RouteParametersConfig::default(); let offer_amount_msat = match offer.amount() { Some(Amount::Bitcoin { amount_msats }) => amount_msats, @@ -210,7 +212,7 @@ impl Bolt12Payment { payer_note.clone(), payment_id, retry_strategy, - max_total_routing_fee_msat, + route_params_config, ) { Ok(()) => { let payee_pubkey = offer.issuer_signing_pubkey(); @@ -273,17 +275,17 @@ impl Bolt12Payment { pub(crate) fn receive_inner( &self, amount_msat: u64, description: &str, expiry_secs: Option, quantity: Option, ) -> Result { - let absolute_expiry = expiry_secs.map(|secs| { - (SystemTime::now() + Duration::from_secs(secs as u64)) - .duration_since(UNIX_EPOCH) - .unwrap() - }); + let mut offer_builder = self.channel_manager.create_offer_builder().map_err(|e| { + log_error!(self.logger, "Failed to create offer builder: {:?}", e); + Error::OfferCreationFailed + })?; - let offer_builder = - self.channel_manager.create_offer_builder(absolute_expiry).map_err(|e| { - log_error!(self.logger, "Failed to create offer builder: {:?}", e); - Error::OfferCreationFailed - })?; + if let 
Some(expiry_secs) = expiry_secs { + let absolute_expiry = (SystemTime::now() + Duration::from_secs(expiry_secs as u64)) + .duration_since(UNIX_EPOCH) + .unwrap(); + offer_builder = offer_builder.absolute_expiry(absolute_expiry); + } let mut offer = offer_builder.amount_msats(amount_msat).description(description.to_string()); @@ -319,17 +321,18 @@ impl Bolt12Payment { pub fn receive_variable_amount( &self, description: &str, expiry_secs: Option, ) -> Result { - let absolute_expiry = expiry_secs.map(|secs| { - (SystemTime::now() + Duration::from_secs(secs as u64)) + let mut offer_builder = self.channel_manager.create_offer_builder().map_err(|e| { + log_error!(self.logger, "Failed to create offer builder: {:?}", e); + Error::OfferCreationFailed + })?; + + if let Some(expiry_secs) = expiry_secs { + let absolute_expiry = (SystemTime::now() + Duration::from_secs(expiry_secs as u64)) .duration_since(UNIX_EPOCH) - .unwrap() - }); + .unwrap(); + offer_builder = offer_builder.absolute_expiry(absolute_expiry); + } - let offer_builder = - self.channel_manager.create_offer_builder(absolute_expiry).map_err(|e| { - log_error!(self.logger, "Failed to create offer builder: {:?}", e); - Error::OfferCreationFailed - })?; let offer = offer_builder.description(description.to_string()).build().map_err(|e| { log_error!(self.logger, "Failed to create offer: {:?}", e); Error::OfferCreationFailed @@ -396,7 +399,7 @@ impl Bolt12Payment { .duration_since(UNIX_EPOCH) .unwrap(); let retry_strategy = Retry::Timeout(LDK_PAYMENT_RETRY_TIMEOUT); - let max_total_routing_fee_msat = None; + let route_params_config = RouteParametersConfig::default(); let mut refund_builder = self .channel_manager @@ -405,7 +408,7 @@ impl Bolt12Payment { absolute_expiry, payment_id, retry_strategy, - max_total_routing_fee_msat, + route_params_config, ) .map_err(|e| { log_error!(self.logger, "Failed to create refund builder: {:?}", e); diff --git a/src/payment/mod.rs b/src/payment/mod.rs index b031e37fd..54f7894dc 100644 --- a/src/payment/mod.rs +++ b/src/payment/mod.rs @@ -22,87 +22,3 @@ pub use store::{ ConfirmationStatus, LSPFeeLimits, PaymentDetails, PaymentDirection, PaymentKind, PaymentStatus, }; pub use unified_qr::{QrPaymentResult, UnifiedQrPayment}; - -/// Represents information used to send a payment. -#[derive(Clone, Debug, PartialEq)] -pub struct SendingParameters { - /// The maximum total fees, in millisatoshi, that may accrue during route finding. - /// - /// This limit also applies to the total fees that may arise while retrying failed payment - /// paths. - /// - /// Note that values below a few sats may result in some paths being spuriously ignored. - #[cfg(not(feature = "uniffi"))] - pub max_total_routing_fee_msat: Option>, - /// The maximum total fees, in millisatoshi, that may accrue during route finding. - /// - /// This limit also applies to the total fees that may arise while retrying failed payment - /// paths. - /// - /// Note that values below a few sats may result in some paths being spuriously ignored. - #[cfg(feature = "uniffi")] - pub max_total_routing_fee_msat: Option, - /// The maximum total CLTV delta we accept for the route. - /// - /// Defaults to [`DEFAULT_MAX_TOTAL_CLTV_EXPIRY_DELTA`]. - /// - /// [`DEFAULT_MAX_TOTAL_CLTV_EXPIRY_DELTA`]: lightning::routing::router::DEFAULT_MAX_TOTAL_CLTV_EXPIRY_DELTA - pub max_total_cltv_expiry_delta: Option, - /// The maximum number of paths that may be used by (MPP) payments. - /// - /// Defaults to [`DEFAULT_MAX_PATH_COUNT`]. 
- /// - /// [`DEFAULT_MAX_PATH_COUNT`]: lightning::routing::router::DEFAULT_MAX_PATH_COUNT - pub max_path_count: Option, - /// Selects the maximum share of a channel's total capacity which will be sent over a channel, - /// as a power of 1/2. - /// - /// A higher value prefers to send the payment using more MPP parts whereas - /// a lower value prefers to send larger MPP parts, potentially saturating channels and - /// increasing failure probability for those paths. - /// - /// Note that this restriction will be relaxed during pathfinding after paths which meet this - /// restriction have been found. While paths which meet this criteria will be searched for, it - /// is ultimately up to the scorer to select them over other paths. - /// - /// Examples: - /// - /// | Value | Max Proportion of Channel Capacity Used | - /// |-------|-----------------------------------------| - /// | 0 | Up to 100% of the channel’s capacity | - /// | 1 | Up to 50% of the channel’s capacity | - /// | 2 | Up to 25% of the channel’s capacity | - /// | 3 | Up to 12.5% of the channel’s capacity | - /// - /// Default value: 2 - pub max_channel_saturation_power_of_half: Option, -} - -/// Represents the possible states of [`SendingParameters::max_total_routing_fee_msat`]. -// -// Required only in bindings as UniFFI can't expose `Option>`. -#[cfg(feature = "uniffi")] -#[derive(Copy, Clone, Debug, PartialEq, Eq)] -pub enum MaxTotalRoutingFeeLimit { - None, - Some { amount_msat: u64 }, -} - -#[cfg(feature = "uniffi")] -impl From for Option { - fn from(value: MaxTotalRoutingFeeLimit) -> Self { - match value { - MaxTotalRoutingFeeLimit::Some { amount_msat } => Some(amount_msat), - MaxTotalRoutingFeeLimit::None => None, - } - } -} - -#[cfg(feature = "uniffi")] -impl From> for MaxTotalRoutingFeeLimit { - fn from(value: Option) -> Self { - value.map_or(MaxTotalRoutingFeeLimit::None, |amount_msat| MaxTotalRoutingFeeLimit::Some { - amount_msat, - }) - } -} diff --git a/src/payment/spontaneous.rs b/src/payment/spontaneous.rs index 3e48fd090..181307a0f 100644 --- a/src/payment/spontaneous.rs +++ b/src/payment/spontaneous.rs @@ -11,11 +11,10 @@ use crate::config::{Config, LDK_PAYMENT_RETRY_TIMEOUT}; use crate::error::Error; use crate::logger::{log_error, log_info, LdkLogger, Logger}; use crate::payment::store::{PaymentDetails, PaymentDirection, PaymentKind, PaymentStatus}; -use crate::payment::SendingParameters; use crate::types::{ChannelManager, CustomTlvRecord, KeysManager, PaymentStore}; use lightning::ln::channelmanager::{PaymentId, RecipientOnionFields, Retry, RetryableSendFailure}; -use lightning::routing::router::{PaymentParameters, RouteParameters}; +use lightning::routing::router::{PaymentParameters, RouteParameters, RouteParametersConfig}; use lightning::sign::EntropySource; use lightning_types::payment::{PaymentHash, PaymentPreimage}; @@ -52,41 +51,43 @@ impl SpontaneousPayment { /// Send a spontaneous aka. "keysend", payment. /// - /// If `sending_parameters` are provided they will override the default as well as the - /// node-wide parameters configured via [`Config::sending_parameters`] on a per-field basis. + /// If `route_parameters` are provided they will override the default as well as the + /// node-wide parameters configured via [`Config::route_parameters`] on a per-field basis. 
pub fn send( - &self, amount_msat: u64, node_id: PublicKey, sending_parameters: Option, + &self, amount_msat: u64, node_id: PublicKey, + route_parameters: Option, ) -> Result { - self.send_inner(amount_msat, node_id, sending_parameters, None, None) + self.send_inner(amount_msat, node_id, route_parameters, None, None) } /// Send a spontaneous payment including a list of custom TLVs. pub fn send_with_custom_tlvs( - &self, amount_msat: u64, node_id: PublicKey, sending_parameters: Option, - custom_tlvs: Vec, + &self, amount_msat: u64, node_id: PublicKey, + route_parameters: Option, custom_tlvs: Vec, ) -> Result { - self.send_inner(amount_msat, node_id, sending_parameters, Some(custom_tlvs), None) + self.send_inner(amount_msat, node_id, route_parameters, Some(custom_tlvs), None) } /// Send a spontaneous payment with custom preimage pub fn send_with_preimage( &self, amount_msat: u64, node_id: PublicKey, preimage: PaymentPreimage, - sending_parameters: Option, + route_parameters: Option, ) -> Result { - self.send_inner(amount_msat, node_id, sending_parameters, None, Some(preimage)) + self.send_inner(amount_msat, node_id, route_parameters, None, Some(preimage)) } /// Send a spontaneous payment with custom preimage including a list of custom TLVs. pub fn send_with_preimage_and_custom_tlvs( &self, amount_msat: u64, node_id: PublicKey, custom_tlvs: Vec, - preimage: PaymentPreimage, sending_parameters: Option, + preimage: PaymentPreimage, route_parameters: Option, ) -> Result { - self.send_inner(amount_msat, node_id, sending_parameters, Some(custom_tlvs), Some(preimage)) + self.send_inner(amount_msat, node_id, route_parameters, Some(custom_tlvs), Some(preimage)) } fn send_inner( - &self, amount_msat: u64, node_id: PublicKey, sending_parameters: Option, - custom_tlvs: Option>, preimage: Option, + &self, amount_msat: u64, node_id: PublicKey, + route_parameters: Option, custom_tlvs: Option>, + preimage: Option, ) -> Result { if !*self.is_running.read().unwrap() { return Err(Error::NotRunning); @@ -112,20 +113,19 @@ impl SpontaneousPayment { amount_msat, ); - let override_params = - sending_parameters.as_ref().or(self.config.sending_parameters.as_ref()); - if let Some(override_params) = override_params { - override_params - .max_total_routing_fee_msat - .map(|f| route_params.max_total_routing_fee_msat = f.into()); - override_params - .max_total_cltv_expiry_delta - .map(|d| route_params.payment_params.max_total_cltv_expiry_delta = d); - override_params.max_path_count.map(|p| route_params.payment_params.max_path_count = p); - override_params - .max_channel_saturation_power_of_half - .map(|s| route_params.payment_params.max_channel_saturation_power_of_half = s); - }; + if let Some(RouteParametersConfig { + max_total_routing_fee_msat, + max_total_cltv_expiry_delta, + max_path_count, + max_channel_saturation_power_of_half, + }) = route_parameters.as_ref().or(self.config.route_parameters.as_ref()) + { + route_params.max_total_routing_fee_msat = *max_total_routing_fee_msat; + route_params.payment_params.max_total_cltv_expiry_delta = *max_total_cltv_expiry_delta; + route_params.payment_params.max_path_count = *max_path_count; + route_params.payment_params.max_channel_saturation_power_of_half = + *max_channel_saturation_power_of_half; + } let recipient_fields = match custom_tlvs { Some(tlvs) => RecipientOnionFields::spontaneous_empty() diff --git a/src/payment/store.rs b/src/payment/store.rs index 75b2b1b2a..568394b48 100644 --- a/src/payment/store.rs +++ b/src/payment/store.rs @@ -9,13 +9,13 @@ use 
lightning::ln::channelmanager::PaymentId; use lightning::ln::msgs::DecodeError; use lightning::offers::offer::OfferId; use lightning::util::ser::{Readable, Writeable}; -use lightning::util::string::UntrustedString; use lightning::{ _init_and_read_len_prefixed_tlv_fields, impl_writeable_tlv_based, impl_writeable_tlv_based_enum, write_tlv_fields, }; use lightning_types::payment::{PaymentHash, PaymentPreimage, PaymentSecret}; +use lightning_types::string::UntrustedString; use bitcoin::{BlockHash, Txid}; diff --git a/src/peer_store.rs b/src/peer_store.rs index 4d1c65157..cf3755d23 100644 --- a/src/peer_store.rs +++ b/src/peer_store.rs @@ -73,7 +73,7 @@ where PEER_INFO_PERSISTENCE_PRIMARY_NAMESPACE, PEER_INFO_PERSISTENCE_SECONDARY_NAMESPACE, PEER_INFO_PERSISTENCE_KEY, - &data, + data, ) .map_err(|e| { log_error!( diff --git a/src/types.rs b/src/types.rs index 3103ead3f..b9bc1c317 100644 --- a/src/types.rs +++ b/src/types.rs @@ -25,20 +25,23 @@ use lightning::routing::gossip; use lightning::routing::router::DefaultRouter; use lightning::routing::scoring::{ProbabilisticScorer, ProbabilisticScoringFeeParameters}; use lightning::sign::InMemorySigner; -use lightning::util::persist::KVStore; +use lightning::util::persist::KVStoreSync; +use lightning::util::persist::KVStoreSyncWrapper; use lightning::util::ser::{Readable, Writeable, Writer}; -use lightning::util::sweep::OutputSweeper; +use lightning::util::sweep::OutputSweeper; use lightning_block_sync::gossip::{GossipVerifier, UtxoSource}; use lightning_net_tokio::SocketDescriptor; +use lightning_liquidity::utils::time::DefaultTimeProvider; + use bitcoin::secp256k1::PublicKey; use bitcoin::OutPoint; use std::sync::{Arc, Mutex}; -pub(crate) type DynStore = dyn KVStore + Sync + Send; +pub(crate) type DynStore = dyn KVStoreSync + Sync + Send; pub(crate) type ChainMonitor = chainmonitor::ChainMonitor< InMemorySigner, @@ -47,6 +50,7 @@ pub(crate) type ChainMonitor = chainmonitor::ChainMonitor< Arc, Arc, Arc, + Arc, >; pub(crate) type PeerManager = lightning::ln::peer_handler::PeerManager< @@ -57,10 +61,16 @@ pub(crate) type PeerManager = lightning::ln::peer_handler::PeerManager< Arc, Arc>>, Arc, + Arc, >; -pub(crate) type LiquidityManager = - lightning_liquidity::LiquidityManager, Arc, Arc>; +pub(crate) type LiquidityManager = lightning_liquidity::LiquidityManager< + Arc, + Arc, + Arc, + Arc, + Arc, +>; pub(crate) type ChannelManager = lightning::ln::channelmanager::ChannelManager< Arc, @@ -76,11 +86,8 @@ pub(crate) type ChannelManager = lightning::ln::channelmanager::ChannelManager< pub(crate) type Broadcaster = crate::tx_broadcaster::TransactionBroadcaster>; -pub(crate) type Wallet = - crate::wallet::Wallet, Arc, Arc>; - -pub(crate) type KeysManager = - crate::wallet::WalletKeysManager, Arc, Arc>; +pub(crate) type Wallet = crate::wallet::Wallet; +pub(crate) type KeysManager = crate::wallet::WalletKeysManager; pub(crate) type Router = DefaultRouter< Arc, @@ -132,7 +139,7 @@ pub(crate) type Sweeper = OutputSweeper< Arc, Arc, Arc, - Arc, + KVStoreSyncWrapper>, Arc, Arc, >; diff --git a/src/wallet/mod.rs b/src/wallet/mod.rs index fbac1d1b6..c03353ef8 100644 --- a/src/wallet/mod.rs +++ b/src/wallet/mod.rs @@ -8,12 +8,12 @@ use persist::KVStoreWalletPersister; use crate::config::Config; -use crate::logger::{log_debug, log_error, log_info, log_trace, LdkLogger}; +use crate::logger::{log_debug, log_error, log_info, log_trace, LdkLogger, Logger}; -use crate::fee_estimator::{ConfirmationTarget, FeeEstimator}; +use crate::fee_estimator::{ConfirmationTarget, 
FeeEstimator, OnchainFeeEstimator}; use crate::payment::store::ConfirmationStatus; use crate::payment::{PaymentDetails, PaymentDirection, PaymentStatus}; -use crate::types::PaymentStore; +use crate::types::{Broadcaster, PaymentStore}; use crate::Error; use lightning::chain::chaininterface::BroadcasterInterface; @@ -23,11 +23,11 @@ use lightning::chain::{BestBlock, Listen}; use lightning::events::bump_transaction::{Utxo, WalletSource}; use lightning::ln::channelmanager::PaymentId; use lightning::ln::inbound_payment::ExpandedKey; -use lightning::ln::msgs::{DecodeError, UnsignedGossipMessage}; +use lightning::ln::msgs::UnsignedGossipMessage; use lightning::ln::script::ShutdownScript; use lightning::sign::{ ChangeDestinationSource, EntropySource, InMemorySigner, KeysManager, NodeSigner, OutputSpender, - Recipient, SignerProvider, SpendableOutputDescriptor, + PeerStorageKey, Recipient, SignerProvider, SpendableOutputDescriptor, }; use lightning::util::message_signing; @@ -44,13 +44,14 @@ use bitcoin::key::XOnlyPublicKey; use bitcoin::psbt::Psbt; use bitcoin::secp256k1::ecdh::SharedSecret; use bitcoin::secp256k1::ecdsa::{RecoverableSignature, Signature}; -use bitcoin::secp256k1::{PublicKey, Scalar, Secp256k1, SecretKey, Signing}; +use bitcoin::secp256k1::{All, PublicKey, Scalar, Secp256k1, SecretKey}; use bitcoin::{ Address, Amount, FeeRate, Network, ScriptBuf, Transaction, TxOut, Txid, WPubkeyHash, WitnessProgram, WitnessVersion, }; -use std::ops::Deref; +use std::future::Future; +use std::pin::Pin; use std::str::FromStr; use std::sync::{Arc, Mutex}; @@ -63,32 +64,23 @@ pub(crate) enum OnchainSendAmount { pub(crate) mod persist; pub(crate) mod ser; -pub(crate) struct Wallet -where - B::Target: BroadcasterInterface, - E::Target: FeeEstimator, - L::Target: LdkLogger, -{ +pub(crate) struct Wallet { // A BDK on-chain wallet. inner: Mutex>, persister: Mutex, - broadcaster: B, - fee_estimator: E, + broadcaster: Arc, + fee_estimator: Arc, payment_store: Arc, config: Arc, - logger: L, + logger: Arc, } -impl Wallet -where - B::Target: BroadcasterInterface, - E::Target: FeeEstimator, - L::Target: LdkLogger, -{ +impl Wallet { pub(crate) fn new( wallet: bdk_wallet::PersistedWallet, - wallet_persister: KVStoreWalletPersister, broadcaster: B, fee_estimator: E, - payment_store: Arc, config: Arc, logger: L, + wallet_persister: KVStoreWalletPersister, broadcaster: Arc, + fee_estimator: Arc, payment_store: Arc, + config: Arc, logger: Arc, ) -> Self { let inner = Mutex::new(wallet); let persister = Mutex::new(wallet_persister); @@ -318,7 +310,7 @@ where #[cfg(debug_assertions)] if balance.confirmed != Amount::ZERO { debug_assert!( - self.list_confirmed_utxos().map_or(false, |v| !v.is_empty()), + self.list_confirmed_utxos_inner().map_or(false, |v| !v.is_empty()), "Confirmed amounts should always be available for Anchor spending" ); } @@ -568,80 +560,8 @@ where Ok(txid) } -} - -impl Listen for Wallet -where - B::Target: BroadcasterInterface, - E::Target: FeeEstimator, - L::Target: LdkLogger, -{ - fn filtered_block_connected( - &self, _header: &bitcoin::block::Header, - _txdata: &lightning::chain::transaction::TransactionData, _height: u32, - ) { - debug_assert!(false, "Syncing filtered blocks is currently not supported"); - // As far as we can tell this would be a no-op anyways as we don't have to tell BDK about - // the header chain of intermediate blocks. According to the BDK team, it's sufficient to - // only connect full blocks starting from the last point of disagreement. 
- } - - fn block_connected(&self, block: &bitcoin::Block, height: u32) { - let mut locked_wallet = self.inner.lock().unwrap(); - - let pre_checkpoint = locked_wallet.latest_checkpoint(); - if pre_checkpoint.height() != height - 1 - || pre_checkpoint.hash() != block.header.prev_blockhash - { - log_debug!( - self.logger, - "Detected reorg while applying a connected block to on-chain wallet: new block with hash {} at height {}", - block.header.block_hash(), - height - ); - } - match locked_wallet.apply_block(block, height) { - Ok(()) => { - if let Err(e) = self.update_payment_store(&mut *locked_wallet) { - log_error!(self.logger, "Failed to update payment store: {}", e); - return; - } - }, - Err(e) => { - log_error!( - self.logger, - "Failed to apply connected block to on-chain wallet: {}", - e - ); - return; - }, - }; - - let mut locked_persister = self.persister.lock().unwrap(); - match locked_wallet.persist(&mut locked_persister) { - Ok(_) => (), - Err(e) => { - log_error!(self.logger, "Failed to persist on-chain wallet: {}", e); - return; - }, - }; - } - - fn block_disconnected(&self, _header: &bitcoin::block::Header, _height: u32) { - // This is a no-op as we don't have to tell BDK about disconnections. According to the BDK - // team, it's sufficient in case of a reorg to always connect blocks starting from the last - // point of disagreement. - } -} - -impl WalletSource for Wallet -where - B::Target: BroadcasterInterface, - E::Target: FeeEstimator, - L::Target: LdkLogger, -{ - fn list_confirmed_utxos(&self) -> Result, ()> { + fn list_confirmed_utxos_inner(&self) -> Result, ()> { let locked_wallet = self.inner.lock().unwrap(); let mut utxos = Vec::new(); let confirmed_txs: Vec = locked_wallet @@ -733,7 +653,7 @@ where Ok(utxos) } - fn get_change_script(&self) -> Result { + fn get_change_script_inner(&self) -> Result { let mut locked_wallet = self.inner.lock().unwrap(); let mut locked_persister = self.persister.lock().unwrap(); @@ -745,7 +665,7 @@ where Ok(address_info.address.script_pubkey()) } - fn sign_psbt(&self, mut psbt: Psbt) -> Result { + fn sign_psbt_inner(&self, mut psbt: Psbt) -> Result { let locked_wallet = self.inner.lock().unwrap(); // While BDK populates both `witness_utxo` and `non_witness_utxo` fields, LDK does not. As @@ -775,32 +695,102 @@ where } } +impl Listen for Wallet { + fn filtered_block_connected( + &self, _header: &bitcoin::block::Header, + _txdata: &lightning::chain::transaction::TransactionData, _height: u32, + ) { + debug_assert!(false, "Syncing filtered blocks is currently not supported"); + // As far as we can tell this would be a no-op anyways as we don't have to tell BDK about + // the header chain of intermediate blocks. According to the BDK team, it's sufficient to + // only connect full blocks starting from the last point of disagreement. 
+ } + + fn block_connected(&self, block: &bitcoin::Block, height: u32) { + let mut locked_wallet = self.inner.lock().unwrap(); + + let pre_checkpoint = locked_wallet.latest_checkpoint(); + if pre_checkpoint.height() != height - 1 + || pre_checkpoint.hash() != block.header.prev_blockhash + { + log_debug!( + self.logger, + "Detected reorg while applying a connected block to on-chain wallet: new block with hash {} at height {}", + block.header.block_hash(), + height + ); + } + + match locked_wallet.apply_block(block, height) { + Ok(()) => { + if let Err(e) = self.update_payment_store(&mut *locked_wallet) { + log_error!(self.logger, "Failed to update payment store: {}", e); + return; + } + }, + Err(e) => { + log_error!( + self.logger, + "Failed to apply connected block to on-chain wallet: {}", + e + ); + return; + }, + }; + + let mut locked_persister = self.persister.lock().unwrap(); + match locked_wallet.persist(&mut locked_persister) { + Ok(_) => (), + Err(e) => { + log_error!(self.logger, "Failed to persist on-chain wallet: {}", e); + return; + }, + }; + } + + fn blocks_disconnected(&self, _fork_point_block: BestBlock) { + // This is a no-op as we don't have to tell BDK about disconnections. According to the BDK + // team, it's sufficient in case of a reorg to always connect blocks starting from the last + // point of disagreement. + } +} + +impl WalletSource for Wallet { + fn list_confirmed_utxos<'a>( + &'a self, + ) -> Pin, ()>> + Send + 'a>> { + Box::pin(async move { self.list_confirmed_utxos_inner() }) + } + + fn get_change_script<'a>( + &'a self, + ) -> Pin> + Send + 'a>> { + Box::pin(async move { self.get_change_script_inner() }) + } + + fn sign_psbt<'a>( + &'a self, psbt: Psbt, + ) -> Pin> + Send + 'a>> { + Box::pin(async move { self.sign_psbt_inner(psbt) }) + } +} + /// Similar to [`KeysManager`], but overrides the destination and shutdown scripts so they are /// directly spendable by the BDK wallet. -pub(crate) struct WalletKeysManager -where - B::Target: BroadcasterInterface, - E::Target: FeeEstimator, - L::Target: LdkLogger, -{ +pub(crate) struct WalletKeysManager { inner: KeysManager, - wallet: Arc>, - logger: L, + wallet: Arc, + logger: Arc, } -impl WalletKeysManager -where - B::Target: BroadcasterInterface, - E::Target: FeeEstimator, - L::Target: LdkLogger, -{ +impl WalletKeysManager { /// Constructs a `WalletKeysManager` that overrides the destination and shutdown scripts. /// /// See [`KeysManager::new`] for more information on `seed`, `starting_time_secs`, and /// `starting_time_nanos`. 
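The `WalletSource` and `ChangeDestinationSource` implementations in this hunk now return pinned, boxed futures and delegate to the synchronous `*_inner` helpers. A minimal, self-contained sketch of that wrapping pattern; the type and method names here are illustrative only, not taken from the codebase:

use std::future::Future;
use std::pin::Pin;

struct SyncBacked {
	value: u32,
}

impl SyncBacked {
	// The existing synchronous logic stays untouched...
	fn compute_inner(&self) -> Result<u32, ()> {
		Ok(self.value)
	}

	// ...and is surfaced through an async-shaped method by pinning an
	// immediately-ready future, mirroring the `Box::pin(async move { ... })`
	// calls above.
	fn compute<'a>(&'a self) -> Pin<Box<dyn Future<Output = Result<u32, ()>> + Send + 'a>> {
		Box::pin(async move { self.compute_inner() })
	}
}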
pub fn new( - seed: &[u8; 32], starting_time_secs: u64, starting_time_nanos: u32, - wallet: Arc>, logger: L, + seed: &[u8; 32], starting_time_secs: u64, starting_time_nanos: u32, wallet: Arc, + logger: Arc, ) -> Self { let inner = KeysManager::new(seed, starting_time_secs, starting_time_nanos); Self { inner, wallet, logger } @@ -819,12 +809,7 @@ where } } -impl NodeSigner for WalletKeysManager -where - B::Target: BroadcasterInterface, - E::Target: FeeEstimator, - L::Target: LdkLogger, -{ +impl NodeSigner for WalletKeysManager { fn get_node_id(&self, recipient: Recipient) -> Result { self.inner.get_node_id(recipient) } @@ -835,8 +820,16 @@ where self.inner.ecdh(recipient, other_key, tweak) } - fn get_inbound_payment_key(&self) -> ExpandedKey { - self.inner.get_inbound_payment_key() + fn get_expanded_key(&self) -> ExpandedKey { + self.inner.get_expanded_key() + } + + fn get_peer_storage_key(&self) -> PeerStorageKey { + self.inner.get_peer_storage_key() + } + + fn get_receive_auth_key(&self) -> lightning::sign::ReceiveAuthKey { + self.inner.get_receive_auth_key() } fn sign_invoice( @@ -854,19 +847,17 @@ where ) -> Result { self.inner.sign_bolt12_invoice(invoice) } + fn sign_message(&self, msg: &[u8]) -> Result { + self.inner.sign_message(msg) + } } -impl OutputSpender for WalletKeysManager -where - B::Target: BroadcasterInterface, - E::Target: FeeEstimator, - L::Target: LdkLogger, -{ +impl OutputSpender for WalletKeysManager { /// See [`KeysManager::spend_spendable_outputs`] for documentation on this method. - fn spend_spendable_outputs( + fn spend_spendable_outputs( &self, descriptors: &[&SpendableOutputDescriptor], outputs: Vec, change_destination_script: ScriptBuf, feerate_sat_per_1000_weight: u32, - locktime: Option, secp_ctx: &Secp256k1, + locktime: Option, secp_ctx: &Secp256k1, ) -> Result { self.inner.spend_spendable_outputs( descriptors, @@ -879,39 +870,21 @@ where } } -impl EntropySource for WalletKeysManager -where - B::Target: BroadcasterInterface, - E::Target: FeeEstimator, - L::Target: LdkLogger, -{ +impl EntropySource for WalletKeysManager { fn get_secure_random_bytes(&self) -> [u8; 32] { self.inner.get_secure_random_bytes() } } -impl SignerProvider for WalletKeysManager -where - B::Target: BroadcasterInterface, - E::Target: FeeEstimator, - L::Target: LdkLogger, -{ +impl SignerProvider for WalletKeysManager { type EcdsaSigner = InMemorySigner; - fn generate_channel_keys_id( - &self, inbound: bool, channel_value_satoshis: u64, user_channel_id: u128, - ) -> [u8; 32] { - self.inner.generate_channel_keys_id(inbound, channel_value_satoshis, user_channel_id) - } - - fn derive_channel_signer( - &self, channel_value_satoshis: u64, channel_keys_id: [u8; 32], - ) -> Self::EcdsaSigner { - self.inner.derive_channel_signer(channel_value_satoshis, channel_keys_id) + fn generate_channel_keys_id(&self, inbound: bool, user_channel_id: u128) -> [u8; 32] { + self.inner.generate_channel_keys_id(inbound, user_channel_id) } - fn read_chan_signer(&self, reader: &[u8]) -> Result { - self.inner.read_chan_signer(reader) + fn derive_channel_signer(&self, channel_keys_id: [u8; 32]) -> Self::EcdsaSigner { + self.inner.derive_channel_signer(channel_keys_id) } fn get_destination_script(&self, _channel_keys_id: [u8; 32]) -> Result { @@ -941,16 +914,20 @@ where } } -impl ChangeDestinationSource for WalletKeysManager -where - B::Target: BroadcasterInterface, - E::Target: FeeEstimator, - L::Target: LdkLogger, -{ - fn get_change_destination_script(&self) -> Result { - let address = 
self.wallet.get_new_internal_address().map_err(|e| { - log_error!(self.logger, "Failed to retrieve new address from wallet: {}", e); - })?; - Ok(address.script_pubkey()) +impl ChangeDestinationSource for WalletKeysManager { + fn get_change_destination_script<'a>( + &self, + ) -> Pin> + Send + 'a>> { + let wallet = Arc::clone(&self.wallet); + let logger = Arc::clone(&self.logger); + Box::pin(async move { + wallet + .get_new_internal_address() + .map_err(|e| { + log_error!(logger, "Failed to retrieve new address from wallet: {}", e); + }) + .map(|addr| addr.script_pubkey()) + .map_err(|_| ()) + }) } } diff --git a/tests/common/mod.rs b/tests/common/mod.rs index 780e9bbf4..f5bfe76fc 100644 --- a/tests/common/mod.rs +++ b/tests/common/mod.rs @@ -21,7 +21,7 @@ use ldk_node::{ use lightning::ln::msgs::SocketAddress; use lightning::routing::gossip::NodeAlias; -use lightning::util::persist::KVStore; +use lightning::util::persist::KVStoreSync; use lightning::util::test_utils::TestStore; use lightning_invoice::{Bolt11InvoiceDescription, Description}; @@ -1236,7 +1236,7 @@ impl TestSyncStore { } } -impl KVStore for TestSyncStore { +impl KVStoreSync for TestSyncStore { fn read( &self, primary_namespace: &str, secondary_namespace: &str, key: &str, ) -> lightning::io::Result> { @@ -1263,12 +1263,14 @@ impl KVStore for TestSyncStore { } fn write( - &self, primary_namespace: &str, secondary_namespace: &str, key: &str, buf: &[u8], + &self, primary_namespace: &str, secondary_namespace: &str, key: &str, buf: Vec, ) -> lightning::io::Result<()> { let _guard = self.serializer.write().unwrap(); - let fs_res = self.fs_store.write(primary_namespace, secondary_namespace, key, buf); - let sqlite_res = self.sqlite_store.write(primary_namespace, secondary_namespace, key, buf); - let test_res = self.test_store.write(primary_namespace, secondary_namespace, key, buf); + let fs_res = self.fs_store.write(primary_namespace, secondary_namespace, key, buf.clone()); + let sqlite_res = + self.sqlite_store.write(primary_namespace, secondary_namespace, key, buf.clone()); + let test_res = + self.test_store.write(primary_namespace, secondary_namespace, key, buf.clone()); assert!(self .do_list(primary_namespace, secondary_namespace) diff --git a/tests/integration_tests_rust.rs b/tests/integration_tests_rust.rs index 0932116ef..fa88fe0cc 100644 --- a/tests/integration_tests_rust.rs +++ b/tests/integration_tests_rust.rs @@ -22,13 +22,14 @@ use ldk_node::config::EsploraSyncConfig; use ldk_node::liquidity::LSPS2ServiceConfig; use ldk_node::payment::{ ConfirmationStatus, PaymentDetails, PaymentDirection, PaymentKind, PaymentStatus, - QrPaymentResult, SendingParameters, + QrPaymentResult, }; use ldk_node::{Builder, Event, NodeError}; use lightning::ln::channelmanager::PaymentId; use lightning::routing::gossip::{NodeAlias, NodeId}; -use lightning::util::persist::KVStore; +use lightning::routing::router::RouteParametersConfig; +use lightning::util::persist::KVStoreSync; use lightning_invoice::{Bolt11InvoiceDescription, Description}; use lightning_types::payment::{PaymentHash, PaymentPreimage}; @@ -212,11 +213,11 @@ fn multi_hop_sending() { // Sleep a bit for gossip to propagate. 
std::thread::sleep(std::time::Duration::from_secs(1)); - let sending_params = SendingParameters { - max_total_routing_fee_msat: Some(Some(75_000).into()), - max_total_cltv_expiry_delta: Some(1000), - max_path_count: Some(10), - max_channel_saturation_power_of_half: Some(2), + let route_params = RouteParametersConfig { + max_total_routing_fee_msat: Some(75_000), + max_total_cltv_expiry_delta: 1000, + max_path_count: 10, + max_channel_saturation_power_of_half: 2, }; let invoice_description = @@ -225,7 +226,7 @@ fn multi_hop_sending() { .bolt11_payment() .receive(2_500_000, &invoice_description.clone().into(), 9217) .unwrap(); - nodes[0].bolt11_payment().send(&invoice, Some(sending_params)).unwrap(); + nodes[0].bolt11_payment().send(&invoice, Some(route_params)).unwrap(); expect_event!(nodes[1], PaymentForwarded); @@ -246,7 +247,7 @@ fn start_stop_reinit() { let esplora_url = format!("http://{}", electrsd.esplora_url.as_ref().unwrap()); - let test_sync_store: Arc = + let test_sync_store: Arc = Arc::new(TestSyncStore::new(config.node_config.storage_dir_path.clone().into())); let sync_config = EsploraSyncConfig { background_sync_config: None }; From 4b45d7c1e6f3494e23f005ce04eb8761d06ec6af Mon Sep 17 00:00:00 2001 From: Elias Rohrer Date: Fri, 15 Aug 2025 09:42:47 +0200 Subject: [PATCH 03/27] Switch to use `rustls-ring` everywhere We switch to use `rustls-ring` everywhere, which is necessary for Swift builds, but also generally makes our lives easier. --- .github/workflows/kotlin.yml | 3 --- Cargo.toml | 12 ++++++------ src/builder.rs | 2 +- 3 files changed, 7 insertions(+), 10 deletions(-) diff --git a/.github/workflows/kotlin.yml b/.github/workflows/kotlin.yml index 5cb1b8c27..a1711ba49 100644 --- a/.github/workflows/kotlin.yml +++ b/.github/workflows/kotlin.yml @@ -39,9 +39,6 @@ jobs: - name: Generate Kotlin JVM run: ./scripts/uniffi_bindgen_generate_kotlin.sh - - name: Install `bindgen-cli` - run: cargo install --force bindgen-cli - - name: Generate Kotlin Android run: ./scripts/uniffi_bindgen_generate_kotlin_android.sh diff --git a/Cargo.toml b/Cargo.toml index aaaa55f39..9010ad6d5 100755 --- a/Cargo.toml +++ b/Cargo.toml @@ -36,7 +36,7 @@ default = [] #lightning-background-processor = { version = "0.1.0" } #lightning-rapid-gossip-sync = { version = "0.1.0" } #lightning-block-sync = { version = "0.1.0", features = ["rest-client", "rpc-client", "tokio"] } -#lightning-transaction-sync = { version = "0.1.0", features = ["esplora-async-https", "time", "electrum"] } +#lightning-transaction-sync = { version = "0.1.0", features = ["esplora-async-https", "time", "electrum-rustls-ring"] } #lightning-liquidity = { version = "0.1.0", features = ["std"] } #lightning-macros = { version = "0.1.0" } @@ -48,7 +48,7 @@ default = [] #lightning-background-processor = { git = "https://github.com/lightningdevkit/rust-lightning", branch = "main" } #lightning-rapid-gossip-sync = { git = "https://github.com/lightningdevkit/rust-lightning", branch = "main" } #lightning-block-sync = { git = "https://github.com/lightningdevkit/rust-lightning", branch = "main", features = ["rest-client", "rpc-client", "tokio"] } -#lightning-transaction-sync = { git = "https://github.com/lightningdevkit/rust-lightning", branch = "main", features = ["esplora-async-https", "electrum", "time"] } +#lightning-transaction-sync = { git = "https://github.com/lightningdevkit/rust-lightning", branch = "main", features = ["esplora-async-https", "electrum-rustls-ring", "time"] } #lightning-liquidity = { git = 
"https://github.com/lightningdevkit/rust-lightning", branch = "main" } #lightning-macros = { git = "https://github.com/lightningdevkit/rust-lightning", branch = "main" } @@ -60,7 +60,7 @@ lightning-persister = { git = "https://github.com/lightningdevkit/rust-lightning lightning-background-processor = { git = "https://github.com/lightningdevkit/rust-lightning", rev = "4e32d85249359d8ef8ece97d89848e40154363ab" } lightning-rapid-gossip-sync = { git = "https://github.com/lightningdevkit/rust-lightning", rev = "4e32d85249359d8ef8ece97d89848e40154363ab" } lightning-block-sync = { git = "https://github.com/lightningdevkit/rust-lightning", rev = "4e32d85249359d8ef8ece97d89848e40154363ab", features = ["rest-client", "rpc-client", "tokio"] } -lightning-transaction-sync = { git = "https://github.com/lightningdevkit/rust-lightning", rev = "4e32d85249359d8ef8ece97d89848e40154363ab", features = ["esplora-async-https", "electrum", "time"] } +lightning-transaction-sync = { git = "https://github.com/lightningdevkit/rust-lightning", rev = "4e32d85249359d8ef8ece97d89848e40154363ab", features = ["esplora-async-https", "electrum-rustls-ring", "time"] } lightning-liquidity = { git = "https://github.com/lightningdevkit/rust-lightning", rev = "4e32d85249359d8ef8ece97d89848e40154363ab" } lightning-macros = { git = "https://github.com/lightningdevkit/rust-lightning", rev = "4e32d85249359d8ef8ece97d89848e40154363ab" } @@ -72,13 +72,13 @@ lightning-macros = { git = "https://github.com/lightningdevkit/rust-lightning", #lightning-background-processor = { path = "../rust-lightning/lightning-background-processor" } #lightning-rapid-gossip-sync = { path = "../rust-lightning/lightning-rapid-gossip-sync" } #lightning-block-sync = { path = "../rust-lightning/lightning-block-sync", features = ["rest-client", "rpc-client", "tokio"] } -#lightning-transaction-sync = { path = "../rust-lightning/lightning-transaction-sync", features = ["esplora-async-https", "electrum", "time"] } +#lightning-transaction-sync = { path = "../rust-lightning/lightning-transaction-sync", features = ["esplora-async-https", "electrum-rustls-ring", "time"] } #lightning-liquidity = { path = "../rust-lightning/lightning-liquidity", features = ["std"] } #lightning-macros = { path = "../rust-lightning/lightning-macros" } bdk_chain = { version = "0.23.0", default-features = false, features = ["std"] } bdk_esplora = { version = "0.22.0", default-features = false, features = ["async-https-rustls", "tokio"]} -bdk_electrum = { version = "0.23.0", default-features = false, features = ["use-rustls"]} +bdk_electrum = { version = "0.23.0", default-features = false, features = ["use-rustls-ring"]} bdk_wallet = { version = "2.0.0", default-features = false, features = ["std", "keys-bip39"]} reqwest = { version = "0.12", default-features = false, features = ["json", "rustls-tls"] } @@ -93,7 +93,7 @@ rand = "0.8.5" chrono = { version = "0.4", default-features = false, features = ["clock"] } tokio = { version = "1.37", default-features = false, features = [ "rt-multi-thread", "time", "sync", "macros" ] } esplora-client = { version = "0.12", default-features = false, features = ["tokio", "async-https-rustls"] } -electrum-client = { version = "0.24.0", default-features = true } +electrum-client = { version = "0.24.0", default-features = false, features = ["proxy", "use-rustls-ring"] } libc = "0.2" uniffi = { version = "0.28.3", features = ["build"], optional = true } serde = { version = "1.0.210", default-features = false, features = ["std", "derive"] } diff --git 
a/src/builder.rs b/src/builder.rs index 094c21e72..a46b182e1 100644 --- a/src/builder.rs +++ b/src/builder.rs @@ -1688,7 +1688,7 @@ fn optionally_install_rustls_cryptoprovider() { INIT_CRYPTO.call_once(|| { // Ensure we always install a `CryptoProvider` for `rustls` if it was somehow not previously installed by now. if rustls::crypto::CryptoProvider::get_default().is_none() { - let _ = rustls::crypto::aws_lc_rs::default_provider().install_default(); + let _ = rustls::crypto::ring::default_provider().install_default(); } // Refuse to startup without TLS support. Better to catch it now than even later at runtime. From 80ac9f35eaf39847ed6c2438df1c25e0299a6807 Mon Sep 17 00:00:00 2001 From: Joost Jager Date: Wed, 3 Sep 2025 12:46:07 +0200 Subject: [PATCH 04/27] Use log timestamps with millisecond resolution Helpful to correlate multiple log files throughout time --- src/logger.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/logger.rs b/src/logger.rs index bbd24ec20..40817897c 100644 --- a/src/logger.rs +++ b/src/logger.rs @@ -124,7 +124,7 @@ impl LogWriter for Writer { let log = format!( "{} {:<5} [{}:{}] {}\n", - Utc::now().format("%Y-%m-%d %H:%M:%S"), + Utc::now().format("%Y-%m-%d %H:%M:%S%.3f"), record.level.to_string(), record.module_path, record.line, From 66f5c28ca8663a3c9e5b7562660192f3804bec4f Mon Sep 17 00:00:00 2001 From: Joost Jager Date: Tue, 19 Aug 2025 15:37:31 +0200 Subject: [PATCH 05/27] Add static invoice support This commit adds support for using ldk-node as a static invoice server. When configured as such, the node persists and retrieves invoices from the configured kv store. Access is guarded by a rate limiter to prevent overload and mitigate potential DoS attacks. In this mode, ldk-node also exposes blinded paths that can be shared with async recipients, allowing them to contact the static invoice server. When ldk-node functions as a recipient, it can communicate with the static invoice server to set up async payments. --- bindings/ldk_node.udl | 9 + src/builder.rs | 2 +- src/config.rs | 3 + src/error.rs | 8 + src/event.rs | 63 +++- src/io/mod.rs | 5 + src/lib.rs | 10 + src/payment/asynchronous/mod.rs | 9 + src/payment/asynchronous/rate_limiter.rs | 96 ++++++ .../asynchronous/static_invoice_store.rs | 277 ++++++++++++++++++ src/payment/bolt12.rs | 105 ++++++- src/payment/mod.rs | 1 + src/types.rs | 2 +- tests/integration_tests_rust.rs | 95 ++++++ 14 files changed, 672 insertions(+), 13 deletions(-) create mode 100644 src/payment/asynchronous/mod.rs create mode 100644 src/payment/asynchronous/rate_limiter.rs create mode 100644 src/payment/asynchronous/static_invoice_store.rs diff --git a/bindings/ldk_node.udl b/bindings/ldk_node.udl index b9bab61e8..9f0ef697e 100644 --- a/bindings/ldk_node.udl +++ b/bindings/ldk_node.udl @@ -13,6 +13,7 @@ dictionary Config { u64 probing_liquidity_limit_multiplier; AnchorChannelsConfig? anchor_channels_config; RouteParametersConfig? route_parameters; + boolean async_payment_services_enabled; }; dictionary AnchorChannelsConfig { @@ -209,6 +210,12 @@ interface Bolt12Payment { Bolt12Invoice request_refund_payment([ByRef]Refund refund); [Throws=NodeError] Refund initiate_refund(u64 amount_msat, u32 expiry_secs, u64? quantity, string? 
payer_note); + [Throws=NodeError] + Offer receive_async(); + [Throws=NodeError] + void set_paths_to_static_invoice_server(bytes paths); + [Throws=NodeError] + bytes blinded_paths_for_async_recipient(bytes recipient_id); }; interface SpontaneousPayment { @@ -311,6 +318,8 @@ enum NodeError { "InsufficientFunds", "LiquiditySourceUnavailable", "LiquidityFeeTooHigh", + "InvalidBlindedPaths", + "AsyncPaymentServicesDisabled", }; dictionary NodeStatus { diff --git a/src/builder.rs b/src/builder.rs index a46b182e1..d330597ee 100644 --- a/src/builder.rs +++ b/src/builder.rs @@ -1455,7 +1455,7 @@ fn build_with_store_internal( Arc::clone(&channel_manager), message_router, Arc::clone(&channel_manager), - IgnoringMessageHandler {}, + Arc::clone(&channel_manager), IgnoringMessageHandler {}, IgnoringMessageHandler {}, )); diff --git a/src/config.rs b/src/config.rs index 84f62d220..bb0bd56ba 100644 --- a/src/config.rs +++ b/src/config.rs @@ -179,6 +179,8 @@ pub struct Config { /// **Note:** If unset, default parameters will be used, and you will be able to override the /// parameters on a per-payment basis in the corresponding method calls. pub route_parameters: Option, + /// Whether to enable the static invoice service to support async payment reception for clients. + pub async_payment_services_enabled: bool, } impl Default for Config { @@ -193,6 +195,7 @@ impl Default for Config { anchor_channels_config: Some(AnchorChannelsConfig::default()), route_parameters: None, node_alias: None, + async_payment_services_enabled: false, } } } diff --git a/src/error.rs b/src/error.rs index 2cb71186d..eaa022e56 100644 --- a/src/error.rs +++ b/src/error.rs @@ -120,6 +120,10 @@ pub enum Error { LiquiditySourceUnavailable, /// The given operation failed due to the LSP's required opening fee being too high. LiquidityFeeTooHigh, + /// The given blinded paths are invalid. + InvalidBlindedPaths, + /// Asynchronous payment services are disabled. + AsyncPaymentServicesDisabled, } impl fmt::Display for Error { @@ -193,6 +197,10 @@ impl fmt::Display for Error { Self::LiquidityFeeTooHigh => { write!(f, "The given operation failed due to the LSP's required opening fee being too high.") }, + Self::InvalidBlindedPaths => write!(f, "The given blinded paths are invalid."), + Self::AsyncPaymentServicesDisabled => { + write!(f, "Asynchronous payment services are disabled.") + }, } } } diff --git a/src/event.rs b/src/event.rs index bad1b84ab..7a6dc4832 100644 --- a/src/event.rs +++ b/src/event.rs @@ -6,7 +6,6 @@ // accordance with one or both of these licenses. 
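Taken together, the three `Bolt12Payment` methods added to the UDL above are intended to be wired up roughly as follows. This is only a sketch under stated assumptions: `async_receive_flow` is a hypothetical helper, the two `Node` handles are assumed to be built, started, funded and channel-connected elsewhere (as the integration test later in this series does), and in practice `receive_async` is polled until the interactive offer build with the server has completed.

    fn async_receive_flow(server_node: &ldk_node::Node, recipient_node: &ldk_node::Node) {
        // 1) The static invoice server derives blinded message paths for a recipient it
        //    identifies by an application-chosen id. This requires the server's `Config` to
        //    have `async_payment_services_enabled = true`, otherwise it returns
        //    `Error::AsyncPaymentServicesDisabled`.
        let recipient_id = vec![1, 2, 3];
        let paths = server_node
            .bolt12_payment()
            .blinded_paths_for_async_recipient(recipient_id)
            .unwrap();

        // 2) The recipient learns those paths out of band and sets them, which starts the
        //    interactive build of a StaticInvoice with the server.
        recipient_node.bolt12_payment().set_paths_to_static_invoice_server(paths).unwrap();

        // 3) Once that build has completed (poll for it in practice), the recipient can hand
        //    out a reusable offer that remains payable while it is offline.
        let _offer = recipient_node.bolt12_payment().receive_async().unwrap();
    }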
use crate::types::{CustomTlvRecord, DynStore, PaymentStore, Sweeper, Wallet}; - use crate::{ hex_utils, BumpTransactionEventHandler, ChannelManager, Error, Graph, PeerInfo, PeerStore, UserChannelId, @@ -19,6 +18,7 @@ use crate::fee_estimator::ConfirmationTarget; use crate::liquidity::LiquiditySource; use crate::logger::Logger; +use crate::payment::asynchronous::static_invoice_store::StaticInvoiceStore; use crate::payment::store::{ PaymentDetails, PaymentDetailsUpdate, PaymentDirection, PaymentKind, PaymentStatus, }; @@ -27,7 +27,7 @@ use crate::io::{ EVENT_QUEUE_PERSISTENCE_KEY, EVENT_QUEUE_PERSISTENCE_PRIMARY_NAMESPACE, EVENT_QUEUE_PERSISTENCE_SECONDARY_NAMESPACE, }; -use crate::logger::{log_debug, log_error, log_info, LdkLogger}; +use crate::logger::{log_debug, log_error, log_info, log_trace, LdkLogger}; use crate::runtime::Runtime; @@ -458,6 +458,7 @@ where runtime: Arc, logger: L, config: Arc, + static_invoice_store: Option, } impl EventHandler @@ -470,8 +471,9 @@ where channel_manager: Arc, connection_manager: Arc>, output_sweeper: Arc, network_graph: Arc, liquidity_source: Option>>>, - payment_store: Arc, peer_store: Arc>, runtime: Arc, - logger: L, config: Arc, + payment_store: Arc, peer_store: Arc>, + static_invoice_store: Option, runtime: Arc, logger: L, + config: Arc, ) -> Self { Self { event_queue, @@ -487,6 +489,7 @@ where logger, runtime, config, + static_invoice_store, } } @@ -1494,11 +1497,55 @@ where LdkEvent::OnionMessagePeerConnected { .. } => { debug_assert!(false, "We currently don't support onion message interception, so this event should never be emitted."); }, - LdkEvent::PersistStaticInvoice { .. } => { - debug_assert!(false, "We currently don't support static invoice persistence, so this event should never be emitted."); + + LdkEvent::PersistStaticInvoice { + invoice, + invoice_slot, + recipient_id, + invoice_persisted_path, + } => { + if let Some(store) = self.static_invoice_store.as_ref() { + match store + .handle_persist_static_invoice(invoice, invoice_slot, recipient_id) + .await + { + Ok(_) => { + self.channel_manager.static_invoice_persisted(invoice_persisted_path); + }, + Err(e) => { + log_error!(self.logger, "Failed to persist static invoice: {}", e); + return Err(ReplayEvent()); + }, + }; + } }, - LdkEvent::StaticInvoiceRequested { .. } => { - debug_assert!(false, "We currently don't support static invoice persistence, so this event should never be emitted."); + LdkEvent::StaticInvoiceRequested { recipient_id, invoice_slot, reply_path } => { + if let Some(store) = self.static_invoice_store.as_ref() { + let invoice = + store.handle_static_invoice_requested(&recipient_id, invoice_slot).await; + + match invoice { + Ok(Some(invoice)) => { + if let Err(e) = + self.channel_manager.send_static_invoice(invoice, reply_path) + { + log_error!(self.logger, "Failed to send static invoice: {:?}", e); + } + }, + Ok(None) => { + log_trace!( + self.logger, + "No static invoice found for recipient {} and slot {}", + hex_utils::to_string(&recipient_id), + invoice_slot + ); + }, + Err(e) => { + log_error!(self.logger, "Failed to retrieve static invoice: {}", e); + return Err(ReplayEvent()); + }, + } + } }, LdkEvent::FundingTransactionReadyForSigning { .. 
} => { debug_assert!(false, "We currently don't support interactive-tx, so this event should never be emitted."); diff --git a/src/io/mod.rs b/src/io/mod.rs index 7a52a5c98..38fba5114 100644 --- a/src/io/mod.rs +++ b/src/io/mod.rs @@ -73,3 +73,8 @@ pub(crate) const BDK_WALLET_TX_GRAPH_KEY: &str = "tx_graph"; pub(crate) const BDK_WALLET_INDEXER_PRIMARY_NAMESPACE: &str = "bdk_wallet"; pub(crate) const BDK_WALLET_INDEXER_SECONDARY_NAMESPACE: &str = ""; pub(crate) const BDK_WALLET_INDEXER_KEY: &str = "indexer"; + +/// [`StaticInvoice`]s will be persisted under this key. +/// +/// [`StaticInvoice`]: lightning::offers::static_invoice::StaticInvoice +pub(crate) const STATIC_INVOICE_STORE_PRIMARY_NAMESPACE: &str = "static_invoices"; diff --git a/src/lib.rs b/src/lib.rs index 160762dd2..e7e27273b 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -136,6 +136,7 @@ use gossip::GossipSource; use graph::NetworkGraph; use io::utils::write_node_metrics; use liquidity::{LSPS1Liquidity, LiquiditySource}; +use payment::asynchronous::static_invoice_store::StaticInvoiceStore; use payment::{ Bolt11Payment, Bolt12Payment, OnchainPayment, PaymentDetails, SpontaneousPayment, UnifiedQrPayment, @@ -498,6 +499,12 @@ impl Node { Arc::clone(&self.logger), )); + let static_invoice_store = if self.config.async_payment_services_enabled { + Some(StaticInvoiceStore::new(Arc::clone(&self.kv_store))) + } else { + None + }; + let event_handler = Arc::new(EventHandler::new( Arc::clone(&self.event_queue), Arc::clone(&self.wallet), @@ -509,6 +516,7 @@ impl Node { self.liquidity_source.clone(), Arc::clone(&self.payment_store), Arc::clone(&self.peer_store), + static_invoice_store, Arc::clone(&self.runtime), Arc::clone(&self.logger), Arc::clone(&self.config), @@ -818,6 +826,7 @@ impl Node { Bolt12Payment::new( Arc::clone(&self.channel_manager), Arc::clone(&self.payment_store), + Arc::clone(&self.config), Arc::clone(&self.is_running), Arc::clone(&self.logger), ) @@ -831,6 +840,7 @@ impl Node { Arc::new(Bolt12Payment::new( Arc::clone(&self.channel_manager), Arc::clone(&self.payment_store), + Arc::clone(&self.config), Arc::clone(&self.is_running), Arc::clone(&self.logger), )) diff --git a/src/payment/asynchronous/mod.rs b/src/payment/asynchronous/mod.rs new file mode 100644 index 000000000..ebb7a4bd3 --- /dev/null +++ b/src/payment/asynchronous/mod.rs @@ -0,0 +1,9 @@ +// This file is Copyright its original authors, visible in version control history. +// +// This file is licensed under the Apache License, Version 2.0 or the MIT license , at your option. You may not use this file except in +// accordance with one or both of these licenses. + +mod rate_limiter; +pub(crate) mod static_invoice_store; diff --git a/src/payment/asynchronous/rate_limiter.rs b/src/payment/asynchronous/rate_limiter.rs new file mode 100644 index 000000000..153577b16 --- /dev/null +++ b/src/payment/asynchronous/rate_limiter.rs @@ -0,0 +1,96 @@ +// This file is Copyright its original authors, visible in version control history. +// +// This file is licensed under the Apache License, Version 2.0 or the MIT license , at your option. You may not use this file except in +// accordance with one or both of these licenses. + +//! [`RateLimiter`] to control the rate of requests from users. + +use std::collections::HashMap; +use std::time::{Duration, Instant}; + +/// Implements a leaky-bucket style rate limiter parameterized by the max capacity of the bucket, the refill interval, +/// and the max idle duration. 
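The refill behaviour this doc comment goes on to describe can be worked through once in isolation: with a 100 ms refill interval, 250 ms of elapsed time adds two tokens (the truncating cast drops the fractional interval), and the result is clamped to the bucket's capacity. A standalone snippet using the same arithmetic as `allow` below, with made-up numbers:

    use std::time::Duration;

    fn main() {
        let capacity: u32 = 3;
        let refill_interval = Duration::from_millis(100);

        // Suppose 250 ms have passed since the bucket was last refilled.
        let elapsed = Duration::from_millis(250);
        let tokens_to_add = (elapsed.as_secs_f64() / refill_interval.as_secs_f64()) as u32;
        assert_eq!(tokens_to_add, 2); // 2.5 intervals elapsed, truncated to 2 whole ones.

        // Refill an empty bucket, never exceeding the configured capacity.
        let tokens: u32 = 0;
        assert_eq!((tokens + tokens_to_add).min(capacity), 2);
    }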
+/// +/// For every passing of the refill interval, one token is added to the bucket, up to the maximum capacity. When the +/// bucket has remained at the maximum capacity for longer than the max idle duration, it is removed to prevent memory +/// leakage. +pub(crate) struct RateLimiter { + users: HashMap, Bucket>, + capacity: u32, + refill_interval: Duration, + max_idle: Duration, +} + +struct Bucket { + tokens: u32, + last_refill: Instant, +} + +impl RateLimiter { + pub(crate) fn new(capacity: u32, refill_interval: Duration, max_idle: Duration) -> Self { + Self { users: HashMap::new(), capacity, refill_interval, max_idle } + } + + pub(crate) fn allow(&mut self, user_id: &[u8]) -> bool { + let now = Instant::now(); + + let entry = self.users.entry(user_id.to_vec()); + let is_new_user = matches!(entry, std::collections::hash_map::Entry::Vacant(_)); + + let bucket = entry.or_insert(Bucket { tokens: self.capacity, last_refill: now }); + + let elapsed = now.duration_since(bucket.last_refill); + let tokens_to_add = (elapsed.as_secs_f64() / self.refill_interval.as_secs_f64()) as u32; + + if tokens_to_add > 0 { + bucket.tokens = (bucket.tokens + tokens_to_add).min(self.capacity); + bucket.last_refill = now; + } + + let allow = if bucket.tokens > 0 { + bucket.tokens -= 1; + true + } else { + false + }; + + // Each time a new user is added, we take the opportunity to clean up old rate limits. + if is_new_user { + self.garbage_collect(self.max_idle); + } + + allow + } + + fn garbage_collect(&mut self, max_idle: Duration) { + let now = Instant::now(); + self.users.retain(|_, bucket| now.duration_since(bucket.last_refill) < max_idle); + } +} + +#[cfg(test)] +mod tests { + use crate::payment::asynchronous::rate_limiter::RateLimiter; + + use std::time::Duration; + + #[test] + fn rate_limiter_test() { + // Test + let mut rate_limiter = + RateLimiter::new(3, Duration::from_millis(100), Duration::from_secs(1)); + + assert!(rate_limiter.allow(b"user1")); + assert!(rate_limiter.allow(b"user1")); + assert!(rate_limiter.allow(b"user1")); + assert!(!rate_limiter.allow(b"user1")); + assert!(rate_limiter.allow(b"user2")); + + std::thread::sleep(Duration::from_millis(150)); + + assert!(rate_limiter.allow(b"user1")); + assert!(rate_limiter.allow(b"user2")); + } +} diff --git a/src/payment/asynchronous/static_invoice_store.rs b/src/payment/asynchronous/static_invoice_store.rs new file mode 100644 index 000000000..eed6720e5 --- /dev/null +++ b/src/payment/asynchronous/static_invoice_store.rs @@ -0,0 +1,277 @@ +// This file is Copyright its original authors, visible in version control history. +// +// This file is licensed under the Apache License, Version 2.0 or the MIT license , at your option. You may not use this file except in +// accordance with one or both of these licenses. + +//! Store implementation for [`StaticInvoice`]s. 
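Concretely, the store below places each invoice under `static_invoices/<hex of sha256(recipient_id)>/<zero-padded slot>`. A small standalone sketch of that key derivation, mirroring `get_storage_location` further down but using a plain `format!`-based hex encoding instead of the crate's internal `hex_utils`:

    use bitcoin::hashes::{sha256, Hash};

    fn storage_location(recipient_id: &[u8], invoice_slot: u16) -> (String, String) {
        // Hash the recipient id so ids of arbitrary length map to a fixed-size namespace.
        let hash = sha256::Hash::hash(recipient_id).to_byte_array();
        let secondary_namespace: String = hash.iter().map(|b| format!("{:02x}", b)).collect();
        // Slots become fixed-width keys so they sort lexicographically.
        let key = format!("{:05}", invoice_slot);
        (secondary_namespace, key)
    }

    fn main() {
        let (namespace, key) = storage_location(&[1, 1, 1], 1);
        // Prints e.g. "static_invoices/<64 hex chars>/00001".
        println!("static_invoices/{}/{}", namespace, key);
    }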
+ +use crate::hex_utils; +use crate::io::STATIC_INVOICE_STORE_PRIMARY_NAMESPACE; +use crate::payment::asynchronous::rate_limiter::RateLimiter; +use crate::types::DynStore; + +use bitcoin::hashes::sha256::Hash as Sha256; +use bitcoin::hashes::Hash; + +use lightning::{offers::static_invoice::StaticInvoice, util::ser::Writeable}; + +use std::sync::{Arc, Mutex}; +use std::time::Duration; + +pub(crate) struct StaticInvoiceStore { + kv_store: Arc, + request_rate_limiter: Mutex, + persist_rate_limiter: Mutex, +} + +impl StaticInvoiceStore { + const RATE_LIMITER_BUCKET_CAPACITY: u32 = 5; + const RATE_LIMITER_REFILL_INTERVAL: Duration = Duration::from_millis(100); + const RATE_LIMITER_MAX_IDLE: Duration = Duration::from_secs(600); + + pub(crate) fn new(kv_store: Arc) -> Self { + Self { + kv_store, + request_rate_limiter: Mutex::new(RateLimiter::new( + Self::RATE_LIMITER_BUCKET_CAPACITY, + Self::RATE_LIMITER_REFILL_INTERVAL, + Self::RATE_LIMITER_MAX_IDLE, + )), + persist_rate_limiter: Mutex::new(RateLimiter::new( + Self::RATE_LIMITER_BUCKET_CAPACITY, + Self::RATE_LIMITER_REFILL_INTERVAL, + Self::RATE_LIMITER_MAX_IDLE, + )), + } + } + + fn check_rate_limit( + limiter: &Mutex, recipient_id: &[u8], + ) -> Result<(), lightning::io::Error> { + let mut limiter = limiter.lock().unwrap(); + if !limiter.allow(recipient_id) { + Err(lightning::io::Error::new(lightning::io::ErrorKind::Other, "Rate limit exceeded")) + } else { + Ok(()) + } + } + + pub(crate) async fn handle_static_invoice_requested( + &self, recipient_id: &[u8], invoice_slot: u16, + ) -> Result, lightning::io::Error> { + Self::check_rate_limit(&self.request_rate_limiter, &recipient_id)?; + + let (secondary_namespace, key) = Self::get_storage_location(invoice_slot, recipient_id); + + self.kv_store + .read(STATIC_INVOICE_STORE_PRIMARY_NAMESPACE, &secondary_namespace, &key) + .and_then(|data| { + data.try_into().map(Some).map_err(|e| { + lightning::io::Error::new( + lightning::io::ErrorKind::InvalidData, + format!("Failed to parse static invoice: {:?}", e), + ) + }) + }) + .or_else( + |e| { + if e.kind() == lightning::io::ErrorKind::NotFound { + Ok(None) + } else { + Err(e) + } + }, + ) + } + + pub(crate) async fn handle_persist_static_invoice( + &self, invoice: StaticInvoice, invoice_slot: u16, recipient_id: Vec, + ) -> Result<(), lightning::io::Error> { + Self::check_rate_limit(&self.persist_rate_limiter, &recipient_id)?; + + let (secondary_namespace, key) = Self::get_storage_location(invoice_slot, &recipient_id); + + let mut buf = Vec::new(); + invoice.write(&mut buf)?; + + // Static invoices will be persisted at "static_invoices//". 
+ // + // Example: static_invoices/039058c6f2c0cb492c533b0a4d14ef77cc0f78abccced5287d84a1a2011cfb81/00001 + self.kv_store.write(STATIC_INVOICE_STORE_PRIMARY_NAMESPACE, &secondary_namespace, &key, buf) + } + + fn get_storage_location(invoice_slot: u16, recipient_id: &[u8]) -> (String, String) { + let hash = Sha256::hash(recipient_id).to_byte_array(); + let secondary_namespace = hex_utils::to_string(&hash); + + let key = format!("{:05}", invoice_slot); + (secondary_namespace, key) + } +} + +#[cfg(test)] +mod tests { + use std::{sync::Arc, time::Duration}; + + use bitcoin::{ + key::{Keypair, Secp256k1}, + secp256k1::{PublicKey, SecretKey}, + }; + use lightning::blinded_path::{ + message::BlindedMessagePath, + payment::{BlindedPayInfo, BlindedPaymentPath}, + BlindedHop, + }; + use lightning::ln::inbound_payment::ExpandedKey; + use lightning::offers::{ + nonce::Nonce, + offer::OfferBuilder, + static_invoice::{StaticInvoice, StaticInvoiceBuilder}, + }; + use lightning::sign::EntropySource; + use lightning::util::test_utils::TestStore; + use lightning_types::features::BlindedHopFeatures; + + use crate::payment::asynchronous::static_invoice_store::StaticInvoiceStore; + use crate::types::DynStore; + + #[tokio::test] + async fn static_invoice_store_test() { + let store: Arc = Arc::new(TestStore::new(false)); + let static_invoice_store = StaticInvoiceStore::new(Arc::clone(&store)); + + let static_invoice = invoice(); + let recipient_id = vec![1, 1, 1]; + assert!(static_invoice_store + .handle_persist_static_invoice(static_invoice.clone(), 0, recipient_id.clone()) + .await + .is_ok()); + + let requested_invoice = + static_invoice_store.handle_static_invoice_requested(&recipient_id, 0).await.unwrap(); + + assert_eq!(requested_invoice.unwrap(), static_invoice); + + assert!(static_invoice_store + .handle_static_invoice_requested(&recipient_id, 1) + .await + .unwrap() + .is_none()); + + assert!(static_invoice_store + .handle_static_invoice_requested(&[2, 2, 2], 0) + .await + .unwrap() + .is_none()); + } + + fn invoice() -> StaticInvoice { + let node_id = recipient_pubkey(); + let payment_paths = payment_paths(); + let now = now(); + let expanded_key = ExpandedKey::new([42; 32]); + let entropy = FixedEntropy {}; + let nonce = Nonce::from_entropy_source(&entropy); + let secp_ctx = Secp256k1::new(); + + let offer = OfferBuilder::deriving_signing_pubkey(node_id, &expanded_key, nonce, &secp_ctx) + .path(blinded_path()) + .build() + .unwrap(); + + StaticInvoiceBuilder::for_offer_using_derived_keys( + &offer, + payment_paths.clone(), + vec![blinded_path()], + now, + &expanded_key, + nonce, + &secp_ctx, + ) + .unwrap() + .build_and_sign(&secp_ctx) + .unwrap() + } + + fn now() -> Duration { + std::time::SystemTime::now() + .duration_since(std::time::SystemTime::UNIX_EPOCH) + .expect("SystemTime::now() should come after SystemTime::UNIX_EPOCH") + } + + fn payment_paths() -> Vec { + vec![ + BlindedPaymentPath::from_blinded_path_and_payinfo( + pubkey(40), + pubkey(41), + vec![ + BlindedHop { blinded_node_id: pubkey(43), encrypted_payload: vec![0; 43] }, + BlindedHop { blinded_node_id: pubkey(44), encrypted_payload: vec![0; 44] }, + ], + BlindedPayInfo { + fee_base_msat: 1, + fee_proportional_millionths: 1_000, + cltv_expiry_delta: 42, + htlc_minimum_msat: 100, + htlc_maximum_msat: 1_000_000_000_000, + features: BlindedHopFeatures::empty(), + }, + ), + BlindedPaymentPath::from_blinded_path_and_payinfo( + pubkey(40), + pubkey(41), + vec![ + BlindedHop { blinded_node_id: pubkey(45), encrypted_payload: vec![0; 45] }, + 
BlindedHop { blinded_node_id: pubkey(46), encrypted_payload: vec![0; 46] }, + ], + BlindedPayInfo { + fee_base_msat: 1, + fee_proportional_millionths: 1_000, + cltv_expiry_delta: 42, + htlc_minimum_msat: 100, + htlc_maximum_msat: 1_000_000_000_000, + features: BlindedHopFeatures::empty(), + }, + ), + ] + } + + fn blinded_path() -> BlindedMessagePath { + BlindedMessagePath::from_blinded_path( + pubkey(40), + pubkey(41), + vec![ + BlindedHop { blinded_node_id: pubkey(42), encrypted_payload: vec![0; 43] }, + BlindedHop { blinded_node_id: pubkey(43), encrypted_payload: vec![0; 44] }, + ], + ) + } + + fn pubkey(byte: u8) -> PublicKey { + let secp_ctx = Secp256k1::new(); + PublicKey::from_secret_key(&secp_ctx, &privkey(byte)) + } + + fn privkey(byte: u8) -> SecretKey { + SecretKey::from_slice(&[byte; 32]).unwrap() + } + + fn recipient_keys() -> Keypair { + let secp_ctx = Secp256k1::new(); + Keypair::from_secret_key(&secp_ctx, &SecretKey::from_slice(&[43; 32]).unwrap()) + } + + fn recipient_pubkey() -> PublicKey { + recipient_keys().public_key() + } + + struct FixedEntropy; + + impl EntropySource for FixedEntropy { + fn get_secure_random_bytes(&self) -> [u8; 32] { + [42; 32] + } + } +} diff --git a/src/payment/bolt12.rs b/src/payment/bolt12.rs index 4e968deb7..81349e2bd 100644 --- a/src/payment/bolt12.rs +++ b/src/payment/bolt12.rs @@ -9,18 +9,21 @@ //! //! [BOLT 12]: https://github.com/lightning/bolts/blob/master/12-offer-encoding.md -use crate::config::LDK_PAYMENT_RETRY_TIMEOUT; +use crate::config::{Config, LDK_PAYMENT_RETRY_TIMEOUT}; use crate::error::Error; use crate::ffi::{maybe_deref, maybe_wrap}; use crate::logger::{log_error, log_info, LdkLogger, Logger}; use crate::payment::store::{PaymentDetails, PaymentDirection, PaymentKind, PaymentStatus}; use crate::types::{ChannelManager, PaymentStore}; +use lightning::blinded_path::message::BlindedMessagePath; use lightning::ln::channelmanager::{PaymentId, Retry}; use lightning::offers::offer::{Amount, Offer as LdkOffer, Quantity}; use lightning::offers::parse::Bolt12SemanticError; use lightning::routing::router::RouteParametersConfig; +#[cfg(feature = "uniffi")] +use lightning::util::ser::{Readable, Writeable}; use lightning_types::string::UntrustedString; use rand::RngCore; @@ -54,15 +57,16 @@ pub struct Bolt12Payment { channel_manager: Arc, payment_store: Arc, is_running: Arc>, + config: Arc, logger: Arc, } impl Bolt12Payment { pub(crate) fn new( channel_manager: Arc, payment_store: Arc, - is_running: Arc>, logger: Arc, + config: Arc, is_running: Arc>, logger: Arc, ) -> Self { - Self { channel_manager, payment_store, is_running, logger } + Self { channel_manager, payment_store, config, is_running, logger } } /// Send a payment given an offer. @@ -450,4 +454,99 @@ impl Bolt12Payment { Ok(maybe_wrap(refund)) } + + /// Retrieve an [`Offer`] for receiving async payments as an often-offline recipient. + /// + /// Will only return an offer if [`Bolt12Payment::set_paths_to_static_invoice_server`] was called and we succeeded + /// in interactively building a [`StaticInvoice`] with the static invoice server. + /// + /// Useful for posting offers to receive payments later, such as posting an offer on a website. + /// + /// **Caution**: Async payments support is considered experimental. 
+ /// + /// [`StaticInvoice`]: lightning::offers::static_invoice::StaticInvoice + /// [`Offer`]: lightning::offers::offer::Offer + pub fn receive_async(&self) -> Result { + self.channel_manager + .get_async_receive_offer() + .map(maybe_wrap) + .or(Err(Error::OfferCreationFailed)) + } + + /// Sets the [`BlindedMessagePath`]s that we will use as an async recipient to interactively build [`Offer`]s with a + /// static invoice server, so the server can serve [`StaticInvoice`]s to payers on our behalf when we're offline. + /// + /// **Caution**: Async payments support is considered experimental. + /// + /// [`Offer`]: lightning::offers::offer::Offer + /// [`StaticInvoice`]: lightning::offers::static_invoice::StaticInvoice + #[cfg(not(feature = "uniffi"))] + pub fn set_paths_to_static_invoice_server( + &self, paths: Vec, + ) -> Result<(), Error> { + self.channel_manager + .set_paths_to_static_invoice_server(paths) + .or(Err(Error::InvalidBlindedPaths)) + } + + /// Sets the [`BlindedMessagePath`]s that we will use as an async recipient to interactively build [`Offer`]s with a + /// static invoice server, so the server can serve [`StaticInvoice`]s to payers on our behalf when we're offline. + /// + /// **Caution**: Async payments support is considered experimental. + /// + /// [`Offer`]: lightning::offers::offer::Offer + /// [`StaticInvoice`]: lightning::offers::static_invoice::StaticInvoice + #[cfg(feature = "uniffi")] + pub fn set_paths_to_static_invoice_server(&self, paths: Vec) -> Result<(), Error> { + let decoded_paths = as Readable>::read(&mut &paths[..]) + .or(Err(Error::InvalidBlindedPaths))?; + + self.channel_manager + .set_paths_to_static_invoice_server(decoded_paths) + .or(Err(Error::InvalidBlindedPaths)) + } + + /// [`BlindedMessagePath`]s for an async recipient to communicate with this node and interactively + /// build [`Offer`]s and [`StaticInvoice`]s for receiving async payments. + /// + /// **Caution**: Async payments support is considered experimental. + /// + /// [`Offer`]: lightning::offers::offer::Offer + /// [`StaticInvoice`]: lightning::offers::static_invoice::StaticInvoice + #[cfg(not(feature = "uniffi"))] + pub fn blinded_paths_for_async_recipient( + &self, recipient_id: Vec, + ) -> Result, Error> { + self.blinded_paths_for_async_recipient_internal(recipient_id) + } + + /// [`BlindedMessagePath`]s for an async recipient to communicate with this node and interactively + /// build [`Offer`]s and [`StaticInvoice`]s for receiving async payments. + /// + /// **Caution**: Async payments support is considered experimental. + /// + /// [`Offer`]: lightning::offers::offer::Offer + /// [`StaticInvoice`]: lightning::offers::static_invoice::StaticInvoice + #[cfg(feature = "uniffi")] + pub fn blinded_paths_for_async_recipient( + &self, recipient_id: Vec, + ) -> Result, Error> { + let paths = self.blinded_paths_for_async_recipient_internal(recipient_id)?; + + let mut bytes = Vec::new(); + paths.write(&mut bytes).or(Err(Error::InvalidBlindedPaths))?; + Ok(bytes) + } + + fn blinded_paths_for_async_recipient_internal( + &self, recipient_id: Vec, + ) -> Result, Error> { + if !self.config.async_payment_services_enabled { + return Err(Error::AsyncPaymentServicesDisabled); + } + + self.channel_manager + .blinded_paths_for_async_recipient(recipient_id, None) + .or(Err(Error::InvalidBlindedPaths)) + } } diff --git a/src/payment/mod.rs b/src/payment/mod.rs index 54f7894dc..f629960e1 100644 --- a/src/payment/mod.rs +++ b/src/payment/mod.rs @@ -7,6 +7,7 @@ //! 
Objects for different types of payments. +pub(crate) mod asynchronous; mod bolt11; mod bolt12; mod onchain; diff --git a/src/types.rs b/src/types.rs index b9bc1c317..3635badff 100644 --- a/src/types.rs +++ b/src/types.rs @@ -123,7 +123,7 @@ pub(crate) type OnionMessenger = lightning::onion_message::messenger::OnionMesse Arc, Arc, Arc, - IgnoringMessageHandler, + Arc, IgnoringMessageHandler, IgnoringMessageHandler, >; diff --git a/tests/integration_tests_rust.rs b/tests/integration_tests_rust.rs index fa88fe0cc..77f46091d 100644 --- a/tests/integration_tests_rust.rs +++ b/tests/integration_tests_rust.rs @@ -1130,6 +1130,101 @@ fn simple_bolt12_send_receive() { assert_eq!(node_a_payments.first().unwrap().amount_msat, Some(overpaid_amount)); } +#[test] +fn static_invoice_server() { + let (bitcoind, electrsd) = setup_bitcoind_and_electrsd(); + let chain_source = TestChainSource::Esplora(&electrsd); + + let config_sender = random_config(true); + let node_sender = setup_node(&chain_source, config_sender, None); + + let config_sender_lsp = random_config(true); + let node_sender_lsp = setup_node(&chain_source, config_sender_lsp, None); + + let mut config_receiver_lsp = random_config(true); + config_receiver_lsp.node_config.async_payment_services_enabled = true; + let node_receiver_lsp = setup_node(&chain_source, config_receiver_lsp, None); + + let config_receiver = random_config(true); + let node_receiver = setup_node(&chain_source, config_receiver, None); + + let address_sender = node_sender.onchain_payment().new_address().unwrap(); + let address_sender_lsp = node_sender_lsp.onchain_payment().new_address().unwrap(); + let address_receiver_lsp = node_receiver_lsp.onchain_payment().new_address().unwrap(); + let address_receiver = node_receiver.onchain_payment().new_address().unwrap(); + let premine_amount_sat = 4_000_000; + premine_and_distribute_funds( + &bitcoind.client, + &electrsd.client, + vec![address_sender, address_sender_lsp, address_receiver_lsp, address_receiver], + Amount::from_sat(premine_amount_sat), + ); + + node_sender.sync_wallets().unwrap(); + node_sender_lsp.sync_wallets().unwrap(); + node_receiver_lsp.sync_wallets().unwrap(); + node_receiver.sync_wallets().unwrap(); + + open_channel(&node_sender, &node_sender_lsp, 400_000, true, &electrsd); + open_channel(&node_sender_lsp, &node_receiver_lsp, 400_000, true, &electrsd); + open_channel(&node_receiver_lsp, &node_receiver, 400_000, true, &electrsd); + + generate_blocks_and_wait(&bitcoind.client, &electrsd.client, 6); + + node_sender.sync_wallets().unwrap(); + node_sender_lsp.sync_wallets().unwrap(); + node_receiver_lsp.sync_wallets().unwrap(); + node_receiver.sync_wallets().unwrap(); + + expect_channel_ready_event!(node_sender, node_sender_lsp.node_id()); + expect_channel_ready_event!(node_sender_lsp, node_sender.node_id()); + expect_channel_ready_event!(node_sender_lsp, node_receiver_lsp.node_id()); + expect_channel_ready_event!(node_receiver_lsp, node_sender_lsp.node_id()); + expect_channel_ready_event!(node_receiver_lsp, node_receiver.node_id()); + expect_channel_ready_event!(node_receiver, node_receiver_lsp.node_id()); + + let has_node_announcements = |node: &ldk_node::Node| { + node.network_graph() + .list_nodes() + .iter() + .filter(|n| { + node.network_graph().node(n).map_or(false, |info| info.announcement_info.is_some()) + }) + .count() >= 4 + }; + + // Wait for everyone to see all channels and node announcements. 
+ while node_sender.network_graph().list_channels().len() < 3 + || node_sender_lsp.network_graph().list_channels().len() < 3 + || node_receiver_lsp.network_graph().list_channels().len() < 3 + || node_receiver.network_graph().list_channels().len() < 3 + || !has_node_announcements(&node_sender) + || !has_node_announcements(&node_sender_lsp) + || !has_node_announcements(&node_receiver_lsp) + || !has_node_announcements(&node_receiver) + { + std::thread::sleep(std::time::Duration::from_millis(100)); + } + + let recipient_id = vec![1, 2, 3]; + let blinded_paths = + node_receiver_lsp.bolt12_payment().blinded_paths_for_async_recipient(recipient_id).unwrap(); + node_receiver.bolt12_payment().set_paths_to_static_invoice_server(blinded_paths).unwrap(); + + let offer = loop { + if let Ok(offer) = node_receiver.bolt12_payment().receive_async() { + break offer; + } + + std::thread::sleep(std::time::Duration::from_millis(100)); + }; + + let payment_id = + node_sender.bolt12_payment().send_using_amount(&offer, 5_000, None, None).unwrap(); + + expect_payment_successful_event!(node_sender, Some(payment_id), None); +} + #[test] fn test_node_announcement_propagation() { let (bitcoind, electrsd) = setup_bitcoind_and_electrsd(); From 006a06e157ef6f9d80584b9be33bfd5b8a47cf02 Mon Sep 17 00:00:00 2001 From: Joost Jager Date: Tue, 16 Sep 2025 10:19:33 +0200 Subject: [PATCH 06/27] Adapt channel balance reporting to use confirmed candidate With splicing now implemented, a channel may have multiple holder commitment transactions and corresponding balance candidates. ldk-node now reports the confirmed balance candidate rather than a single static balance, ensuring the exposed value matches the channel's onchain state. Other candidate balances remain internal for now. --- Cargo.toml | 24 ++++++++++++------------ src/balance.rs | 30 ++++++++++++++++++------------ 2 files changed, 30 insertions(+), 24 deletions(-) diff --git a/Cargo.toml b/Cargo.toml index 9010ad6d5..c2b7775ac 100755 --- a/Cargo.toml +++ b/Cargo.toml @@ -52,17 +52,17 @@ default = [] #lightning-liquidity = { git = "https://github.com/lightningdevkit/rust-lightning", branch = "main" } #lightning-macros = { git = "https://github.com/lightningdevkit/rust-lightning", branch = "main" } -lightning = { git = "https://github.com/lightningdevkit/rust-lightning", rev = "4e32d85249359d8ef8ece97d89848e40154363ab", features = ["std"] } -lightning-types = { git = "https://github.com/lightningdevkit/rust-lightning", rev = "4e32d85249359d8ef8ece97d89848e40154363ab" } -lightning-invoice = { git = "https://github.com/lightningdevkit/rust-lightning", rev = "4e32d85249359d8ef8ece97d89848e40154363ab", features = ["std"] } -lightning-net-tokio = { git = "https://github.com/lightningdevkit/rust-lightning", rev = "4e32d85249359d8ef8ece97d89848e40154363ab" } -lightning-persister = { git = "https://github.com/lightningdevkit/rust-lightning", rev = "4e32d85249359d8ef8ece97d89848e40154363ab" } -lightning-background-processor = { git = "https://github.com/lightningdevkit/rust-lightning", rev = "4e32d85249359d8ef8ece97d89848e40154363ab" } -lightning-rapid-gossip-sync = { git = "https://github.com/lightningdevkit/rust-lightning", rev = "4e32d85249359d8ef8ece97d89848e40154363ab" } -lightning-block-sync = { git = "https://github.com/lightningdevkit/rust-lightning", rev = "4e32d85249359d8ef8ece97d89848e40154363ab", features = ["rest-client", "rpc-client", "tokio"] } -lightning-transaction-sync = { git = "https://github.com/lightningdevkit/rust-lightning", rev = 
"4e32d85249359d8ef8ece97d89848e40154363ab", features = ["esplora-async-https", "electrum-rustls-ring", "time"] } -lightning-liquidity = { git = "https://github.com/lightningdevkit/rust-lightning", rev = "4e32d85249359d8ef8ece97d89848e40154363ab" } -lightning-macros = { git = "https://github.com/lightningdevkit/rust-lightning", rev = "4e32d85249359d8ef8ece97d89848e40154363ab" } +lightning = { git = "https://github.com/lightningdevkit/rust-lightning", rev = "84398d9e5b3dc61c0a5c71972aa944f19948aef0", features = ["std"] } +lightning-types = { git = "https://github.com/lightningdevkit/rust-lightning", rev = "84398d9e5b3dc61c0a5c71972aa944f19948aef0" } +lightning-invoice = { git = "https://github.com/lightningdevkit/rust-lightning", rev = "84398d9e5b3dc61c0a5c71972aa944f19948aef0", features = ["std"] } +lightning-net-tokio = { git = "https://github.com/lightningdevkit/rust-lightning", rev = "84398d9e5b3dc61c0a5c71972aa944f19948aef0" } +lightning-persister = { git = "https://github.com/lightningdevkit/rust-lightning", rev = "84398d9e5b3dc61c0a5c71972aa944f19948aef0" } +lightning-background-processor = { git = "https://github.com/lightningdevkit/rust-lightning", rev = "84398d9e5b3dc61c0a5c71972aa944f19948aef0" } +lightning-rapid-gossip-sync = { git = "https://github.com/lightningdevkit/rust-lightning", rev = "84398d9e5b3dc61c0a5c71972aa944f19948aef0" } +lightning-block-sync = { git = "https://github.com/lightningdevkit/rust-lightning", rev = "84398d9e5b3dc61c0a5c71972aa944f19948aef0", features = ["rest-client", "rpc-client", "tokio"] } +lightning-transaction-sync = { git = "https://github.com/lightningdevkit/rust-lightning", rev = "84398d9e5b3dc61c0a5c71972aa944f19948aef0", features = ["esplora-async-https", "electrum-rustls-ring", "time"] } +lightning-liquidity = { git = "https://github.com/lightningdevkit/rust-lightning", rev = "84398d9e5b3dc61c0a5c71972aa944f19948aef0" } +lightning-macros = { git = "https://github.com/lightningdevkit/rust-lightning", rev = "84398d9e5b3dc61c0a5c71972aa944f19948aef0" } #lightning = { path = "../rust-lightning/lightning", features = ["std"] } #lightning-types = { path = "../rust-lightning/lightning-types" } @@ -109,7 +109,7 @@ winapi = { version = "0.3", features = ["winbase"] } [dev-dependencies] #lightning = { version = "0.1.0", features = ["std", "_test_utils"] } #lightning = { git = "https://github.com/lightningdevkit/rust-lightning", branch="main", features = ["std", "_test_utils"] } -lightning = { git = "https://github.com/lightningdevkit/rust-lightning", rev = "4e32d85249359d8ef8ece97d89848e40154363ab", features = ["std", "_test_utils"] } +lightning = { git = "https://github.com/lightningdevkit/rust-lightning", rev = "84398d9e5b3dc61c0a5c71972aa944f19948aef0", features = ["std", "_test_utils"] } #lightning = { path = "../rust-lightning/lightning", features = ["std", "_test_utils"] } proptest = "1.0.0" regex = "1.5.6" diff --git a/src/balance.rs b/src/balance.rs index d0ebc310b..7ba4826a9 100644 --- a/src/balance.rs +++ b/src/balance.rs @@ -73,7 +73,8 @@ pub struct BalanceDetails { pub enum LightningBalance { /// The channel is not yet closed (or the commitment or closing transaction has not yet /// appeared in a block). The given balance is claimable (less on-chain fees) if the channel is - /// force-closed now. + /// force-closed now. Values do not take into account any pending splices and are only based + /// on the confirmed state of the channel. ClaimableOnChannelClose { /// The identifier of the channel this balance belongs to. 
channel_id: ChannelId, @@ -224,21 +225,26 @@ impl LightningBalance { ) -> Self { match balance { LdkBalance::ClaimableOnChannelClose { - amount_satoshis, - transaction_fee_satoshis, - outbound_payment_htlc_rounded_msat, - outbound_forwarded_htlc_rounded_msat, - inbound_claiming_htlc_rounded_msat, - inbound_htlc_rounded_msat, - } => Self::ClaimableOnChannelClose { - channel_id, - counterparty_node_id, - amount_satoshis, - transaction_fee_satoshis, + balance_candidates, + confirmed_balance_candidate_index, outbound_payment_htlc_rounded_msat, outbound_forwarded_htlc_rounded_msat, inbound_claiming_htlc_rounded_msat, inbound_htlc_rounded_msat, + } => { + // unwrap safety: confirmed_balance_candidate_index is guaranteed to index into balance_candidates + let balance = balance_candidates.get(confirmed_balance_candidate_index).unwrap(); + + Self::ClaimableOnChannelClose { + channel_id, + counterparty_node_id, + amount_satoshis: balance.amount_satoshis, + transaction_fee_satoshis: balance.transaction_fee_satoshis, + outbound_payment_htlc_rounded_msat, + outbound_forwarded_htlc_rounded_msat, + inbound_claiming_htlc_rounded_msat, + inbound_htlc_rounded_msat, + } }, LdkBalance::ClaimableAwaitingConfirmations { amount_satoshis, From c99ff305edc59c256d3315bcc0327d048b68d000 Mon Sep 17 00:00:00 2001 From: Joost Jager Date: Tue, 16 Sep 2025 13:34:46 +0200 Subject: [PATCH 07/27] Log to console with node prefix --- tests/common/logging.rs | 27 ++++++++++++++++++++++++++- tests/integration_tests_rust.rs | 17 ++++++++++++++--- 2 files changed, 40 insertions(+), 4 deletions(-) diff --git a/tests/common/logging.rs b/tests/common/logging.rs index 6bceac29a..d7d59ba32 100644 --- a/tests/common/logging.rs +++ b/tests/common/logging.rs @@ -1,5 +1,4 @@ use chrono::Utc; -#[cfg(not(feature = "uniffi"))] use ldk_node::logger::LogRecord; use ldk_node::logger::{LogLevel, LogWriter}; #[cfg(not(feature = "uniffi"))] @@ -143,3 +142,29 @@ pub(crate) fn validate_log_entry(entry: &String) { let msg = &path_and_msg[msg_start_index..]; assert!(!msg.is_empty()); } + +pub(crate) struct MultiNodeLogger { + node_id: String, +} + +impl MultiNodeLogger { + pub(crate) fn new(node_id: String) -> Self { + Self { node_id } + } +} + +impl LogWriter for MultiNodeLogger { + fn log(&self, record: LogRecord) { + let log = format!( + "[{}] {} {:<5} [{}:{}] {}\n", + self.node_id, + Utc::now().format("%Y-%m-%d %H:%M:%S%.3f"), + record.level.to_string(), + record.module_path, + record.line, + record.args + ); + + print!("{}", log); + } +} diff --git a/tests/integration_tests_rust.rs b/tests/integration_tests_rust.rs index 77f46091d..c9f2f95fc 100644 --- a/tests/integration_tests_rust.rs +++ b/tests/integration_tests_rust.rs @@ -12,6 +12,7 @@ use common::{ expect_channel_pending_event, expect_channel_ready_event, expect_event, expect_payment_claimable_event, expect_payment_received_event, expect_payment_successful_event, generate_blocks_and_wait, + logging::MultiNodeLogger, logging::{init_log_logger, validate_log_entry, TestLogWriter}, open_channel, premine_and_distribute_funds, premine_blocks, prepare_rbf, random_config, random_listening_addresses, setup_bitcoind_and_electrsd, setup_builder, setup_node, @@ -1135,17 +1136,27 @@ fn static_invoice_server() { let (bitcoind, electrsd) = setup_bitcoind_and_electrsd(); let chain_source = TestChainSource::Esplora(&electrsd); - let config_sender = random_config(true); + let mut config_sender = random_config(true); + config_sender.log_writer = + TestLogWriter::Custom(Arc::new(MultiNodeLogger::new("sender 
".to_string()))); let node_sender = setup_node(&chain_source, config_sender, None); - let config_sender_lsp = random_config(true); + let mut config_sender_lsp = random_config(true); + config_sender_lsp.log_writer = + TestLogWriter::Custom(Arc::new(MultiNodeLogger::new("sender_lsp ".to_string()))); let node_sender_lsp = setup_node(&chain_source, config_sender_lsp, None); let mut config_receiver_lsp = random_config(true); config_receiver_lsp.node_config.async_payment_services_enabled = true; + config_receiver_lsp.log_writer = + TestLogWriter::Custom(Arc::new(MultiNodeLogger::new("receiver_lsp".to_string()))); + let node_receiver_lsp = setup_node(&chain_source, config_receiver_lsp, None); - let config_receiver = random_config(true); + let mut config_receiver = random_config(true); + config_receiver.log_writer = + TestLogWriter::Custom(Arc::new(MultiNodeLogger::new("receiver ".to_string()))); + let node_receiver = setup_node(&chain_source, config_receiver, None); let address_sender = node_sender.onchain_payment().new_address().unwrap(); From 8d18d1655e92c25e1b0e772a93cf63bf1c0575fa Mon Sep 17 00:00:00 2001 From: Joost Jager Date: Tue, 16 Sep 2025 11:28:41 +0200 Subject: [PATCH 08/27] Update static invoice store for invoice requests With the merge of https://github.com/lightningdevkit/rust-lightning/pull/4049, it is now possible for a static invoice server to forward the invoice request to the recipient if they are online. --- Cargo.toml | 24 ++++----- src/event.rs | 26 +++++++--- .../asynchronous/static_invoice_store.rs | 50 ++++++++++++++----- 3 files changed, 70 insertions(+), 30 deletions(-) diff --git a/Cargo.toml b/Cargo.toml index c2b7775ac..f3038ee96 100755 --- a/Cargo.toml +++ b/Cargo.toml @@ -52,17 +52,17 @@ default = [] #lightning-liquidity = { git = "https://github.com/lightningdevkit/rust-lightning", branch = "main" } #lightning-macros = { git = "https://github.com/lightningdevkit/rust-lightning", branch = "main" } -lightning = { git = "https://github.com/lightningdevkit/rust-lightning", rev = "84398d9e5b3dc61c0a5c71972aa944f19948aef0", features = ["std"] } -lightning-types = { git = "https://github.com/lightningdevkit/rust-lightning", rev = "84398d9e5b3dc61c0a5c71972aa944f19948aef0" } -lightning-invoice = { git = "https://github.com/lightningdevkit/rust-lightning", rev = "84398d9e5b3dc61c0a5c71972aa944f19948aef0", features = ["std"] } -lightning-net-tokio = { git = "https://github.com/lightningdevkit/rust-lightning", rev = "84398d9e5b3dc61c0a5c71972aa944f19948aef0" } -lightning-persister = { git = "https://github.com/lightningdevkit/rust-lightning", rev = "84398d9e5b3dc61c0a5c71972aa944f19948aef0" } -lightning-background-processor = { git = "https://github.com/lightningdevkit/rust-lightning", rev = "84398d9e5b3dc61c0a5c71972aa944f19948aef0" } -lightning-rapid-gossip-sync = { git = "https://github.com/lightningdevkit/rust-lightning", rev = "84398d9e5b3dc61c0a5c71972aa944f19948aef0" } -lightning-block-sync = { git = "https://github.com/lightningdevkit/rust-lightning", rev = "84398d9e5b3dc61c0a5c71972aa944f19948aef0", features = ["rest-client", "rpc-client", "tokio"] } -lightning-transaction-sync = { git = "https://github.com/lightningdevkit/rust-lightning", rev = "84398d9e5b3dc61c0a5c71972aa944f19948aef0", features = ["esplora-async-https", "electrum-rustls-ring", "time"] } -lightning-liquidity = { git = "https://github.com/lightningdevkit/rust-lightning", rev = "84398d9e5b3dc61c0a5c71972aa944f19948aef0" } -lightning-macros = { git = 
"https://github.com/lightningdevkit/rust-lightning", rev = "84398d9e5b3dc61c0a5c71972aa944f19948aef0" } +lightning = { git = "https://github.com/lightningdevkit/rust-lightning", rev = "b002e43ec5f9c1cbdcd1ac8588402c5a65ecd2e4", features = ["std"] } +lightning-types = { git = "https://github.com/lightningdevkit/rust-lightning", rev = "b002e43ec5f9c1cbdcd1ac8588402c5a65ecd2e4" } +lightning-invoice = { git = "https://github.com/lightningdevkit/rust-lightning", rev = "b002e43ec5f9c1cbdcd1ac8588402c5a65ecd2e4", features = ["std"] } +lightning-net-tokio = { git = "https://github.com/lightningdevkit/rust-lightning", rev = "b002e43ec5f9c1cbdcd1ac8588402c5a65ecd2e4" } +lightning-persister = { git = "https://github.com/lightningdevkit/rust-lightning", rev = "b002e43ec5f9c1cbdcd1ac8588402c5a65ecd2e4" } +lightning-background-processor = { git = "https://github.com/lightningdevkit/rust-lightning", rev = "b002e43ec5f9c1cbdcd1ac8588402c5a65ecd2e4" } +lightning-rapid-gossip-sync = { git = "https://github.com/lightningdevkit/rust-lightning", rev = "b002e43ec5f9c1cbdcd1ac8588402c5a65ecd2e4" } +lightning-block-sync = { git = "https://github.com/lightningdevkit/rust-lightning", rev = "b002e43ec5f9c1cbdcd1ac8588402c5a65ecd2e4", features = ["rest-client", "rpc-client", "tokio"] } +lightning-transaction-sync = { git = "https://github.com/lightningdevkit/rust-lightning", rev = "b002e43ec5f9c1cbdcd1ac8588402c5a65ecd2e4", features = ["esplora-async-https", "electrum-rustls-ring", "time"] } +lightning-liquidity = { git = "https://github.com/lightningdevkit/rust-lightning", rev = "b002e43ec5f9c1cbdcd1ac8588402c5a65ecd2e4" } +lightning-macros = { git = "https://github.com/lightningdevkit/rust-lightning", rev = "b002e43ec5f9c1cbdcd1ac8588402c5a65ecd2e4" } #lightning = { path = "../rust-lightning/lightning", features = ["std"] } #lightning-types = { path = "../rust-lightning/lightning-types" } @@ -109,7 +109,7 @@ winapi = { version = "0.3", features = ["winbase"] } [dev-dependencies] #lightning = { version = "0.1.0", features = ["std", "_test_utils"] } #lightning = { git = "https://github.com/lightningdevkit/rust-lightning", branch="main", features = ["std", "_test_utils"] } -lightning = { git = "https://github.com/lightningdevkit/rust-lightning", rev = "84398d9e5b3dc61c0a5c71972aa944f19948aef0", features = ["std", "_test_utils"] } +lightning = { git = "https://github.com/lightningdevkit/rust-lightning", rev = "b002e43ec5f9c1cbdcd1ac8588402c5a65ecd2e4", features = ["std", "_test_utils"] } #lightning = { path = "../rust-lightning/lightning", features = ["std", "_test_utils"] } proptest = "1.0.0" regex = "1.5.6" diff --git a/src/event.rs b/src/event.rs index 7a6dc4832..cd9146379 100644 --- a/src/event.rs +++ b/src/event.rs @@ -1500,13 +1500,19 @@ where LdkEvent::PersistStaticInvoice { invoice, + invoice_request_path, invoice_slot, recipient_id, invoice_persisted_path, } => { if let Some(store) = self.static_invoice_store.as_ref() { match store - .handle_persist_static_invoice(invoice, invoice_slot, recipient_id) + .handle_persist_static_invoice( + invoice, + invoice_request_path, + invoice_slot, + recipient_id, + ) .await { Ok(_) => { @@ -1519,16 +1525,24 @@ where }; } }, - LdkEvent::StaticInvoiceRequested { recipient_id, invoice_slot, reply_path } => { + LdkEvent::StaticInvoiceRequested { + recipient_id, + invoice_slot, + reply_path, + invoice_request, + } => { if let Some(store) = self.static_invoice_store.as_ref() { let invoice = store.handle_static_invoice_requested(&recipient_id, invoice_slot).await; match invoice { 
- Ok(Some(invoice)) => { - if let Err(e) = - self.channel_manager.send_static_invoice(invoice, reply_path) - { + Ok(Some((invoice, invoice_request_path))) => { + if let Err(e) = self.channel_manager.respond_to_static_invoice_request( + invoice, + reply_path, + invoice_request, + invoice_request_path, + ) { log_error!(self.logger, "Failed to send static invoice: {:?}", e); } }, diff --git a/src/payment/asynchronous/static_invoice_store.rs b/src/payment/asynchronous/static_invoice_store.rs index eed6720e5..f1aa702a4 100644 --- a/src/payment/asynchronous/static_invoice_store.rs +++ b/src/payment/asynchronous/static_invoice_store.rs @@ -15,11 +15,23 @@ use crate::types::DynStore; use bitcoin::hashes::sha256::Hash as Sha256; use bitcoin::hashes::Hash; -use lightning::{offers::static_invoice::StaticInvoice, util::ser::Writeable}; +use lightning::blinded_path::message::BlindedMessagePath; +use lightning::impl_writeable_tlv_based; +use lightning::{offers::static_invoice::StaticInvoice, util::ser::Readable, util::ser::Writeable}; use std::sync::{Arc, Mutex}; use std::time::Duration; +struct PersistedStaticInvoice { + invoice: StaticInvoice, + request_path: BlindedMessagePath, +} + +impl_writeable_tlv_based!(PersistedStaticInvoice, { + (0, invoice, required), + (2, request_path, required) +}); + pub(crate) struct StaticInvoiceStore { kv_store: Arc, request_rate_limiter: Mutex, @@ -60,7 +72,7 @@ impl StaticInvoiceStore { pub(crate) async fn handle_static_invoice_requested( &self, recipient_id: &[u8], invoice_slot: u16, - ) -> Result, lightning::io::Error> { + ) -> Result, lightning::io::Error> { Self::check_rate_limit(&self.request_rate_limiter, &recipient_id)?; let (secondary_namespace, key) = Self::get_storage_location(invoice_slot, recipient_id); @@ -68,12 +80,16 @@ impl StaticInvoiceStore { self.kv_store .read(STATIC_INVOICE_STORE_PRIMARY_NAMESPACE, &secondary_namespace, &key) .and_then(|data| { - data.try_into().map(Some).map_err(|e| { - lightning::io::Error::new( - lightning::io::ErrorKind::InvalidData, - format!("Failed to parse static invoice: {:?}", e), - ) - }) + PersistedStaticInvoice::read(&mut &*data) + .map(|persisted_invoice| { + Some((persisted_invoice.invoice, persisted_invoice.request_path)) + }) + .map_err(|e| { + lightning::io::Error::new( + lightning::io::ErrorKind::InvalidData, + format!("Failed to parse static invoice: {:?}", e), + ) + }) }) .or_else( |e| { @@ -87,14 +103,18 @@ impl StaticInvoiceStore { } pub(crate) async fn handle_persist_static_invoice( - &self, invoice: StaticInvoice, invoice_slot: u16, recipient_id: Vec, + &self, invoice: StaticInvoice, invoice_request_path: BlindedMessagePath, invoice_slot: u16, + recipient_id: Vec, ) -> Result<(), lightning::io::Error> { Self::check_rate_limit(&self.persist_rate_limiter, &recipient_id)?; let (secondary_namespace, key) = Self::get_storage_location(invoice_slot, &recipient_id); + let persisted_invoice = + PersistedStaticInvoice { invoice, request_path: invoice_request_path }; + let mut buf = Vec::new(); - invoice.write(&mut buf)?; + persisted_invoice.write(&mut buf)?; // Static invoices will be persisted at "static_invoices//". 
// @@ -144,15 +164,21 @@ mod tests { let static_invoice = invoice(); let recipient_id = vec![1, 1, 1]; + let invoice_request_path = blinded_path(); assert!(static_invoice_store - .handle_persist_static_invoice(static_invoice.clone(), 0, recipient_id.clone()) + .handle_persist_static_invoice( + static_invoice.clone(), + invoice_request_path.clone(), + 0, + recipient_id.clone() + ) .await .is_ok()); let requested_invoice = static_invoice_store.handle_static_invoice_requested(&recipient_id, 0).await.unwrap(); - assert_eq!(requested_invoice.unwrap(), static_invoice); + assert_eq!(requested_invoice.unwrap(), (static_invoice, invoice_request_path)); assert!(static_invoice_store .handle_static_invoice_requested(&recipient_id, 1) From efbef4c4d57a4059eab7558e8127698c0bc4299c Mon Sep 17 00:00:00 2001 From: Joost Jager Date: Wed, 17 Sep 2025 14:14:48 +0200 Subject: [PATCH 09/27] Update static invoice test to use unannounced channels To better align with the expected real life setup. --- src/builder.rs | 4 ++++ tests/common/mod.rs | 11 +++++++++-- tests/integration_tests_rust.rs | 33 ++++++++++++++++++++++----------- 3 files changed, 35 insertions(+), 13 deletions(-) diff --git a/src/builder.rs b/src/builder.rs index d330597ee..b99c44cec 100644 --- a/src/builder.rs +++ b/src/builder.rs @@ -1378,6 +1378,10 @@ fn build_with_store_internal( 100; } + if config.async_payment_services_enabled { + user_config.accept_forwards_to_priv_channels = true; + } + let message_router = Arc::new(MessageRouter::new(Arc::clone(&network_graph), Arc::clone(&keys_manager))); diff --git a/tests/common/mod.rs b/tests/common/mod.rs index f5bfe76fc..70c9a43a8 100644 --- a/tests/common/mod.rs +++ b/tests/common/mod.rs @@ -589,6 +589,13 @@ pub(crate) fn bump_fee_and_broadcast( pub fn open_channel( node_a: &TestNode, node_b: &TestNode, funding_amount_sat: u64, should_announce: bool, electrsd: &ElectrsD, +) -> OutPoint { + open_channel_push_amt(node_a, node_b, funding_amount_sat, None, should_announce, electrsd) +} + +pub fn open_channel_push_amt( + node_a: &TestNode, node_b: &TestNode, funding_amount_sat: u64, push_amount_msat: Option, + should_announce: bool, electrsd: &ElectrsD, ) -> OutPoint { if should_announce { node_a @@ -596,7 +603,7 @@ pub fn open_channel( node_b.node_id(), node_b.listening_addresses().unwrap().first().unwrap().clone(), funding_amount_sat, - None, + push_amount_msat, None, ) .unwrap(); @@ -606,7 +613,7 @@ pub fn open_channel( node_b.node_id(), node_b.listening_addresses().unwrap().first().unwrap().clone(), funding_amount_sat, - None, + push_amount_msat, None, ) .unwrap(); diff --git a/tests/integration_tests_rust.rs b/tests/integration_tests_rust.rs index c9f2f95fc..f2e8407cd 100644 --- a/tests/integration_tests_rust.rs +++ b/tests/integration_tests_rust.rs @@ -14,9 +14,9 @@ use common::{ generate_blocks_and_wait, logging::MultiNodeLogger, logging::{init_log_logger, validate_log_entry, TestLogWriter}, - open_channel, premine_and_distribute_funds, premine_blocks, prepare_rbf, random_config, - random_listening_addresses, setup_bitcoind_and_electrsd, setup_builder, setup_node, - setup_two_nodes, wait_for_tx, TestChainSource, TestSyncStore, + open_channel, open_channel_push_amt, premine_and_distribute_funds, premine_blocks, prepare_rbf, + random_config, random_listening_addresses, setup_bitcoind_and_electrsd, setup_builder, + setup_node, setup_two_nodes, wait_for_tx, TestChainSource, TestSyncStore, }; use ldk_node::config::EsploraSyncConfig; @@ -1137,11 +1137,14 @@ fn static_invoice_server() { let 
chain_source = TestChainSource::Esplora(&electrsd); let mut config_sender = random_config(true); + config_sender.node_config.listening_addresses = None; + config_sender.node_config.node_alias = None; config_sender.log_writer = TestLogWriter::Custom(Arc::new(MultiNodeLogger::new("sender ".to_string()))); let node_sender = setup_node(&chain_source, config_sender, None); let mut config_sender_lsp = random_config(true); + config_sender_lsp.node_config.async_payment_services_enabled = true; config_sender_lsp.log_writer = TestLogWriter::Custom(Arc::new(MultiNodeLogger::new("sender_lsp ".to_string()))); let node_sender_lsp = setup_node(&chain_source, config_sender_lsp, None); @@ -1154,9 +1157,10 @@ fn static_invoice_server() { let node_receiver_lsp = setup_node(&chain_source, config_receiver_lsp, None); let mut config_receiver = random_config(true); + config_receiver.node_config.listening_addresses = None; + config_receiver.node_config.node_alias = None; config_receiver.log_writer = TestLogWriter::Custom(Arc::new(MultiNodeLogger::new("receiver ".to_string()))); - let node_receiver = setup_node(&chain_source, config_receiver, None); let address_sender = node_sender.onchain_payment().new_address().unwrap(); @@ -1176,9 +1180,16 @@ fn static_invoice_server() { node_receiver_lsp.sync_wallets().unwrap(); node_receiver.sync_wallets().unwrap(); - open_channel(&node_sender, &node_sender_lsp, 400_000, true, &electrsd); + open_channel(&node_sender, &node_sender_lsp, 400_000, false, &electrsd); open_channel(&node_sender_lsp, &node_receiver_lsp, 400_000, true, &electrsd); - open_channel(&node_receiver_lsp, &node_receiver, 400_000, true, &electrsd); + open_channel_push_amt( + &node_receiver, + &node_receiver_lsp, + 400_000, + Some(200_000_000), + false, + &electrsd, + ); generate_blocks_and_wait(&bitcoind.client, &electrsd.client, 6); @@ -1201,14 +1212,14 @@ fn static_invoice_server() { .filter(|n| { node.network_graph().node(n).map_or(false, |info| info.announcement_info.is_some()) }) - .count() >= 4 + .count() >= 2 }; // Wait for everyone to see all channels and node announcements. - while node_sender.network_graph().list_channels().len() < 3 - || node_sender_lsp.network_graph().list_channels().len() < 3 - || node_receiver_lsp.network_graph().list_channels().len() < 3 - || node_receiver.network_graph().list_channels().len() < 3 + while node_sender.network_graph().list_channels().len() < 1 + || node_sender_lsp.network_graph().list_channels().len() < 1 + || node_receiver_lsp.network_graph().list_channels().len() < 1 + || node_receiver.network_graph().list_channels().len() < 1 || !has_node_announcements(&node_sender) || !has_node_announcements(&node_sender_lsp) || !has_node_announcements(&node_receiver_lsp) From 3df14770480c3fadb62b34dc57d23ee5b9b150df Mon Sep 17 00:00:00 2001 From: Joost Jager Date: Fri, 19 Sep 2025 14:42:21 +0200 Subject: [PATCH 10/27] Fix wait_for_tx exponential backoff Backoff wasn't actually working and polling would happen without any delay at all. 
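For context, the helper these call sites rely on has roughly the following shape (a minimal sketch; the real `exponential_backoff_poll` lives in tests/common/mod.rs and the delay schedule shown here is an assumption). The point of the fix below is that the closure must return `None` while the condition is unmet, so the helper itself applies the growing delay; the old call sites returned `Some(..)` unconditionally and looped around the helper, which skipped the backoff entirely.

    use std::cmp::min;
    use std::thread::sleep;
    use std::time::Duration;

    // Repeatedly invoke `poll` with exponentially growing sleeps until it
    // yields a value, then return that value.
    fn exponential_backoff_poll<T, F>(mut poll: F) -> T
    where
        F: FnMut() -> Option<T>,
    {
        let mut delay = Duration::from_millis(64);
        let max_delay = Duration::from_secs(2);
        loop {
            if let Some(value) = poll() {
                return value;
            }
            sleep(delay);
            delay = min(delay * 2, max_delay);
        }
    }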
--- tests/common/mod.rs | 35 +++++++++++++++++------------------ 1 file changed, 17 insertions(+), 18 deletions(-) diff --git a/tests/common/mod.rs b/tests/common/mod.rs index 70c9a43a8..0a1e8cbd2 100644 --- a/tests/common/mod.rs +++ b/tests/common/mod.rs @@ -437,32 +437,31 @@ pub(crate) fn wait_for_block(electrs: &E, min_height: usize) { } pub(crate) fn wait_for_tx(electrs: &E, txid: Txid) { - let mut tx_res = electrs.transaction_get(&txid); - loop { - if tx_res.is_ok() { - break; - } - tx_res = exponential_backoff_poll(|| { - electrs.ping().unwrap(); - Some(electrs.transaction_get(&txid)) - }); + if electrs.transaction_get(&txid).is_ok() { + return; } + + exponential_backoff_poll(|| { + electrs.ping().unwrap(); + electrs.transaction_get(&txid).ok() + }); } pub(crate) fn wait_for_outpoint_spend(electrs: &E, outpoint: OutPoint) { let tx = electrs.transaction_get(&outpoint.txid).unwrap(); let txout_script = tx.output.get(outpoint.vout as usize).unwrap().clone().script_pubkey; - let mut is_spent = !electrs.script_get_history(&txout_script).unwrap().is_empty(); - loop { - if is_spent { - break; - } - is_spent = exponential_backoff_poll(|| { - electrs.ping().unwrap(); - Some(!electrs.script_get_history(&txout_script).unwrap().is_empty()) - }); + let is_spent = !electrs.script_get_history(&txout_script).unwrap().is_empty(); + if is_spent { + return; } + + exponential_backoff_poll(|| { + electrs.ping().unwrap(); + + let is_spent = !electrs.script_get_history(&txout_script).unwrap().is_empty(); + is_spent.then_some(()) + }); } pub(crate) fn exponential_backoff_poll(mut poll: F) -> T From 97f404f4d895e9f06268facbbe617a40a6358455 Mon Sep 17 00:00:00 2001 From: Joost Jager Date: Thu, 18 Sep 2025 20:34:20 +0200 Subject: [PATCH 11/27] Adapt to new pay_for_offer call in upstream LDK Updated `pay_for_offer` call with `OptionalOfferPaymentParams` and delegate to `pay_for_offer_with_quantity` when needed. 
--- Cargo.toml | 24 ++++++++++++------------ src/payment/bolt12.rs | 43 ++++++++++++++++++++++++++++--------------- 2 files changed, 40 insertions(+), 27 deletions(-) diff --git a/Cargo.toml b/Cargo.toml index f3038ee96..1d3f45bfa 100755 --- a/Cargo.toml +++ b/Cargo.toml @@ -52,17 +52,17 @@ default = [] #lightning-liquidity = { git = "https://github.com/lightningdevkit/rust-lightning", branch = "main" } #lightning-macros = { git = "https://github.com/lightningdevkit/rust-lightning", branch = "main" } -lightning = { git = "https://github.com/lightningdevkit/rust-lightning", rev = "b002e43ec5f9c1cbdcd1ac8588402c5a65ecd2e4", features = ["std"] } -lightning-types = { git = "https://github.com/lightningdevkit/rust-lightning", rev = "b002e43ec5f9c1cbdcd1ac8588402c5a65ecd2e4" } -lightning-invoice = { git = "https://github.com/lightningdevkit/rust-lightning", rev = "b002e43ec5f9c1cbdcd1ac8588402c5a65ecd2e4", features = ["std"] } -lightning-net-tokio = { git = "https://github.com/lightningdevkit/rust-lightning", rev = "b002e43ec5f9c1cbdcd1ac8588402c5a65ecd2e4" } -lightning-persister = { git = "https://github.com/lightningdevkit/rust-lightning", rev = "b002e43ec5f9c1cbdcd1ac8588402c5a65ecd2e4" } -lightning-background-processor = { git = "https://github.com/lightningdevkit/rust-lightning", rev = "b002e43ec5f9c1cbdcd1ac8588402c5a65ecd2e4" } -lightning-rapid-gossip-sync = { git = "https://github.com/lightningdevkit/rust-lightning", rev = "b002e43ec5f9c1cbdcd1ac8588402c5a65ecd2e4" } -lightning-block-sync = { git = "https://github.com/lightningdevkit/rust-lightning", rev = "b002e43ec5f9c1cbdcd1ac8588402c5a65ecd2e4", features = ["rest-client", "rpc-client", "tokio"] } -lightning-transaction-sync = { git = "https://github.com/lightningdevkit/rust-lightning", rev = "b002e43ec5f9c1cbdcd1ac8588402c5a65ecd2e4", features = ["esplora-async-https", "electrum-rustls-ring", "time"] } -lightning-liquidity = { git = "https://github.com/lightningdevkit/rust-lightning", rev = "b002e43ec5f9c1cbdcd1ac8588402c5a65ecd2e4" } -lightning-macros = { git = "https://github.com/lightningdevkit/rust-lightning", rev = "b002e43ec5f9c1cbdcd1ac8588402c5a65ecd2e4" } +lightning = { git = "https://github.com/lightningdevkit/rust-lightning", rev = "50391d3a3efa7a8f32d119d126a633e4b1981ee6", features = ["std"] } +lightning-types = { git = "https://github.com/lightningdevkit/rust-lightning", rev = "50391d3a3efa7a8f32d119d126a633e4b1981ee6" } +lightning-invoice = { git = "https://github.com/lightningdevkit/rust-lightning", rev = "50391d3a3efa7a8f32d119d126a633e4b1981ee6", features = ["std"] } +lightning-net-tokio = { git = "https://github.com/lightningdevkit/rust-lightning", rev = "50391d3a3efa7a8f32d119d126a633e4b1981ee6" } +lightning-persister = { git = "https://github.com/lightningdevkit/rust-lightning", rev = "50391d3a3efa7a8f32d119d126a633e4b1981ee6" } +lightning-background-processor = { git = "https://github.com/lightningdevkit/rust-lightning", rev = "50391d3a3efa7a8f32d119d126a633e4b1981ee6" } +lightning-rapid-gossip-sync = { git = "https://github.com/lightningdevkit/rust-lightning", rev = "50391d3a3efa7a8f32d119d126a633e4b1981ee6" } +lightning-block-sync = { git = "https://github.com/lightningdevkit/rust-lightning", rev = "50391d3a3efa7a8f32d119d126a633e4b1981ee6", features = ["rest-client", "rpc-client", "tokio"] } +lightning-transaction-sync = { git = "https://github.com/lightningdevkit/rust-lightning", rev = "50391d3a3efa7a8f32d119d126a633e4b1981ee6", features = ["esplora-async-https", "electrum-rustls-ring", "time"] } 
+lightning-liquidity = { git = "https://github.com/lightningdevkit/rust-lightning", rev = "50391d3a3efa7a8f32d119d126a633e4b1981ee6" } +lightning-macros = { git = "https://github.com/lightningdevkit/rust-lightning", rev = "50391d3a3efa7a8f32d119d126a633e4b1981ee6" } #lightning = { path = "../rust-lightning/lightning", features = ["std"] } #lightning-types = { path = "../rust-lightning/lightning-types" } @@ -109,7 +109,7 @@ winapi = { version = "0.3", features = ["winbase"] } [dev-dependencies] #lightning = { version = "0.1.0", features = ["std", "_test_utils"] } #lightning = { git = "https://github.com/lightningdevkit/rust-lightning", branch="main", features = ["std", "_test_utils"] } -lightning = { git = "https://github.com/lightningdevkit/rust-lightning", rev = "b002e43ec5f9c1cbdcd1ac8588402c5a65ecd2e4", features = ["std", "_test_utils"] } +lightning = { git = "https://github.com/lightningdevkit/rust-lightning", rev = "50391d3a3efa7a8f32d119d126a633e4b1981ee6", features = ["std", "_test_utils"] } #lightning = { path = "../rust-lightning/lightning", features = ["std", "_test_utils"] } proptest = "1.0.0" regex = "1.5.6" diff --git a/src/payment/bolt12.rs b/src/payment/bolt12.rs index 81349e2bd..601c03d7d 100644 --- a/src/payment/bolt12.rs +++ b/src/payment/bolt12.rs @@ -17,7 +17,7 @@ use crate::payment::store::{PaymentDetails, PaymentDirection, PaymentKind, Payme use crate::types::{ChannelManager, PaymentStore}; use lightning::blinded_path::message::BlindedMessagePath; -use lightning::ln::channelmanager::{PaymentId, Retry}; +use lightning::ln::channelmanager::{OptionalOfferPaymentParams, PaymentId, Retry}; use lightning::offers::offer::{Amount, Offer as LdkOffer, Quantity}; use lightning::offers::parse::Bolt12SemanticError; use lightning::routing::router::RouteParametersConfig; @@ -102,15 +102,19 @@ impl Bolt12Payment { }, }; - match self.channel_manager.pay_for_offer( - &offer, - quantity, - None, - payer_note.clone(), - payment_id, + let params = OptionalOfferPaymentParams { + payer_note: payer_note.clone(), retry_strategy, route_params_config, - ) { + }; + let res = if let Some(quantity) = quantity { + self.channel_manager + .pay_for_offer_with_quantity(&offer, None, payment_id, params, quantity) + } else { + self.channel_manager.pay_for_offer(&offer, None, payment_id, params) + }; + + match res { Ok(()) => { let payee_pubkey = offer.issuer_signing_pubkey(); log_info!( @@ -209,15 +213,24 @@ impl Bolt12Payment { return Err(Error::InvalidAmount); } - match self.channel_manager.pay_for_offer( - &offer, - quantity, - Some(amount_msat), - payer_note.clone(), - payment_id, + let params = OptionalOfferPaymentParams { + payer_note: payer_note.clone(), retry_strategy, route_params_config, - ) { + }; + let res = if let Some(quantity) = quantity { + self.channel_manager.pay_for_offer_with_quantity( + &offer, + Some(amount_msat), + payment_id, + params, + quantity, + ) + } else { + self.channel_manager.pay_for_offer(&offer, Some(amount_msat), payment_id, params) + }; + + match res { Ok(()) => { let payee_pubkey = offer.issuer_signing_pubkey(); log_info!( From 904a05f7eda483e4122a4191620c03d6441f59e4 Mon Sep 17 00:00:00 2001 From: Elias Rohrer Date: Mon, 22 Sep 2025 13:11:23 +0200 Subject: [PATCH 12/27] Try to log status code for `reqwest`'s `Request` error kind We attempt to log a status code when `reqwest` returns a `Request` error kind. It might not be the case that the status code would always/ever be set for this error kind. 
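The relevant detail is that `reqwest::Error::status()` returns an `Option<StatusCode>`, so the logging has to branch on whether a status code is actually attached. A minimal sketch of that branching (the helper name is made up for illustration; the exact log wording used is in the hunk below):

    // Format a reqwest error, including the HTTP status code when present.
    fn describe_http_error(e: &reqwest::Error) -> String {
        match e.status() {
            Some(status_code) => format!("HTTP {} error: {}", status_code, e),
            None => format!("HTTP error: {}", e),
        }
    }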
--- src/chain/esplora.rs | 22 ++++++++++++++++------ 1 file changed, 16 insertions(+), 6 deletions(-) diff --git a/src/chain/esplora.rs b/src/chain/esplora.rs index 8e9a4dbd4..2226358c1 100644 --- a/src/chain/esplora.rs +++ b/src/chain/esplora.rs @@ -144,12 +144,22 @@ impl EsploraChainSource { }, Err(e) => match *e { esplora_client::Error::Reqwest(he) => { - log_error!( - self.logger, - "{} of on-chain wallet failed due to HTTP connection error: {}", - if incremental_sync { "Incremental sync" } else { "Sync" }, - he - ); + if let Some(status_code) = he.status() { + log_error!( + self.logger, + "{} of on-chain wallet failed due to HTTP {} error: {}", + if incremental_sync { "Incremental sync" } else { "Sync" }, + status_code, + he, + ); + } else { + log_error!( + self.logger, + "{} of on-chain wallet failed due to HTTP error: {}", + if incremental_sync { "Incremental sync" } else { "Sync" }, + he, + ); + } Err(Error::WalletOperationFailed) }, _ => { From 1192085185eb8bc8b2981c102b596e416276322c Mon Sep 17 00:00:00 2001 From: Elias Rohrer Date: Thu, 25 Sep 2025 09:53:59 +0200 Subject: [PATCH 13/27] Bump LDK and account for `FutureSpawner` move The `FutureSpawner` trait moved to `lightning::util::native_async` now. --- Cargo.toml | 24 ++++++++++++------------ src/gossip.rs | 4 +++- 2 files changed, 15 insertions(+), 13 deletions(-) diff --git a/Cargo.toml b/Cargo.toml index 1d3f45bfa..b639b7dc1 100755 --- a/Cargo.toml +++ b/Cargo.toml @@ -52,17 +52,17 @@ default = [] #lightning-liquidity = { git = "https://github.com/lightningdevkit/rust-lightning", branch = "main" } #lightning-macros = { git = "https://github.com/lightningdevkit/rust-lightning", branch = "main" } -lightning = { git = "https://github.com/lightningdevkit/rust-lightning", rev = "50391d3a3efa7a8f32d119d126a633e4b1981ee6", features = ["std"] } -lightning-types = { git = "https://github.com/lightningdevkit/rust-lightning", rev = "50391d3a3efa7a8f32d119d126a633e4b1981ee6" } -lightning-invoice = { git = "https://github.com/lightningdevkit/rust-lightning", rev = "50391d3a3efa7a8f32d119d126a633e4b1981ee6", features = ["std"] } -lightning-net-tokio = { git = "https://github.com/lightningdevkit/rust-lightning", rev = "50391d3a3efa7a8f32d119d126a633e4b1981ee6" } -lightning-persister = { git = "https://github.com/lightningdevkit/rust-lightning", rev = "50391d3a3efa7a8f32d119d126a633e4b1981ee6" } -lightning-background-processor = { git = "https://github.com/lightningdevkit/rust-lightning", rev = "50391d3a3efa7a8f32d119d126a633e4b1981ee6" } -lightning-rapid-gossip-sync = { git = "https://github.com/lightningdevkit/rust-lightning", rev = "50391d3a3efa7a8f32d119d126a633e4b1981ee6" } -lightning-block-sync = { git = "https://github.com/lightningdevkit/rust-lightning", rev = "50391d3a3efa7a8f32d119d126a633e4b1981ee6", features = ["rest-client", "rpc-client", "tokio"] } -lightning-transaction-sync = { git = "https://github.com/lightningdevkit/rust-lightning", rev = "50391d3a3efa7a8f32d119d126a633e4b1981ee6", features = ["esplora-async-https", "electrum-rustls-ring", "time"] } -lightning-liquidity = { git = "https://github.com/lightningdevkit/rust-lightning", rev = "50391d3a3efa7a8f32d119d126a633e4b1981ee6" } -lightning-macros = { git = "https://github.com/lightningdevkit/rust-lightning", rev = "50391d3a3efa7a8f32d119d126a633e4b1981ee6" } +lightning = { git = "https://github.com/lightningdevkit/rust-lightning", rev = "3e21ba37a133977d4247e86f25df983b39326994", features = ["std"] } +lightning-types = { git = 
"https://github.com/lightningdevkit/rust-lightning", rev = "3e21ba37a133977d4247e86f25df983b39326994" } +lightning-invoice = { git = "https://github.com/lightningdevkit/rust-lightning", rev = "3e21ba37a133977d4247e86f25df983b39326994", features = ["std"] } +lightning-net-tokio = { git = "https://github.com/lightningdevkit/rust-lightning", rev = "3e21ba37a133977d4247e86f25df983b39326994" } +lightning-persister = { git = "https://github.com/lightningdevkit/rust-lightning", rev = "3e21ba37a133977d4247e86f25df983b39326994" } +lightning-background-processor = { git = "https://github.com/lightningdevkit/rust-lightning", rev = "3e21ba37a133977d4247e86f25df983b39326994" } +lightning-rapid-gossip-sync = { git = "https://github.com/lightningdevkit/rust-lightning", rev = "3e21ba37a133977d4247e86f25df983b39326994" } +lightning-block-sync = { git = "https://github.com/lightningdevkit/rust-lightning", rev = "3e21ba37a133977d4247e86f25df983b39326994", features = ["rest-client", "rpc-client", "tokio"] } +lightning-transaction-sync = { git = "https://github.com/lightningdevkit/rust-lightning", rev = "3e21ba37a133977d4247e86f25df983b39326994", features = ["esplora-async-https", "electrum-rustls-ring", "time"] } +lightning-liquidity = { git = "https://github.com/lightningdevkit/rust-lightning", rev = "3e21ba37a133977d4247e86f25df983b39326994" } +lightning-macros = { git = "https://github.com/lightningdevkit/rust-lightning", rev = "3e21ba37a133977d4247e86f25df983b39326994" } #lightning = { path = "../rust-lightning/lightning", features = ["std"] } #lightning-types = { path = "../rust-lightning/lightning-types" } @@ -109,7 +109,7 @@ winapi = { version = "0.3", features = ["winbase"] } [dev-dependencies] #lightning = { version = "0.1.0", features = ["std", "_test_utils"] } #lightning = { git = "https://github.com/lightningdevkit/rust-lightning", branch="main", features = ["std", "_test_utils"] } -lightning = { git = "https://github.com/lightningdevkit/rust-lightning", rev = "50391d3a3efa7a8f32d119d126a633e4b1981ee6", features = ["std", "_test_utils"] } +lightning = { git = "https://github.com/lightningdevkit/rust-lightning", rev = "3e21ba37a133977d4247e86f25df983b39326994", features = ["std", "_test_utils"] } #lightning = { path = "../rust-lightning/lightning", features = ["std", "_test_utils"] } proptest = "1.0.0" regex = "1.5.6" diff --git a/src/gossip.rs b/src/gossip.rs index 258f9f736..efaf3ce89 100644 --- a/src/gossip.rs +++ b/src/gossip.rs @@ -12,7 +12,9 @@ use crate::runtime::Runtime; use crate::types::{GossipSync, Graph, P2PGossipSync, PeerManager, RapidGossipSync, UtxoLookup}; use crate::Error; -use lightning_block_sync::gossip::{FutureSpawner, GossipVerifier}; +use lightning_block_sync::gossip::GossipVerifier; + +use lightning::util::native_async::FutureSpawner; use std::future::Future; use std::sync::atomic::{AtomicU32, Ordering}; From 51eadb8cf00e15d3f138a87a5c42243dfd5f7885 Mon Sep 17 00:00:00 2001 From: Elias Rohrer Date: Fri, 12 Sep 2025 09:58:56 +0200 Subject: [PATCH 14/27] Move current VSS `KVStoreSync` logic to `_internal` methods .. first step to make review easier. 
--- src/io/vss_store.rs | 36 ++++++++++++++++++++++++++++++------ 1 file changed, 30 insertions(+), 6 deletions(-) diff --git a/src/io/vss_store.rs b/src/io/vss_store.rs index 87f966a9b..683eb7534 100644 --- a/src/io/vss_store.rs +++ b/src/io/vss_store.rs @@ -125,10 +125,8 @@ impl VssStore { } Ok(keys) } -} -impl KVStoreSync for VssStore { - fn read( + fn read_internal( &self, primary_namespace: &str, secondary_namespace: &str, key: &str, ) -> io::Result> { check_namespace_key_validity(primary_namespace, secondary_namespace, Some(key), "read")?; @@ -159,7 +157,7 @@ impl KVStoreSync for VssStore { Ok(self.storable_builder.deconstruct(storable)?.0) } - fn write( + fn write_internal( &self, primary_namespace: &str, secondary_namespace: &str, key: &str, buf: Vec, ) -> io::Result<()> { check_namespace_key_validity(primary_namespace, secondary_namespace, Some(key), "write")?; @@ -187,7 +185,7 @@ impl KVStoreSync for VssStore { Ok(()) } - fn remove( + fn remove_internal( &self, primary_namespace: &str, secondary_namespace: &str, key: &str, _lazy: bool, ) -> io::Result<()> { check_namespace_key_validity(primary_namespace, secondary_namespace, Some(key), "remove")?; @@ -210,7 +208,9 @@ impl KVStoreSync for VssStore { Ok(()) } - fn list(&self, primary_namespace: &str, secondary_namespace: &str) -> io::Result> { + fn list_internal( + &self, primary_namespace: &str, secondary_namespace: &str, + ) -> io::Result> { check_namespace_key_validity(primary_namespace, secondary_namespace, None, "list")?; let keys = self @@ -228,6 +228,30 @@ impl KVStoreSync for VssStore { } } +impl KVStoreSync for VssStore { + fn read( + &self, primary_namespace: &str, secondary_namespace: &str, key: &str, + ) -> io::Result> { + self.read_internal(primary_namespace, secondary_namespace, key) + } + + fn write( + &self, primary_namespace: &str, secondary_namespace: &str, key: &str, buf: Vec, + ) -> io::Result<()> { + self.write_internal(primary_namespace, secondary_namespace, key, buf) + } + + fn remove( + &self, primary_namespace: &str, secondary_namespace: &str, key: &str, lazy: bool, + ) -> io::Result<()> { + self.remove_internal(primary_namespace, secondary_namespace, key, lazy) + } + + fn list(&self, primary_namespace: &str, secondary_namespace: &str) -> io::Result> { + self.list_internal(primary_namespace, secondary_namespace) + } +} + fn derive_data_encryption_and_obfuscation_keys(vss_seed: &[u8; 32]) -> ([u8; 32], [u8; 32]) { let hkdf = |initial_key_material: &[u8], salt: &[u8]| -> [u8; 32] { let mut engine = HmacEngine::::new(salt); From 8d26c630ec722591812e875deb0d491f24b03a1b Mon Sep 17 00:00:00 2001 From: Elias Rohrer Date: Fri, 12 Sep 2025 10:03:30 +0200 Subject: [PATCH 15/27] Make VSS internal methods `async`, move `block_on` to `impl KVStoreSync` .. as we're gonna reuse the `async` `_internal` methods shortly. 
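Taken together, this and the next two commits converge on a single shape per store: the actual logic lives in inherent async `_internal` methods, the `KVStoreSync` impl wraps them in `block_on`, and the async `KVStore` impl boxes them as `'static + Send` futures. A condensed, self-contained sketch of that shape (names simplified; `tokio::runtime::Runtime` stands in for the crate's own `Runtime` wrapper):

    use std::future::Future;
    use std::io;
    use std::pin::Pin;
    use std::sync::Arc;

    struct StoreInner;

    impl StoreInner {
        // The single source of truth; both trait impls delegate here.
        async fn read_internal(&self, _key: &str) -> io::Result<Vec<u8>> {
            Ok(Vec::new())
        }
    }

    struct Store {
        inner: Arc<StoreInner>,
        runtime: Arc<tokio::runtime::Runtime>,
    }

    impl Store {
        // Sync path: block on the shared async implementation.
        fn read_sync(&self, key: &str) -> io::Result<Vec<u8>> {
            self.runtime.block_on(self.inner.read_internal(key))
        }

        // Async path: own the arguments and clone the `Arc` so the boxed
        // future is `'static + Send`.
        fn read_async(
            &self, key: &str,
        ) -> Pin<Box<dyn Future<Output = io::Result<Vec<u8>>> + Send>> {
            let key = key.to_string();
            let inner = Arc::clone(&self.inner);
            Box::pin(async move { inner.read_internal(&key).await })
        }
    }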
--- src/io/vss_store.rs | 34 +++++++++++++++++++--------------- 1 file changed, 19 insertions(+), 15 deletions(-) diff --git a/src/io/vss_store.rs b/src/io/vss_store.rs index 683eb7534..fbf1622b5 100644 --- a/src/io/vss_store.rs +++ b/src/io/vss_store.rs @@ -126,7 +126,7 @@ impl VssStore { Ok(keys) } - fn read_internal( + async fn read_internal( &self, primary_namespace: &str, secondary_namespace: &str, key: &str, ) -> io::Result> { check_namespace_key_validity(primary_namespace, secondary_namespace, Some(key), "read")?; @@ -134,7 +134,7 @@ impl VssStore { store_id: self.store_id.clone(), key: self.build_key(primary_namespace, secondary_namespace, key)?, }; - let resp = self.runtime.block_on(self.client.get_object(&request)).map_err(|e| { + let resp = self.client.get_object(&request).await.map_err(|e| { let msg = format!( "Failed to read from key {}/{}/{}: {}", primary_namespace, secondary_namespace, key, e @@ -144,6 +144,7 @@ impl VssStore { _ => Error::new(ErrorKind::Other, msg), } })?; + // unwrap safety: resp.value must be always present for a non-erroneous VSS response, otherwise // it is an API-violation which is converted to [`VssError::InternalServerError`] in [`VssClient`] let storable = Storable::decode(&resp.value.unwrap().value[..]).map_err(|e| { @@ -157,7 +158,7 @@ impl VssStore { Ok(self.storable_builder.deconstruct(storable)?.0) } - fn write_internal( + async fn write_internal( &self, primary_namespace: &str, secondary_namespace: &str, key: &str, buf: Vec, ) -> io::Result<()> { check_namespace_key_validity(primary_namespace, secondary_namespace, Some(key), "write")?; @@ -174,7 +175,7 @@ impl VssStore { delete_items: vec![], }; - self.runtime.block_on(self.client.put_object(&request)).map_err(|e| { + self.client.put_object(&request).await.map_err(|e| { let msg = format!( "Failed to write to key {}/{}/{}: {}", primary_namespace, secondary_namespace, key, e @@ -185,7 +186,7 @@ impl VssStore { Ok(()) } - fn remove_internal( + async fn remove_internal( &self, primary_namespace: &str, secondary_namespace: &str, key: &str, _lazy: bool, ) -> io::Result<()> { check_namespace_key_validity(primary_namespace, secondary_namespace, Some(key), "remove")?; @@ -198,25 +199,24 @@ impl VssStore { }), }; - self.runtime.block_on(self.client.delete_object(&request)).map_err(|e| { + self.client.delete_object(&request).await.map_err(|e| { let msg = format!( "Failed to delete key {}/{}/{}: {}", primary_namespace, secondary_namespace, key, e ); Error::new(ErrorKind::Other, msg) })?; + Ok(()) } - fn list_internal( + async fn list_internal( &self, primary_namespace: &str, secondary_namespace: &str, ) -> io::Result> { check_namespace_key_validity(primary_namespace, secondary_namespace, None, "list")?; - let keys = self - .runtime - .block_on(self.list_all_keys(primary_namespace, secondary_namespace)) - .map_err(|e| { + let keys = + self.list_all_keys(primary_namespace, secondary_namespace).await.map_err(|e| { let msg = format!( "Failed to retrieve keys in namespace: {}/{} : {}", primary_namespace, secondary_namespace, e @@ -232,23 +232,27 @@ impl KVStoreSync for VssStore { fn read( &self, primary_namespace: &str, secondary_namespace: &str, key: &str, ) -> io::Result> { - self.read_internal(primary_namespace, secondary_namespace, key) + let fut = self.read_internal(primary_namespace, secondary_namespace, key); + self.runtime.block_on(fut) } fn write( &self, primary_namespace: &str, secondary_namespace: &str, key: &str, buf: Vec, ) -> io::Result<()> { - self.write_internal(primary_namespace, 
secondary_namespace, key, buf) + let fut = self.write_internal(primary_namespace, secondary_namespace, key, buf); + self.runtime.block_on(fut) } fn remove( &self, primary_namespace: &str, secondary_namespace: &str, key: &str, lazy: bool, ) -> io::Result<()> { - self.remove_internal(primary_namespace, secondary_namespace, key, lazy) + let fut = self.remove_internal(primary_namespace, secondary_namespace, key, lazy); + self.runtime.block_on(fut) } fn list(&self, primary_namespace: &str, secondary_namespace: &str) -> io::Result> { - self.list_internal(primary_namespace, secondary_namespace) + let fut = self.list_internal(primary_namespace, secondary_namespace); + self.runtime.block_on(fut) } } From 0686ecec691fc2b9cafd01ae25df712695a3f674 Mon Sep 17 00:00:00 2001 From: Elias Rohrer Date: Fri, 12 Sep 2025 10:40:22 +0200 Subject: [PATCH 16/27] Split `VssStore` into `VssStore` and `VssStoreInner` .. where the former holds the latter in an `Arc` that can be used in async/`Future` contexts more easily. --- src/io/vss_store.rs | 78 ++++++++++++++++++++++++++------------------- 1 file changed, 46 insertions(+), 32 deletions(-) diff --git a/src/io/vss_store.rs b/src/io/vss_store.rs index fbf1622b5..052224916 100644 --- a/src/io/vss_store.rs +++ b/src/io/vss_store.rs @@ -40,17 +40,59 @@ type CustomRetryPolicy = FilteredRetryPolicy< /// A [`KVStoreSync`] implementation that writes to and reads from a [VSS](https://github.com/lightningdevkit/vss-server/blob/main/README.md) backend. pub struct VssStore { + inner: Arc, + runtime: Arc, +} + +impl VssStore { + pub(crate) fn new( + base_url: String, store_id: String, vss_seed: [u8; 32], + header_provider: Arc, runtime: Arc, + ) -> Self { + let inner = Arc::new(VssStoreInner::new(base_url, store_id, vss_seed, header_provider)); + Self { inner, runtime } + } +} + +impl KVStoreSync for VssStore { + fn read( + &self, primary_namespace: &str, secondary_namespace: &str, key: &str, + ) -> io::Result> { + let fut = self.inner.read_internal(primary_namespace, secondary_namespace, key); + self.runtime.block_on(fut) + } + + fn write( + &self, primary_namespace: &str, secondary_namespace: &str, key: &str, buf: Vec, + ) -> io::Result<()> { + let fut = self.inner.write_internal(primary_namespace, secondary_namespace, key, buf); + self.runtime.block_on(fut) + } + + fn remove( + &self, primary_namespace: &str, secondary_namespace: &str, key: &str, lazy: bool, + ) -> io::Result<()> { + let fut = self.inner.remove_internal(primary_namespace, secondary_namespace, key, lazy); + self.runtime.block_on(fut) + } + + fn list(&self, primary_namespace: &str, secondary_namespace: &str) -> io::Result> { + let fut = self.inner.list_internal(primary_namespace, secondary_namespace); + self.runtime.block_on(fut) + } +} + +struct VssStoreInner { client: VssClient, store_id: String, - runtime: Arc, storable_builder: StorableBuilder, key_obfuscator: KeyObfuscator, } -impl VssStore { +impl VssStoreInner { pub(crate) fn new( base_url: String, store_id: String, vss_seed: [u8; 32], - header_provider: Arc, runtime: Arc, + header_provider: Arc, ) -> Self { let (data_encryption_key, obfuscation_master_key) = derive_data_encryption_and_obfuscation_keys(&vss_seed); @@ -70,7 +112,7 @@ impl VssStore { }) as _); let client = VssClient::new_with_headers(base_url, retry_policy, header_provider); - Self { client, store_id, runtime, storable_builder, key_obfuscator } + Self { client, store_id, storable_builder, key_obfuscator } } fn build_key( @@ -228,34 +270,6 @@ impl VssStore { } } -impl KVStoreSync for 
VssStore { - fn read( - &self, primary_namespace: &str, secondary_namespace: &str, key: &str, - ) -> io::Result> { - let fut = self.read_internal(primary_namespace, secondary_namespace, key); - self.runtime.block_on(fut) - } - - fn write( - &self, primary_namespace: &str, secondary_namespace: &str, key: &str, buf: Vec, - ) -> io::Result<()> { - let fut = self.write_internal(primary_namespace, secondary_namespace, key, buf); - self.runtime.block_on(fut) - } - - fn remove( - &self, primary_namespace: &str, secondary_namespace: &str, key: &str, lazy: bool, - ) -> io::Result<()> { - let fut = self.remove_internal(primary_namespace, secondary_namespace, key, lazy); - self.runtime.block_on(fut) - } - - fn list(&self, primary_namespace: &str, secondary_namespace: &str) -> io::Result> { - let fut = self.list_internal(primary_namespace, secondary_namespace); - self.runtime.block_on(fut) - } -} - fn derive_data_encryption_and_obfuscation_keys(vss_seed: &[u8; 32]) -> ([u8; 32], [u8; 32]) { let hkdf = |initial_key_material: &[u8], salt: &[u8]| -> [u8; 32] { let mut engine = HmacEngine::::new(salt); From 523900fe69fc4cc57d59507cd5a71c774ae42710 Mon Sep 17 00:00:00 2001 From: Elias Rohrer Date: Fri, 12 Sep 2025 10:47:46 +0200 Subject: [PATCH 17/27] WIP Implement `KVStore` for `VssStore` We implement the async `KVStore` trait for `VssStore`. --- src/io/vss_store.rs | 50 ++++++++++++++++++++++++++++++++++++++++++++- 1 file changed, 49 insertions(+), 1 deletion(-) diff --git a/src/io/vss_store.rs b/src/io/vss_store.rs index 052224916..881cc68bc 100644 --- a/src/io/vss_store.rs +++ b/src/io/vss_store.rs @@ -10,7 +10,7 @@ use crate::runtime::Runtime; use bitcoin::hashes::{sha256, Hash, HashEngine, Hmac, HmacEngine}; use lightning::io::{self, Error, ErrorKind}; -use lightning::util::persist::KVStoreSync; +use lightning::util::persist::{KVStore, KVStoreSync}; use prost::Message; use rand::RngCore; #[cfg(test)] @@ -31,6 +31,10 @@ use vss_client::util::retry::{ }; use vss_client::util::storable_builder::{EntropySource, StorableBuilder}; +use std::boxed::Box; +use std::future::Future; +use std::pin::Pin; + type CustomRetryPolicy = FilteredRetryPolicy< JitteredRetryPolicy< MaxTotalDelayRetryPolicy>>, @@ -82,6 +86,50 @@ impl KVStoreSync for VssStore { } } +impl KVStore for VssStore { + fn read( + &self, primary_namespace: &str, secondary_namespace: &str, key: &str, + ) -> Pin, io::Error>> + Send>> { + let primary_namespace = primary_namespace.to_string(); + let secondary_namespace = secondary_namespace.to_string(); + let key = key.to_string(); + let inner = Arc::clone(&self.inner); + Box::pin(async move { + inner.read_internal(&primary_namespace, &secondary_namespace, &key).await + }) + } + fn write( + &self, primary_namespace: &str, secondary_namespace: &str, key: &str, buf: Vec, + ) -> Pin> + Send>> { + let primary_namespace = primary_namespace.to_string(); + let secondary_namespace = secondary_namespace.to_string(); + let key = key.to_string(); + let inner = Arc::clone(&self.inner); + Box::pin(async move { + inner.write_internal(&primary_namespace, &secondary_namespace, &key, buf).await + }) + } + fn remove( + &self, primary_namespace: &str, secondary_namespace: &str, key: &str, lazy: bool, + ) -> Pin> + Send>> { + let primary_namespace = primary_namespace.to_string(); + let secondary_namespace = secondary_namespace.to_string(); + let key = key.to_string(); + let inner = Arc::clone(&self.inner); + Box::pin(async move { + inner.remove_internal(&primary_namespace, &secondary_namespace, &key, lazy).await + }) + } + 
fn list( + &self, primary_namespace: &str, secondary_namespace: &str, + ) -> Pin, io::Error>> + Send>> { + let primary_namespace = primary_namespace.to_string(); + let secondary_namespace = secondary_namespace.to_string(); + let inner = Arc::clone(&self.inner); + Box::pin(async move { inner.list_internal(&primary_namespace, &secondary_namespace).await }) + } +} + struct VssStoreInner { client: VssClient, store_id: String, From addefaab33641c5c32970e0d63b2834a306c0cfd Mon Sep 17 00:00:00 2001 From: Elias Rohrer Date: Fri, 12 Sep 2025 11:00:04 +0200 Subject: [PATCH 18/27] Move `SqliteStore` logic to `_internal` methods .. to be easier reusable via `KVStore` also --- src/io/sqlite_store/mod.rs | 36 ++++++++++++++++++++++++++++++------ 1 file changed, 30 insertions(+), 6 deletions(-) diff --git a/src/io/sqlite_store/mod.rs b/src/io/sqlite_store/mod.rs index 4006ab2cc..72d29112c 100644 --- a/src/io/sqlite_store/mod.rs +++ b/src/io/sqlite_store/mod.rs @@ -128,10 +128,8 @@ impl SqliteStore { pub fn get_data_dir(&self) -> PathBuf { self.data_dir.clone() } -} -impl KVStoreSync for SqliteStore { - fn read( + fn read_internal( &self, primary_namespace: &str, secondary_namespace: &str, key: &str, ) -> io::Result> { check_namespace_key_validity(primary_namespace, secondary_namespace, Some(key), "read")?; @@ -179,7 +177,7 @@ impl KVStoreSync for SqliteStore { Ok(res) } - fn write( + fn write_internal( &self, primary_namespace: &str, secondary_namespace: &str, key: &str, buf: Vec, ) -> io::Result<()> { check_namespace_key_validity(primary_namespace, secondary_namespace, Some(key), "write")?; @@ -215,7 +213,7 @@ impl KVStoreSync for SqliteStore { }) } - fn remove( + fn remove_internal( &self, primary_namespace: &str, secondary_namespace: &str, key: &str, _lazy: bool, ) -> io::Result<()> { check_namespace_key_validity(primary_namespace, secondary_namespace, Some(key), "remove")?; @@ -247,7 +245,9 @@ impl KVStoreSync for SqliteStore { Ok(()) } - fn list(&self, primary_namespace: &str, secondary_namespace: &str) -> io::Result> { + fn list_internal( + &self, primary_namespace: &str, secondary_namespace: &str, + ) -> io::Result> { check_namespace_key_validity(primary_namespace, secondary_namespace, None, "list")?; let locked_conn = self.connection.lock().unwrap(); @@ -287,6 +287,30 @@ impl KVStoreSync for SqliteStore { } } +impl KVStoreSync for SqliteStore { + fn read( + &self, primary_namespace: &str, secondary_namespace: &str, key: &str, + ) -> io::Result> { + self.read_internal(primary_namespace, secondary_namespace, key) + } + + fn write( + &self, primary_namespace: &str, secondary_namespace: &str, key: &str, buf: Vec, + ) -> io::Result<()> { + self.write_internal(primary_namespace, secondary_namespace, key, buf) + } + + fn remove( + &self, primary_namespace: &str, secondary_namespace: &str, key: &str, lazy: bool, + ) -> io::Result<()> { + self.remove_internal(primary_namespace, secondary_namespace, key, lazy) + } + + fn list(&self, primary_namespace: &str, secondary_namespace: &str) -> io::Result> { + self.list_internal(primary_namespace, secondary_namespace) + } +} + #[cfg(test)] mod tests { use super::*; From db6ed3c88934e2214722117541acf2a6a0a59d6f Mon Sep 17 00:00:00 2001 From: Elias Rohrer Date: Fri, 12 Sep 2025 11:18:42 +0200 Subject: [PATCH 19/27] Split `SqliteStore` into `SqliteStore` and `SqliteStoreInner` .. where the former holds the latter in an `Arc` that can be used in async/`Future` contexts more easily. 
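Unlike the VSS client, rusqlite is blocking, so the async `KVStore` impl added in the commit after this one does not await the `_internal` methods directly; it offloads them via `tokio::task::spawn_blocking` and maps a `JoinError` back into an `io::Error`. A self-contained sketch of that wrapper (the `Inner` type and its method are stand-ins so the snippet compiles on its own):

    use std::io;
    use std::sync::Arc;

    // Stand-in for the store's inner type, only here to make the sketch compile.
    struct Inner;

    impl Inner {
        fn list_internal(&self, _primary: &str, _secondary: &str) -> io::Result<Vec<String>> {
            Ok(Vec::new())
        }
    }

    async fn list_async(
        inner: Arc<Inner>, primary_namespace: String, secondary_namespace: String,
    ) -> io::Result<Vec<String>> {
        // Run the blocking SQLite query on tokio's blocking pool; a panicked or
        // cancelled task surfaces as an I/O error rather than bubbling up a
        // `JoinError`.
        tokio::task::spawn_blocking(move || {
            inner.list_internal(&primary_namespace, &secondary_namespace)
        })
        .await
        .unwrap_or_else(|e| Err(io::Error::new(io::ErrorKind::Other, format!("join error: {}", e))))
    }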
--- src/io/sqlite_store/mod.rs | 79 ++++++++++++++++++++++---------------- 1 file changed, 46 insertions(+), 33 deletions(-) diff --git a/src/io/sqlite_store/mod.rs b/src/io/sqlite_store/mod.rs index 72d29112c..1ec23aa0e 100644 --- a/src/io/sqlite_store/mod.rs +++ b/src/io/sqlite_store/mod.rs @@ -39,9 +39,7 @@ const SCHEMA_USER_VERSION: u16 = 2; /// /// [SQLite]: https://sqlite.org pub struct SqliteStore { - connection: Arc>, - data_dir: PathBuf, - kv_table_name: String, + inner: Arc, } impl SqliteStore { @@ -53,6 +51,50 @@ impl SqliteStore { /// Similarly, the given `kv_table_name` will be used or default to [`DEFAULT_KV_TABLE_NAME`]. pub fn new( data_dir: PathBuf, db_file_name: Option, kv_table_name: Option, + ) -> io::Result { + let inner = Arc::new(SqliteStoreInner::new(data_dir, db_file_name, kv_table_name)?); + Ok(Self { inner }) + } + + /// Returns the data directory. + pub fn get_data_dir(&self) -> PathBuf { + self.inner.data_dir.clone() + } +} + +impl KVStoreSync for SqliteStore { + fn read( + &self, primary_namespace: &str, secondary_namespace: &str, key: &str, + ) -> io::Result> { + self.inner.read_internal(primary_namespace, secondary_namespace, key) + } + + fn write( + &self, primary_namespace: &str, secondary_namespace: &str, key: &str, buf: Vec, + ) -> io::Result<()> { + self.inner.write_internal(primary_namespace, secondary_namespace, key, buf) + } + + fn remove( + &self, primary_namespace: &str, secondary_namespace: &str, key: &str, lazy: bool, + ) -> io::Result<()> { + self.inner.remove_internal(primary_namespace, secondary_namespace, key, lazy) + } + + fn list(&self, primary_namespace: &str, secondary_namespace: &str) -> io::Result> { + self.inner.list_internal(primary_namespace, secondary_namespace) + } +} + +struct SqliteStoreInner { + connection: Arc>, + data_dir: PathBuf, + kv_table_name: String, +} + +impl SqliteStoreInner { + fn new( + data_dir: PathBuf, db_file_name: Option, kv_table_name: Option, ) -> io::Result { let db_file_name = db_file_name.unwrap_or(DEFAULT_SQLITE_DB_FILE_NAME.to_string()); let kv_table_name = kv_table_name.unwrap_or(DEFAULT_KV_TABLE_NAME.to_string()); @@ -124,11 +166,6 @@ impl SqliteStore { Ok(Self { connection, data_dir, kv_table_name }) } - /// Returns the data directory. 
- pub fn get_data_dir(&self) -> PathBuf { - self.data_dir.clone() - } - fn read_internal( &self, primary_namespace: &str, secondary_namespace: &str, key: &str, ) -> io::Result> { @@ -287,30 +324,6 @@ impl SqliteStore { } } -impl KVStoreSync for SqliteStore { - fn read( - &self, primary_namespace: &str, secondary_namespace: &str, key: &str, - ) -> io::Result> { - self.read_internal(primary_namespace, secondary_namespace, key) - } - - fn write( - &self, primary_namespace: &str, secondary_namespace: &str, key: &str, buf: Vec, - ) -> io::Result<()> { - self.write_internal(primary_namespace, secondary_namespace, key, buf) - } - - fn remove( - &self, primary_namespace: &str, secondary_namespace: &str, key: &str, lazy: bool, - ) -> io::Result<()> { - self.remove_internal(primary_namespace, secondary_namespace, key, lazy) - } - - fn list(&self, primary_namespace: &str, secondary_namespace: &str) -> io::Result> { - self.list_internal(primary_namespace, secondary_namespace) - } -} - #[cfg(test)] mod tests { use super::*; @@ -320,7 +333,7 @@ mod tests { impl Drop for SqliteStore { fn drop(&mut self) { - match fs::remove_dir_all(&self.data_dir) { + match fs::remove_dir_all(&self.inner.data_dir) { Err(e) => println!("Failed to remove test store directory: {}", e), _ => {}, } From 69b9631e70fc3bcae1184043cb617cbad474ff37 Mon Sep 17 00:00:00 2001 From: Elias Rohrer Date: Fri, 12 Sep 2025 11:42:47 +0200 Subject: [PATCH 20/27] Implement `KVStore` for `SqliteStore` --- src/io/sqlite_store/mod.rs | 75 +++++++++++++++++++++++++++++++++++++- 1 file changed, 74 insertions(+), 1 deletion(-) diff --git a/src/io/sqlite_store/mod.rs b/src/io/sqlite_store/mod.rs index 1ec23aa0e..7d63022f2 100644 --- a/src/io/sqlite_store/mod.rs +++ b/src/io/sqlite_store/mod.rs @@ -9,14 +9,17 @@ use crate::io::utils::check_namespace_key_validity; use lightning::io; -use lightning::util::persist::KVStoreSync; +use lightning::util::persist::{KVStore, KVStoreSync}; use lightning_types::string::PrintableString; use rusqlite::{named_params, Connection}; +use std::boxed::Box; use std::fs; +use std::future::Future; use std::path::PathBuf; +use std::pin::Pin; use std::sync::{Arc, Mutex}; mod migrations; @@ -62,6 +65,76 @@ impl SqliteStore { } } +impl KVStore for SqliteStore { + fn read( + &self, primary_namespace: &str, secondary_namespace: &str, key: &str, + ) -> Pin, io::Error>> + Send>> { + let primary_namespace = primary_namespace.to_string(); + let secondary_namespace = secondary_namespace.to_string(); + let key = key.to_string(); + let inner = Arc::clone(&self.inner); + let fut = tokio::task::spawn_blocking(move || { + inner.read_internal(&primary_namespace, &secondary_namespace, &key) + }); + Box::pin(async move { + fut.await.unwrap_or_else(|e| { + let msg = format!("Failed to IO operation due join error: {}", e); + Err(io::Error::new(io::ErrorKind::Other, msg)) + }) + }) + } + fn write( + &self, primary_namespace: &str, secondary_namespace: &str, key: &str, buf: Vec, + ) -> Pin> + Send>> { + let primary_namespace = primary_namespace.to_string(); + let secondary_namespace = secondary_namespace.to_string(); + let key = key.to_string(); + let inner = Arc::clone(&self.inner); + let fut = tokio::task::spawn_blocking(move || { + inner.write_internal(&primary_namespace, &secondary_namespace, &key, buf) + }); + Box::pin(async move { + fut.await.unwrap_or_else(|e| { + let msg = format!("Failed to IO operation due join error: {}", e); + Err(io::Error::new(io::ErrorKind::Other, msg)) + }) + }) + } + fn remove( + &self, primary_namespace: &str, 
secondary_namespace: &str, key: &str, lazy: bool, + ) -> Pin> + Send>> { + let primary_namespace = primary_namespace.to_string(); + let secondary_namespace = secondary_namespace.to_string(); + let key = key.to_string(); + let inner = Arc::clone(&self.inner); + let fut = tokio::task::spawn_blocking(move || { + inner.remove_internal(&primary_namespace, &secondary_namespace, &key, lazy) + }); + Box::pin(async move { + fut.await.unwrap_or_else(|e| { + let msg = format!("Failed to IO operation due join error: {}", e); + Err(io::Error::new(io::ErrorKind::Other, msg)) + }) + }) + } + fn list( + &self, primary_namespace: &str, secondary_namespace: &str, + ) -> Pin, io::Error>> + Send>> { + let primary_namespace = primary_namespace.to_string(); + let secondary_namespace = secondary_namespace.to_string(); + let inner = Arc::clone(&self.inner); + let fut = tokio::task::spawn_blocking(move || { + inner.list_internal(&primary_namespace, &secondary_namespace) + }); + Box::pin(async move { + fut.await.unwrap_or_else(|e| { + let msg = format!("Failed to IO operation due join error: {}", e); + Err(io::Error::new(io::ErrorKind::Other, msg)) + }) + }) + } +} + impl KVStoreSync for SqliteStore { fn read( &self, primary_namespace: &str, secondary_namespace: &str, key: &str, From 5a98b7fabedcd22abcdc8082096957776613a15b Mon Sep 17 00:00:00 2001 From: Elias Rohrer Date: Thu, 18 Sep 2025 11:13:46 +0200 Subject: [PATCH 21/27] Move `TestStoreSync` logic to `_internal` methods .. to be easier reusable via `KVStore` also --- tests/common/mod.rs | 36 ++++++++++++++++++++++++++++++------ 1 file changed, 30 insertions(+), 6 deletions(-) diff --git a/tests/common/mod.rs b/tests/common/mod.rs index 0a1e8cbd2..db3ce7a22 100644 --- a/tests/common/mod.rs +++ b/tests/common/mod.rs @@ -1240,10 +1240,8 @@ impl TestSyncStore { }, } } -} -impl KVStoreSync for TestSyncStore { - fn read( + fn read_internal( &self, primary_namespace: &str, secondary_namespace: &str, key: &str, ) -> lightning::io::Result> { let _guard = self.serializer.read().unwrap(); @@ -1268,7 +1266,7 @@ impl KVStoreSync for TestSyncStore { } } - fn write( + fn write_internal( &self, primary_namespace: &str, secondary_namespace: &str, key: &str, buf: Vec, ) -> lightning::io::Result<()> { let _guard = self.serializer.write().unwrap(); @@ -1297,7 +1295,7 @@ impl KVStoreSync for TestSyncStore { } } - fn remove( + fn remove_internal( &self, primary_namespace: &str, secondary_namespace: &str, key: &str, lazy: bool, ) -> lightning::io::Result<()> { let _guard = self.serializer.write().unwrap(); @@ -1325,10 +1323,36 @@ impl KVStoreSync for TestSyncStore { } } - fn list( + fn list_internal( &self, primary_namespace: &str, secondary_namespace: &str, ) -> lightning::io::Result> { let _guard = self.serializer.read().unwrap(); self.do_list(primary_namespace, secondary_namespace) } } + +impl KVStoreSync for TestSyncStore { + fn read( + &self, primary_namespace: &str, secondary_namespace: &str, key: &str, + ) -> lightning::io::Result> { + self.read_internal(primary_namespace, secondary_namespace, key) + } + + fn write( + &self, primary_namespace: &str, secondary_namespace: &str, key: &str, buf: Vec, + ) -> lightning::io::Result<()> { + self.write_internal(primary_namespace, secondary_namespace, key, buf) + } + + fn remove( + &self, primary_namespace: &str, secondary_namespace: &str, key: &str, lazy: bool, + ) -> lightning::io::Result<()> { + self.remove_internal(primary_namespace, secondary_namespace, key, lazy) + } + + fn list( + &self, primary_namespace: &str, 
secondary_namespace: &str, + ) -> lightning::io::Result> { + self.list_internal(primary_namespace, secondary_namespace) + } +} From c4648a01123ed0c03f883456665bb9f51a551e7d Mon Sep 17 00:00:00 2001 From: Elias Rohrer Date: Thu, 18 Sep 2025 11:16:36 +0200 Subject: [PATCH 22/27] Split `TestSyncStore` into `TestSyncStore` and `TestSyncStoreInner` .. where the former holds the latter in an `Arc` that can be used in async/`Future` contexts more easily. --- tests/common/mod.rs | 67 ++++++++++++++++++++++++++------------------- 1 file changed, 39 insertions(+), 28 deletions(-) diff --git a/tests/common/mod.rs b/tests/common/mod.rs index db3ce7a22..77e89f0ba 100644 --- a/tests/common/mod.rs +++ b/tests/common/mod.rs @@ -1188,14 +1188,51 @@ pub(crate) fn do_channel_full_cycle( // A `KVStore` impl for testing purposes that wraps all our `KVStore`s and asserts their synchronicity. pub(crate) struct TestSyncStore { + inner: Arc, +} + +impl TestSyncStore { + pub(crate) fn new(dest_dir: PathBuf) -> Self { + let inner = Arc::new(TestSyncStoreInner::new(dest_dir)); + Self { inner } + } +} + +impl KVStoreSync for TestSyncStore { + fn read( + &self, primary_namespace: &str, secondary_namespace: &str, key: &str, + ) -> lightning::io::Result> { + self.inner.read_internal(primary_namespace, secondary_namespace, key) + } + + fn write( + &self, primary_namespace: &str, secondary_namespace: &str, key: &str, buf: Vec, + ) -> lightning::io::Result<()> { + self.inner.write_internal(primary_namespace, secondary_namespace, key, buf) + } + + fn remove( + &self, primary_namespace: &str, secondary_namespace: &str, key: &str, lazy: bool, + ) -> lightning::io::Result<()> { + self.inner.remove_internal(primary_namespace, secondary_namespace, key, lazy) + } + + fn list( + &self, primary_namespace: &str, secondary_namespace: &str, + ) -> lightning::io::Result> { + self.inner.list_internal(primary_namespace, secondary_namespace) + } +} + +struct TestSyncStoreInner { serializer: RwLock<()>, test_store: TestStore, fs_store: FilesystemStore, sqlite_store: SqliteStore, } -impl TestSyncStore { - pub(crate) fn new(dest_dir: PathBuf) -> Self { +impl TestSyncStoreInner { + fn new(dest_dir: PathBuf) -> Self { let serializer = RwLock::new(()); let mut fs_dir = dest_dir.clone(); fs_dir.push("fs_store"); @@ -1330,29 +1367,3 @@ impl TestSyncStore { self.do_list(primary_namespace, secondary_namespace) } } - -impl KVStoreSync for TestSyncStore { - fn read( - &self, primary_namespace: &str, secondary_namespace: &str, key: &str, - ) -> lightning::io::Result> { - self.read_internal(primary_namespace, secondary_namespace, key) - } - - fn write( - &self, primary_namespace: &str, secondary_namespace: &str, key: &str, buf: Vec, - ) -> lightning::io::Result<()> { - self.write_internal(primary_namespace, secondary_namespace, key, buf) - } - - fn remove( - &self, primary_namespace: &str, secondary_namespace: &str, key: &str, lazy: bool, - ) -> lightning::io::Result<()> { - self.remove_internal(primary_namespace, secondary_namespace, key, lazy) - } - - fn list( - &self, primary_namespace: &str, secondary_namespace: &str, - ) -> lightning::io::Result> { - self.list_internal(primary_namespace, secondary_namespace) - } -} From 3c7cf027ee0cf1e68c78bf51e669152e8cf607fe Mon Sep 17 00:00:00 2001 From: Elias Rohrer Date: Thu, 18 Sep 2025 11:18:54 +0200 Subject: [PATCH 23/27] Implement `KVStore` for `TestSyncStore` --- tests/common/mod.rs | 137 ++++++++++++++++++++++++++++++++++++++------ 1 file changed, 121 insertions(+), 16 deletions(-) diff --git 
a/tests/common/mod.rs b/tests/common/mod.rs index 77e89f0ba..4c9acc86f 100644 --- a/tests/common/mod.rs +++ b/tests/common/mod.rs @@ -19,9 +19,10 @@ use ldk_node::{ Builder, CustomTlvRecord, Event, LightningBalance, Node, NodeError, PendingSweepBalance, }; +use lightning::io; use lightning::ln::msgs::SocketAddress; use lightning::routing::gossip::NodeAlias; -use lightning::util::persist::KVStoreSync; +use lightning::util::persist::{KVStore, KVStoreSync}; use lightning::util::test_utils::TestStore; use lightning_invoice::{Bolt11InvoiceDescription, Description}; @@ -44,9 +45,12 @@ use rand::distributions::Alphanumeric; use rand::{thread_rng, Rng}; use serde_json::{json, Value}; +use std::boxed::Box; use std::collections::{HashMap, HashSet}; use std::env; +use std::future::Future; use std::path::PathBuf; +use std::pin::Pin; use std::sync::{Arc, RwLock}; use std::time::Duration; @@ -1198,6 +1202,76 @@ impl TestSyncStore { } } +impl KVStore for TestSyncStore { + fn read( + &self, primary_namespace: &str, secondary_namespace: &str, key: &str, + ) -> Pin, io::Error>> + Send>> { + let primary_namespace = primary_namespace.to_string(); + let secondary_namespace = secondary_namespace.to_string(); + let key = key.to_string(); + let inner = Arc::clone(&self.inner); + let fut = tokio::task::spawn_blocking(move || { + inner.read_internal(&primary_namespace, &secondary_namespace, &key) + }); + Box::pin(async move { + fut.await.unwrap_or_else(|e| { + let msg = format!("Failed to IO operation due join error: {}", e); + Err(io::Error::new(io::ErrorKind::Other, msg)) + }) + }) + } + fn write( + &self, primary_namespace: &str, secondary_namespace: &str, key: &str, buf: Vec, + ) -> Pin> + Send>> { + let primary_namespace = primary_namespace.to_string(); + let secondary_namespace = secondary_namespace.to_string(); + let key = key.to_string(); + let inner = Arc::clone(&self.inner); + let fut = tokio::task::spawn_blocking(move || { + inner.write_internal(&primary_namespace, &secondary_namespace, &key, buf) + }); + Box::pin(async move { + fut.await.unwrap_or_else(|e| { + let msg = format!("Failed to IO operation due join error: {}", e); + Err(io::Error::new(io::ErrorKind::Other, msg)) + }) + }) + } + fn remove( + &self, primary_namespace: &str, secondary_namespace: &str, key: &str, lazy: bool, + ) -> Pin> + Send>> { + let primary_namespace = primary_namespace.to_string(); + let secondary_namespace = secondary_namespace.to_string(); + let key = key.to_string(); + let inner = Arc::clone(&self.inner); + let fut = tokio::task::spawn_blocking(move || { + inner.remove_internal(&primary_namespace, &secondary_namespace, &key, lazy) + }); + Box::pin(async move { + fut.await.unwrap_or_else(|e| { + let msg = format!("Failed to IO operation due join error: {}", e); + Err(io::Error::new(io::ErrorKind::Other, msg)) + }) + }) + } + fn list( + &self, primary_namespace: &str, secondary_namespace: &str, + ) -> Pin, io::Error>> + Send>> { + let primary_namespace = primary_namespace.to_string(); + let secondary_namespace = secondary_namespace.to_string(); + let inner = Arc::clone(&self.inner); + let fut = tokio::task::spawn_blocking(move || { + inner.list_internal(&primary_namespace, &secondary_namespace) + }); + Box::pin(async move { + fut.await.unwrap_or_else(|e| { + let msg = format!("Failed to IO operation due join error: {}", e); + Err(io::Error::new(io::ErrorKind::Other, msg)) + }) + }) + } +} + impl KVStoreSync for TestSyncStore { fn read( &self, primary_namespace: &str, secondary_namespace: &str, key: &str, @@ -1252,9 
+1326,10 @@ impl TestSyncStoreInner { fn do_list( &self, primary_namespace: &str, secondary_namespace: &str, ) -> lightning::io::Result> { - let fs_res = self.fs_store.list(primary_namespace, secondary_namespace); - let sqlite_res = self.sqlite_store.list(primary_namespace, secondary_namespace); - let test_res = self.test_store.list(primary_namespace, secondary_namespace); + let fs_res = KVStoreSync::list(&self.fs_store, primary_namespace, secondary_namespace); + let sqlite_res = + KVStoreSync::list(&self.sqlite_store, primary_namespace, secondary_namespace); + let test_res = KVStoreSync::list(&self.test_store, primary_namespace, secondary_namespace); match fs_res { Ok(mut list) => { @@ -1283,9 +1358,11 @@ impl TestSyncStoreInner { ) -> lightning::io::Result> { let _guard = self.serializer.read().unwrap(); - let fs_res = self.fs_store.read(primary_namespace, secondary_namespace, key); - let sqlite_res = self.sqlite_store.read(primary_namespace, secondary_namespace, key); - let test_res = self.test_store.read(primary_namespace, secondary_namespace, key); + let fs_res = KVStoreSync::read(&self.fs_store, primary_namespace, secondary_namespace, key); + let sqlite_res = + KVStoreSync::read(&self.sqlite_store, primary_namespace, secondary_namespace, key); + let test_res = + KVStoreSync::read(&self.test_store, primary_namespace, secondary_namespace, key); match fs_res { Ok(read) => { @@ -1307,11 +1384,27 @@ impl TestSyncStoreInner { &self, primary_namespace: &str, secondary_namespace: &str, key: &str, buf: Vec, ) -> lightning::io::Result<()> { let _guard = self.serializer.write().unwrap(); - let fs_res = self.fs_store.write(primary_namespace, secondary_namespace, key, buf.clone()); - let sqlite_res = - self.sqlite_store.write(primary_namespace, secondary_namespace, key, buf.clone()); - let test_res = - self.test_store.write(primary_namespace, secondary_namespace, key, buf.clone()); + let fs_res = KVStoreSync::write( + &self.fs_store, + primary_namespace, + secondary_namespace, + key, + buf.clone(), + ); + let sqlite_res = KVStoreSync::write( + &self.sqlite_store, + primary_namespace, + secondary_namespace, + key, + buf.clone(), + ); + let test_res = KVStoreSync::write( + &self.test_store, + primary_namespace, + secondary_namespace, + key, + buf.clone(), + ); assert!(self .do_list(primary_namespace, secondary_namespace) @@ -1336,10 +1429,22 @@ impl TestSyncStoreInner { &self, primary_namespace: &str, secondary_namespace: &str, key: &str, lazy: bool, ) -> lightning::io::Result<()> { let _guard = self.serializer.write().unwrap(); - let fs_res = self.fs_store.remove(primary_namespace, secondary_namespace, key, lazy); - let sqlite_res = - self.sqlite_store.remove(primary_namespace, secondary_namespace, key, lazy); - let test_res = self.test_store.remove(primary_namespace, secondary_namespace, key, lazy); + let fs_res = + KVStoreSync::remove(&self.fs_store, primary_namespace, secondary_namespace, key, lazy); + let sqlite_res = KVStoreSync::remove( + &self.sqlite_store, + primary_namespace, + secondary_namespace, + key, + lazy, + ); + let test_res = KVStoreSync::remove( + &self.test_store, + primary_namespace, + secondary_namespace, + key, + lazy, + ); assert!(!self .do_list(primary_namespace, secondary_namespace) From 8c2ff8f0688ee35dc9ae97a1b5dd7a5f1d60152c Mon Sep 17 00:00:00 2001 From: Elias Rohrer Date: Wed, 17 Sep 2025 12:57:25 +0200 Subject: [PATCH 24/27] Require both types of `KVStore` As an intermediary step, we require any store to implement both `KVStore` and `KVStoreSync`, allowing us to 
switch over step-by-step. We already switch to the fully-async background processor variant here. --- Cargo.toml | 8 +- src/builder.rs | 7 +- src/data_store.rs | 49 ++++--- src/event.rs | 45 +++--- src/io/utils.rs | 128 ++++++++++-------- src/lib.rs | 41 +++--- .../asynchronous/static_invoice_store.rs | 59 ++++---- src/peer_store.rs | 59 ++++---- src/types.rs | 18 ++- tests/integration_tests_rust.rs | 5 +- 10 files changed, 236 insertions(+), 183 deletions(-) diff --git a/Cargo.toml b/Cargo.toml index b639b7dc1..7385b5c46 100755 --- a/Cargo.toml +++ b/Cargo.toml @@ -32,7 +32,7 @@ default = [] #lightning-types = { version = "0.2.0" } #lightning-invoice = { version = "0.33.0", features = ["std"] } #lightning-net-tokio = { version = "0.1.0" } -#lightning-persister = { version = "0.1.0" } +#lightning-persister = { version = "0.1.0", features = ["tokio"] } #lightning-background-processor = { version = "0.1.0" } #lightning-rapid-gossip-sync = { version = "0.1.0" } #lightning-block-sync = { version = "0.1.0", features = ["rest-client", "rpc-client", "tokio"] } @@ -44,7 +44,7 @@ default = [] #lightning-types = { git = "https://github.com/lightningdevkit/rust-lightning", branch = "main" } #lightning-invoice = { git = "https://github.com/lightningdevkit/rust-lightning", branch = "main", features = ["std"] } #lightning-net-tokio = { git = "https://github.com/lightningdevkit/rust-lightning", branch = "main" } -#lightning-persister = { git = "https://github.com/lightningdevkit/rust-lightning", branch = "main" } +#lightning-persister = { git = "https://github.com/lightningdevkit/rust-lightning", branch = "main", features = ["tokio"] } #lightning-background-processor = { git = "https://github.com/lightningdevkit/rust-lightning", branch = "main" } #lightning-rapid-gossip-sync = { git = "https://github.com/lightningdevkit/rust-lightning", branch = "main" } #lightning-block-sync = { git = "https://github.com/lightningdevkit/rust-lightning", branch = "main", features = ["rest-client", "rpc-client", "tokio"] } @@ -56,7 +56,7 @@ lightning = { git = "https://github.com/lightningdevkit/rust-lightning", rev = " lightning-types = { git = "https://github.com/lightningdevkit/rust-lightning", rev = "3e21ba37a133977d4247e86f25df983b39326994" } lightning-invoice = { git = "https://github.com/lightningdevkit/rust-lightning", rev = "3e21ba37a133977d4247e86f25df983b39326994", features = ["std"] } lightning-net-tokio = { git = "https://github.com/lightningdevkit/rust-lightning", rev = "3e21ba37a133977d4247e86f25df983b39326994" } -lightning-persister = { git = "https://github.com/lightningdevkit/rust-lightning", rev = "3e21ba37a133977d4247e86f25df983b39326994" } +lightning-persister = { git = "https://github.com/lightningdevkit/rust-lightning", rev = "3e21ba37a133977d4247e86f25df983b39326994", features = ["tokio"] } lightning-background-processor = { git = "https://github.com/lightningdevkit/rust-lightning", rev = "3e21ba37a133977d4247e86f25df983b39326994" } lightning-rapid-gossip-sync = { git = "https://github.com/lightningdevkit/rust-lightning", rev = "3e21ba37a133977d4247e86f25df983b39326994" } lightning-block-sync = { git = "https://github.com/lightningdevkit/rust-lightning", rev = "3e21ba37a133977d4247e86f25df983b39326994", features = ["rest-client", "rpc-client", "tokio"] } @@ -68,7 +68,7 @@ lightning-macros = { git = "https://github.com/lightningdevkit/rust-lightning", #lightning-types = { path = "../rust-lightning/lightning-types" } #lightning-invoice = { path = "../rust-lightning/lightning-invoice", features = 
["std"] } #lightning-net-tokio = { path = "../rust-lightning/lightning-net-tokio" } -#lightning-persister = { path = "../rust-lightning/lightning-persister" } +#lightning-persister = { path = "../rust-lightning/lightning-persister", features = ["tokio"] } #lightning-background-processor = { path = "../rust-lightning/lightning-background-processor" } #lightning-rapid-gossip-sync = { path = "../rust-lightning/lightning-rapid-gossip-sync" } #lightning-block-sync = { path = "../rust-lightning/lightning-block-sync", features = ["rest-client", "rpc-client", "tokio"] } diff --git a/src/builder.rs b/src/builder.rs index b99c44cec..70db3382b 100644 --- a/src/builder.rs +++ b/src/builder.rs @@ -51,7 +51,7 @@ use lightning::routing::scoring::{ use lightning::sign::{EntropySource, NodeSigner}; use lightning::util::persist::{ - read_channel_monitors, CHANNEL_MANAGER_PERSISTENCE_KEY, + read_channel_monitors, KVStoreSync, CHANNEL_MANAGER_PERSISTENCE_KEY, CHANNEL_MANAGER_PERSISTENCE_PRIMARY_NAMESPACE, CHANNEL_MANAGER_PERSISTENCE_SECONDARY_NAMESPACE, }; use lightning::util::ser::ReadableArgs; @@ -1387,7 +1387,8 @@ fn build_with_store_internal( // Initialize the ChannelManager let channel_manager = { - if let Ok(res) = kv_store.read( + if let Ok(res) = KVStoreSync::read( + &*kv_store, CHANNEL_MANAGER_PERSISTENCE_PRIMARY_NAMESPACE, CHANNEL_MANAGER_PERSISTENCE_SECONDARY_NAMESPACE, CHANNEL_MANAGER_PERSISTENCE_KEY, @@ -1610,7 +1611,7 @@ fn build_with_store_internal( Ok(output_sweeper) => Arc::new(output_sweeper), Err(e) => { if e.kind() == std::io::ErrorKind::NotFound { - Arc::new(OutputSweeper::new_with_kv_store_sync( + Arc::new(OutputSweeper::new( channel_manager.current_best_block(), Arc::clone(&tx_broadcaster), Arc::clone(&fee_estimator), diff --git a/src/data_store.rs b/src/data_store.rs index 45802c272..ba68acd4b 100644 --- a/src/data_store.rs +++ b/src/data_store.rs @@ -9,6 +9,7 @@ use crate::logger::{log_error, LdkLogger}; use crate::types::DynStore; use crate::Error; +use lightning::util::persist::KVStoreSync; use lightning::util::ser::{Readable, Writeable}; use std::collections::hash_map; @@ -98,19 +99,24 @@ where let removed = self.objects.lock().unwrap().remove(id).is_some(); if removed { let store_key = id.encode_to_hex_str(); - self.kv_store - .remove(&self.primary_namespace, &self.secondary_namespace, &store_key, false) - .map_err(|e| { - log_error!( - self.logger, - "Removing object data for key {}/{}/{} failed due to: {}", - &self.primary_namespace, - &self.secondary_namespace, - store_key, - e - ); - Error::PersistenceFailed - })?; + KVStoreSync::remove( + &*self.kv_store, + &self.primary_namespace, + &self.secondary_namespace, + &store_key, + false, + ) + .map_err(|e| { + log_error!( + self.logger, + "Removing object data for key {}/{}/{} failed due to: {}", + &self.primary_namespace, + &self.secondary_namespace, + store_key, + e + ); + Error::PersistenceFailed + })?; } Ok(()) } @@ -142,9 +148,14 @@ where fn persist(&self, object: &SO) -> Result<(), Error> { let store_key = object.id().encode_to_hex_str(); let data = object.encode(); - self.kv_store - .write(&self.primary_namespace, &self.secondary_namespace, &store_key, data) - .map_err(|e| { + KVStoreSync::write( + &*self.kv_store, + &self.primary_namespace, + &self.secondary_namespace, + &store_key, + data, + ) + .map_err(|e| { log_error!( self.logger, "Write for key {}/{}/{} failed due to: {}", @@ -243,13 +254,15 @@ mod tests { let store_key = id.encode_to_hex_str(); // Check we start empty. 
- assert!(store.read(&primary_namespace, &secondary_namespace, &store_key).is_err()); + assert!(KVStoreSync::read(&*store, &primary_namespace, &secondary_namespace, &store_key) + .is_err()); // Check we successfully store an object and return `false` let object = TestObject { id, data: [23u8; 3] }; assert_eq!(Ok(false), data_store.insert(object.clone())); assert_eq!(Some(object), data_store.get(&id)); - assert!(store.read(&primary_namespace, &secondary_namespace, &store_key).is_ok()); + assert!(KVStoreSync::read(&*store, &primary_namespace, &secondary_namespace, &store_key) + .is_ok()); // Test re-insertion returns `true` let mut override_object = object.clone(); diff --git a/src/event.rs b/src/event.rs index cd9146379..a3ad693c0 100644 --- a/src/event.rs +++ b/src/event.rs @@ -42,6 +42,7 @@ use lightning::util::config::{ ChannelConfigOverrides, ChannelConfigUpdate, ChannelHandshakeConfigUpdate, }; use lightning::util::errors::APIError; +use lightning::util::persist::KVStoreSync; use lightning::util::ser::{Readable, ReadableArgs, Writeable, Writer}; use lightning_types::payment::{PaymentHash, PaymentPreimage}; @@ -355,24 +356,24 @@ where fn persist_queue(&self, locked_queue: &VecDeque) -> Result<(), Error> { let data = EventQueueSerWrapper(locked_queue).encode(); - self.kv_store - .write( + KVStoreSync::write( + &*self.kv_store, + EVENT_QUEUE_PERSISTENCE_PRIMARY_NAMESPACE, + EVENT_QUEUE_PERSISTENCE_SECONDARY_NAMESPACE, + EVENT_QUEUE_PERSISTENCE_KEY, + data, + ) + .map_err(|e| { + log_error!( + self.logger, + "Write for key {}/{}/{} failed due to: {}", EVENT_QUEUE_PERSISTENCE_PRIMARY_NAMESPACE, EVENT_QUEUE_PERSISTENCE_SECONDARY_NAMESPACE, EVENT_QUEUE_PERSISTENCE_KEY, - data, - ) - .map_err(|e| { - log_error!( - self.logger, - "Write for key {}/{}/{} failed due to: {}", - EVENT_QUEUE_PERSISTENCE_PRIMARY_NAMESPACE, - EVENT_QUEUE_PERSISTENCE_SECONDARY_NAMESPACE, - EVENT_QUEUE_PERSISTENCE_KEY, - e - ); - Error::PersistenceFailed - })?; + e + ); + Error::PersistenceFailed + })?; Ok(()) } } @@ -1598,13 +1599,13 @@ mod tests { } // Check we can read back what we persisted. 
- let persisted_bytes = store - .read( - EVENT_QUEUE_PERSISTENCE_PRIMARY_NAMESPACE, - EVENT_QUEUE_PERSISTENCE_SECONDARY_NAMESPACE, - EVENT_QUEUE_PERSISTENCE_KEY, - ) - .unwrap(); + let persisted_bytes = KVStoreSync::read( + &*store, + EVENT_QUEUE_PERSISTENCE_PRIMARY_NAMESPACE, + EVENT_QUEUE_PERSISTENCE_SECONDARY_NAMESPACE, + EVENT_QUEUE_PERSISTENCE_KEY, + ) + .unwrap(); let deser_event_queue = EventQueue::read(&mut &persisted_bytes[..], (Arc::clone(&store), logger)).unwrap(); assert_eq!(deser_event_queue.wait_next_event(), expected_event); diff --git a/src/io/utils.rs b/src/io/utils.rs index 51e7be505..988c2a7fb 100644 --- a/src/io/utils.rs +++ b/src/io/utils.rs @@ -24,11 +24,12 @@ use lightning::ln::msgs::DecodeError; use lightning::routing::gossip::NetworkGraph; use lightning::routing::scoring::{ProbabilisticScorer, ProbabilisticScoringDecayParameters}; use lightning::util::persist::{ - KVSTORE_NAMESPACE_KEY_ALPHABET, KVSTORE_NAMESPACE_KEY_MAX_LEN, NETWORK_GRAPH_PERSISTENCE_KEY, - NETWORK_GRAPH_PERSISTENCE_PRIMARY_NAMESPACE, NETWORK_GRAPH_PERSISTENCE_SECONDARY_NAMESPACE, - OUTPUT_SWEEPER_PERSISTENCE_KEY, OUTPUT_SWEEPER_PERSISTENCE_PRIMARY_NAMESPACE, - OUTPUT_SWEEPER_PERSISTENCE_SECONDARY_NAMESPACE, SCORER_PERSISTENCE_KEY, - SCORER_PERSISTENCE_PRIMARY_NAMESPACE, SCORER_PERSISTENCE_SECONDARY_NAMESPACE, + KVStoreSync, KVSTORE_NAMESPACE_KEY_ALPHABET, KVSTORE_NAMESPACE_KEY_MAX_LEN, + NETWORK_GRAPH_PERSISTENCE_KEY, NETWORK_GRAPH_PERSISTENCE_PRIMARY_NAMESPACE, + NETWORK_GRAPH_PERSISTENCE_SECONDARY_NAMESPACE, OUTPUT_SWEEPER_PERSISTENCE_KEY, + OUTPUT_SWEEPER_PERSISTENCE_PRIMARY_NAMESPACE, OUTPUT_SWEEPER_PERSISTENCE_SECONDARY_NAMESPACE, + SCORER_PERSISTENCE_KEY, SCORER_PERSISTENCE_PRIMARY_NAMESPACE, + SCORER_PERSISTENCE_SECONDARY_NAMESPACE, }; use lightning::util::ser::{Readable, ReadableArgs, Writeable}; use lightning::util::sweep::OutputSweeper; @@ -135,7 +136,8 @@ pub(crate) fn read_network_graph( where L::Target: LdkLogger, { - let mut reader = Cursor::new(kv_store.read( + let mut reader = Cursor::new(KVStoreSync::read( + &*kv_store, NETWORK_GRAPH_PERSISTENCE_PRIMARY_NAMESPACE, NETWORK_GRAPH_PERSISTENCE_SECONDARY_NAMESPACE, NETWORK_GRAPH_PERSISTENCE_KEY, @@ -154,7 +156,8 @@ where L::Target: LdkLogger, { let params = ProbabilisticScoringDecayParameters::default(); - let mut reader = Cursor::new(kv_store.read( + let mut reader = Cursor::new(KVStoreSync::read( + &*kv_store, SCORER_PERSISTENCE_PRIMARY_NAMESPACE, SCORER_PERSISTENCE_SECONDARY_NAMESPACE, SCORER_PERSISTENCE_KEY, @@ -173,7 +176,8 @@ pub(crate) fn read_event_queue( where L::Target: LdkLogger, { - let mut reader = Cursor::new(kv_store.read( + let mut reader = Cursor::new(KVStoreSync::read( + &*kv_store, EVENT_QUEUE_PERSISTENCE_PRIMARY_NAMESPACE, EVENT_QUEUE_PERSISTENCE_SECONDARY_NAMESPACE, EVENT_QUEUE_PERSISTENCE_KEY, @@ -191,7 +195,8 @@ pub(crate) fn read_peer_info( where L::Target: LdkLogger, { - let mut reader = Cursor::new(kv_store.read( + let mut reader = Cursor::new(KVStoreSync::read( + &*kv_store, PEER_INFO_PERSISTENCE_PRIMARY_NAMESPACE, PEER_INFO_PERSISTENCE_SECONDARY_NAMESPACE, PEER_INFO_PERSISTENCE_KEY, @@ -211,11 +216,13 @@ where { let mut res = Vec::new(); - for stored_key in kv_store.list( + for stored_key in KVStoreSync::list( + &*kv_store, PAYMENT_INFO_PERSISTENCE_PRIMARY_NAMESPACE, PAYMENT_INFO_PERSISTENCE_SECONDARY_NAMESPACE, )? 
{ - let mut reader = Cursor::new(kv_store.read( + let mut reader = Cursor::new(KVStoreSync::read( + &*kv_store, PAYMENT_INFO_PERSISTENCE_PRIMARY_NAMESPACE, PAYMENT_INFO_PERSISTENCE_SECONDARY_NAMESPACE, &stored_key, @@ -238,7 +245,8 @@ pub(crate) fn read_output_sweeper( chain_data_source: Arc, keys_manager: Arc, kv_store: Arc, logger: Arc, ) -> Result { - let mut reader = Cursor::new(kv_store.read( + let mut reader = Cursor::new(KVStoreSync::read( + &*kv_store, OUTPUT_SWEEPER_PERSISTENCE_PRIMARY_NAMESPACE, OUTPUT_SWEEPER_PERSISTENCE_SECONDARY_NAMESPACE, OUTPUT_SWEEPER_PERSISTENCE_KEY, @@ -252,7 +260,7 @@ pub(crate) fn read_output_sweeper( kv_store, logger.clone(), ); - OutputSweeper::read_with_kv_store_sync(&mut reader, args).map_err(|e| { + OutputSweeper::read(&mut reader, args).map_err(|e| { log_error!(logger, "Failed to deserialize OutputSweeper: {}", e); std::io::Error::new(std::io::ErrorKind::InvalidData, "Failed to deserialize OutputSweeper") }) @@ -264,7 +272,8 @@ pub(crate) fn read_node_metrics( where L::Target: LdkLogger, { - let mut reader = Cursor::new(kv_store.read( + let mut reader = Cursor::new(KVStoreSync::read( + &*kv_store, NODE_METRICS_PRIMARY_NAMESPACE, NODE_METRICS_SECONDARY_NAMESPACE, NODE_METRICS_KEY, @@ -282,24 +291,24 @@ where L::Target: LdkLogger, { let data = node_metrics.encode(); - kv_store - .write( + KVStoreSync::write( + &*kv_store, + NODE_METRICS_PRIMARY_NAMESPACE, + NODE_METRICS_SECONDARY_NAMESPACE, + NODE_METRICS_KEY, + data, + ) + .map_err(|e| { + log_error!( + logger, + "Writing data to key {}/{}/{} failed due to: {}", NODE_METRICS_PRIMARY_NAMESPACE, NODE_METRICS_SECONDARY_NAMESPACE, NODE_METRICS_KEY, - data, - ) - .map_err(|e| { - log_error!( - logger, - "Writing data to key {}/{}/{} failed due to: {}", - NODE_METRICS_PRIMARY_NAMESPACE, - NODE_METRICS_SECONDARY_NAMESPACE, - NODE_METRICS_KEY, - e - ); - Error::PersistenceFailed - }) + e + ); + Error::PersistenceFailed + }) } pub(crate) fn is_valid_kvstore_str(key: &str) -> bool { @@ -401,24 +410,26 @@ macro_rules! impl_read_write_change_set_type { where L::Target: LdkLogger, { - let bytes = match kv_store.read($primary_namespace, $secondary_namespace, $key) { - Ok(bytes) => bytes, - Err(e) => { - if e.kind() == lightning::io::ErrorKind::NotFound { - return Ok(None); - } else { - log_error!( - logger, - "Reading data from key {}/{}/{} failed due to: {}", - $primary_namespace, - $secondary_namespace, - $key, - e - ); - return Err(e.into()); - } - }, - }; + let bytes = + match KVStoreSync::read(&*kv_store, $primary_namespace, $secondary_namespace, $key) + { + Ok(bytes) => bytes, + Err(e) => { + if e.kind() == lightning::io::ErrorKind::NotFound { + return Ok(None); + } else { + log_error!( + logger, + "Reading data from key {}/{}/{} failed due to: {}", + $primary_namespace, + $secondary_namespace, + $key, + e + ); + return Err(e.into()); + } + }, + }; let mut reader = Cursor::new(bytes); let res: Result, DecodeError> = @@ -442,17 +453,18 @@ macro_rules! 
impl_read_write_change_set_type { L::Target: LdkLogger, { let data = ChangeSetSerWrapper(value).encode(); - kv_store.write($primary_namespace, $secondary_namespace, $key, data).map_err(|e| { - log_error!( - logger, - "Writing data to key {}/{}/{} failed due to: {}", - $primary_namespace, - $secondary_namespace, - $key, - e - ); - e.into() - }) + KVStoreSync::write(&*kv_store, $primary_namespace, $secondary_namespace, $key, data) + .map_err(|e| { + log_error!( + logger, + "Writing data to key {}/{}/{} failed due to: {}", + $primary_namespace, + $secondary_namespace, + $key, + e + ); + e.into() + }) } }; } diff --git a/src/lib.rs b/src/lib.rs index e7e27273b..1b4369717 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -144,10 +144,12 @@ use payment::{ use peer_store::{PeerInfo, PeerStore}; use runtime::Runtime; use types::{ - Broadcaster, BumpTransactionEventHandler, ChainMonitor, ChannelManager, DynStore, Graph, - KeysManager, OnionMessenger, PaymentStore, PeerManager, Router, Scorer, Sweeper, Wallet, + Broadcaster, BumpTransactionEventHandler, ChainMonitor, ChannelManager, Graph, KeysManager, + OnionMessenger, PaymentStore, PeerManager, Router, Scorer, Sweeper, Wallet, +}; +pub use types::{ + ChannelDetails, CustomTlvRecord, DynStore, PeerDetails, SyncAndAsyncKVStore, UserChannelId, }; -pub use types::{ChannelDetails, CustomTlvRecord, PeerDetails, UserChannelId}; use logger::{log_debug, log_error, log_info, log_trace, LdkLogger, Logger}; @@ -158,8 +160,9 @@ use lightning::ln::channel_state::ChannelShutdownState; use lightning::ln::channelmanager::PaymentId; use lightning::ln::msgs::SocketAddress; use lightning::routing::gossip::NodeAlias; +use lightning::util::persist::KVStoreSync; -use lightning_background_processor::process_events_async_with_kv_store_sync; +use lightning_background_processor::process_events_async; use bitcoin::secp256k1::PublicKey; @@ -558,7 +561,7 @@ impl Node { }; self.runtime.spawn_background_processor_task(async move { - process_events_async_with_kv_store_sync( + process_events_async( background_persister, |e| background_event_handler.handle_event(e), background_chain_mon, @@ -1476,20 +1479,20 @@ impl Node { /// Exports the current state of the scorer. The result can be shared with and merged by light nodes that only have /// a limited view of the network. 
pub fn export_pathfinding_scores(&self) -> Result, Error> { - self.kv_store - .read( - lightning::util::persist::SCORER_PERSISTENCE_PRIMARY_NAMESPACE, - lightning::util::persist::SCORER_PERSISTENCE_SECONDARY_NAMESPACE, - lightning::util::persist::SCORER_PERSISTENCE_KEY, - ) - .map_err(|e| { - log_error!( - self.logger, - "Failed to access store while exporting pathfinding scores: {}", - e - ); - Error::PersistenceFailed - }) + KVStoreSync::read( + &*self.kv_store, + lightning::util::persist::SCORER_PERSISTENCE_PRIMARY_NAMESPACE, + lightning::util::persist::SCORER_PERSISTENCE_SECONDARY_NAMESPACE, + lightning::util::persist::SCORER_PERSISTENCE_KEY, + ) + .map_err(|e| { + log_error!( + self.logger, + "Failed to access store while exporting pathfinding scores: {}", + e + ); + Error::PersistenceFailed + }) } } diff --git a/src/payment/asynchronous/static_invoice_store.rs b/src/payment/asynchronous/static_invoice_store.rs index f1aa702a4..d83addbb9 100644 --- a/src/payment/asynchronous/static_invoice_store.rs +++ b/src/payment/asynchronous/static_invoice_store.rs @@ -17,6 +17,7 @@ use bitcoin::hashes::Hash; use lightning::blinded_path::message::BlindedMessagePath; use lightning::impl_writeable_tlv_based; +use lightning::util::persist::KVStoreSync; use lightning::{offers::static_invoice::StaticInvoice, util::ser::Readable, util::ser::Writeable}; use std::sync::{Arc, Mutex}; @@ -77,29 +78,33 @@ impl StaticInvoiceStore { let (secondary_namespace, key) = Self::get_storage_location(invoice_slot, recipient_id); - self.kv_store - .read(STATIC_INVOICE_STORE_PRIMARY_NAMESPACE, &secondary_namespace, &key) - .and_then(|data| { - PersistedStaticInvoice::read(&mut &*data) - .map(|persisted_invoice| { - Some((persisted_invoice.invoice, persisted_invoice.request_path)) - }) - .map_err(|e| { - lightning::io::Error::new( - lightning::io::ErrorKind::InvalidData, - format!("Failed to parse static invoice: {:?}", e), - ) - }) - }) - .or_else( - |e| { - if e.kind() == lightning::io::ErrorKind::NotFound { - Ok(None) - } else { - Err(e) - } - }, - ) + KVStoreSync::read( + &*self.kv_store, + STATIC_INVOICE_STORE_PRIMARY_NAMESPACE, + &secondary_namespace, + &key, + ) + .and_then(|data| { + PersistedStaticInvoice::read(&mut &*data) + .map(|persisted_invoice| { + Some((persisted_invoice.invoice, persisted_invoice.request_path)) + }) + .map_err(|e| { + lightning::io::Error::new( + lightning::io::ErrorKind::InvalidData, + format!("Failed to parse static invoice: {:?}", e), + ) + }) + }) + .or_else( + |e| { + if e.kind() == lightning::io::ErrorKind::NotFound { + Ok(None) + } else { + Err(e) + } + }, + ) } pub(crate) async fn handle_persist_static_invoice( @@ -119,7 +124,13 @@ impl StaticInvoiceStore { // Static invoices will be persisted at "static_invoices//". 
// // Example: static_invoices/039058c6f2c0cb492c533b0a4d14ef77cc0f78abccced5287d84a1a2011cfb81/00001 - self.kv_store.write(STATIC_INVOICE_STORE_PRIMARY_NAMESPACE, &secondary_namespace, &key, buf) + KVStoreSync::write( + &*self.kv_store, + STATIC_INVOICE_STORE_PRIMARY_NAMESPACE, + &secondary_namespace, + &key, + buf, + ) } fn get_storage_location(invoice_slot: u16, recipient_id: &[u8]) -> (String, String) { diff --git a/src/peer_store.rs b/src/peer_store.rs index cf3755d23..525cee0c4 100644 --- a/src/peer_store.rs +++ b/src/peer_store.rs @@ -14,6 +14,7 @@ use crate::types::DynStore; use crate::{Error, SocketAddress}; use lightning::impl_writeable_tlv_based; +use lightning::util::persist::KVStoreSync; use lightning::util::ser::{Readable, ReadableArgs, Writeable, Writer}; use bitcoin::secp256k1::PublicKey; @@ -68,24 +69,24 @@ where fn persist_peers(&self, locked_peers: &HashMap) -> Result<(), Error> { let data = PeerStoreSerWrapper(&*locked_peers).encode(); - self.kv_store - .write( + KVStoreSync::write( + &*self.kv_store, + PEER_INFO_PERSISTENCE_PRIMARY_NAMESPACE, + PEER_INFO_PERSISTENCE_SECONDARY_NAMESPACE, + PEER_INFO_PERSISTENCE_KEY, + data, + ) + .map_err(|e| { + log_error!( + self.logger, + "Write for key {}/{}/{} failed due to: {}", PEER_INFO_PERSISTENCE_PRIMARY_NAMESPACE, PEER_INFO_PERSISTENCE_SECONDARY_NAMESPACE, PEER_INFO_PERSISTENCE_KEY, - data, - ) - .map_err(|e| { - log_error!( - self.logger, - "Write for key {}/{}/{} failed due to: {}", - PEER_INFO_PERSISTENCE_PRIMARY_NAMESPACE, - PEER_INFO_PERSISTENCE_SECONDARY_NAMESPACE, - PEER_INFO_PERSISTENCE_KEY, - e - ); - Error::PersistenceFailed - })?; + e + ); + Error::PersistenceFailed + })?; Ok(()) } } @@ -167,23 +168,23 @@ mod tests { .unwrap(); let address = SocketAddress::from_str("127.0.0.1:9738").unwrap(); let expected_peer_info = PeerInfo { node_id, address }; - assert!(store - .read( - PEER_INFO_PERSISTENCE_PRIMARY_NAMESPACE, - PEER_INFO_PERSISTENCE_SECONDARY_NAMESPACE, - PEER_INFO_PERSISTENCE_KEY, - ) - .is_err()); + assert!(KVStoreSync::read( + &*store, + PEER_INFO_PERSISTENCE_PRIMARY_NAMESPACE, + PEER_INFO_PERSISTENCE_SECONDARY_NAMESPACE, + PEER_INFO_PERSISTENCE_KEY, + ) + .is_err()); peer_store.add_peer(expected_peer_info.clone()).unwrap(); // Check we can read back what we persisted. 
- let persisted_bytes = store - .read( - PEER_INFO_PERSISTENCE_PRIMARY_NAMESPACE, - PEER_INFO_PERSISTENCE_SECONDARY_NAMESPACE, - PEER_INFO_PERSISTENCE_KEY, - ) - .unwrap(); + let persisted_bytes = KVStoreSync::read( + &*store, + PEER_INFO_PERSISTENCE_PRIMARY_NAMESPACE, + PEER_INFO_PERSISTENCE_SECONDARY_NAMESPACE, + PEER_INFO_PERSISTENCE_KEY, + ) + .unwrap(); let deser_peer_store = PeerStore::read(&mut &persisted_bytes[..], (Arc::clone(&store), logger)).unwrap(); diff --git a/src/types.rs b/src/types.rs index 3635badff..eef3a3c8d 100644 --- a/src/types.rs +++ b/src/types.rs @@ -25,8 +25,8 @@ use lightning::routing::gossip; use lightning::routing::router::DefaultRouter; use lightning::routing::scoring::{ProbabilisticScorer, ProbabilisticScoringFeeParameters}; use lightning::sign::InMemorySigner; +use lightning::util::persist::KVStore; use lightning::util::persist::KVStoreSync; -use lightning::util::persist::KVStoreSyncWrapper; use lightning::util::ser::{Readable, Writeable, Writer}; use lightning::util::sweep::OutputSweeper; @@ -41,7 +41,19 @@ use bitcoin::OutPoint; use std::sync::{Arc, Mutex}; -pub(crate) type DynStore = dyn KVStoreSync + Sync + Send; +/// A supertrait that requires that a type implements both [`KVStore`] and [`KVStoreSync`] at the +/// same time. +pub trait SyncAndAsyncKVStore: KVStore + KVStoreSync {} + +impl SyncAndAsyncKVStore for T +where + T: KVStore, + T: KVStoreSync, +{ +} + +/// A type alias for [`SyncAndAsyncKVStore`] with `Sync`/`Send` markers; +pub type DynStore = dyn SyncAndAsyncKVStore + Sync + Send; pub(crate) type ChainMonitor = chainmonitor::ChainMonitor< InMemorySigner, @@ -139,7 +151,7 @@ pub(crate) type Sweeper = OutputSweeper< Arc, Arc, Arc, - KVStoreSyncWrapper>, + Arc, Arc, Arc, >; diff --git a/tests/integration_tests_rust.rs b/tests/integration_tests_rust.rs index f2e8407cd..26a2e9b36 100644 --- a/tests/integration_tests_rust.rs +++ b/tests/integration_tests_rust.rs @@ -25,12 +25,11 @@ use ldk_node::payment::{ ConfirmationStatus, PaymentDetails, PaymentDirection, PaymentKind, PaymentStatus, QrPaymentResult, }; -use ldk_node::{Builder, Event, NodeError}; +use ldk_node::{Builder, DynStore, Event, NodeError}; use lightning::ln::channelmanager::PaymentId; use lightning::routing::gossip::{NodeAlias, NodeId}; use lightning::routing::router::RouteParametersConfig; -use lightning::util::persist::KVStoreSync; use lightning_invoice::{Bolt11InvoiceDescription, Description}; use lightning_types::payment::{PaymentHash, PaymentPreimage}; @@ -248,7 +247,7 @@ fn start_stop_reinit() { let esplora_url = format!("http://{}", electrsd.esplora_url.as_ref().unwrap()); - let test_sync_store: Arc = + let test_sync_store: Arc = Arc::new(TestSyncStore::new(config.node_config.storage_dir_path.clone().into())); let sync_config = EsploraSyncConfig { background_sync_config: None }; From c79caaba13a2af5d41672a4c03f09784b79f86d3 Mon Sep 17 00:00:00 2001 From: Elias Rohrer Date: Thu, 25 Sep 2025 09:10:47 +0200 Subject: [PATCH 25/27] Account for `LiquidityManager` persistence We recently implemented persistence for the `lightning-liquidity` service state. Here we make corresponding changes in LDK Node to have our service state persisted. 
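For illustration, the construction pattern this implies can be sketched roughly as follows. This is a self-contained stand-in, not the actual LDK Node / `lightning-liquidity` API: `SketchKvStore`, `SketchLiquidityService`, and the "liquidity_service_state" key are hypothetical names chosen for the sketch (only `BuildError::ReadFailed` mirrors the patch). The point is simply that the service constructor becomes async and fallible because it restores persisted state from the store, so the synchronous builder bridges it via the runtime and maps a persistence failure to a build error.

use std::sync::Arc;

#[derive(Debug)]
enum BuildError {
    ReadFailed,
}

struct SketchKvStore;

impl SketchKvStore {
    // Stand-in for an async KV store read; the key name is hypothetical.
    async fn read(&self, key: &str) -> std::io::Result<Vec<u8>> {
        let _ = key;
        Ok(Vec::new())
    }
}

struct SketchLiquidityService {
    _persisted_state: Vec<u8>,
}

impl SketchLiquidityService {
    // Async + fallible: restore previously persisted service state before
    // handing the service out.
    async fn new(kv_store: Arc<SketchKvStore>) -> std::io::Result<Self> {
        let state = kv_store.read("liquidity_service_state").await?;
        Ok(Self { _persisted_state: state })
    }
}

fn build_service(
    runtime: &tokio::runtime::Runtime, kv_store: Arc<SketchKvStore>,
) -> Result<Arc<SketchLiquidityService>, BuildError> {
    // Mirrors the builder change in this patch: block on the async constructor
    // and map any persistence failure to a build error.
    runtime
        .block_on(SketchLiquidityService::new(kv_store))
        .map(Arc::new)
        .map_err(|_| BuildError::ReadFailed)
}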
--- Cargo.toml | 24 +++++----- src/builder.rs | 4 +- src/event.rs | 117 +++++++++++++++++++++++++---------------------- src/liquidity.rs | 91 ++++++++++++++++++++---------------- src/types.rs | 1 + 5 files changed, 130 insertions(+), 107 deletions(-) diff --git a/Cargo.toml b/Cargo.toml index 7385b5c46..be454febc 100755 --- a/Cargo.toml +++ b/Cargo.toml @@ -52,17 +52,17 @@ default = [] #lightning-liquidity = { git = "https://github.com/lightningdevkit/rust-lightning", branch = "main" } #lightning-macros = { git = "https://github.com/lightningdevkit/rust-lightning", branch = "main" } -lightning = { git = "https://github.com/lightningdevkit/rust-lightning", rev = "3e21ba37a133977d4247e86f25df983b39326994", features = ["std"] } -lightning-types = { git = "https://github.com/lightningdevkit/rust-lightning", rev = "3e21ba37a133977d4247e86f25df983b39326994" } -lightning-invoice = { git = "https://github.com/lightningdevkit/rust-lightning", rev = "3e21ba37a133977d4247e86f25df983b39326994", features = ["std"] } -lightning-net-tokio = { git = "https://github.com/lightningdevkit/rust-lightning", rev = "3e21ba37a133977d4247e86f25df983b39326994" } -lightning-persister = { git = "https://github.com/lightningdevkit/rust-lightning", rev = "3e21ba37a133977d4247e86f25df983b39326994", features = ["tokio"] } -lightning-background-processor = { git = "https://github.com/lightningdevkit/rust-lightning", rev = "3e21ba37a133977d4247e86f25df983b39326994" } -lightning-rapid-gossip-sync = { git = "https://github.com/lightningdevkit/rust-lightning", rev = "3e21ba37a133977d4247e86f25df983b39326994" } -lightning-block-sync = { git = "https://github.com/lightningdevkit/rust-lightning", rev = "3e21ba37a133977d4247e86f25df983b39326994", features = ["rest-client", "rpc-client", "tokio"] } -lightning-transaction-sync = { git = "https://github.com/lightningdevkit/rust-lightning", rev = "3e21ba37a133977d4247e86f25df983b39326994", features = ["esplora-async-https", "electrum-rustls-ring", "time"] } -lightning-liquidity = { git = "https://github.com/lightningdevkit/rust-lightning", rev = "3e21ba37a133977d4247e86f25df983b39326994" } -lightning-macros = { git = "https://github.com/lightningdevkit/rust-lightning", rev = "3e21ba37a133977d4247e86f25df983b39326994" } +lightning = { git = "https://github.com/lightningdevkit/rust-lightning", rev = "d0765847c85f1c3dc753c17c3e05dbcb1d300204", features = ["std"] } +lightning-types = { git = "https://github.com/lightningdevkit/rust-lightning", rev = "d0765847c85f1c3dc753c17c3e05dbcb1d300204" } +lightning-invoice = { git = "https://github.com/lightningdevkit/rust-lightning", rev = "d0765847c85f1c3dc753c17c3e05dbcb1d300204", features = ["std"] } +lightning-net-tokio = { git = "https://github.com/lightningdevkit/rust-lightning", rev = "d0765847c85f1c3dc753c17c3e05dbcb1d300204" } +lightning-persister = { git = "https://github.com/lightningdevkit/rust-lightning", rev = "d0765847c85f1c3dc753c17c3e05dbcb1d300204", features = ["tokio"] } +lightning-background-processor = { git = "https://github.com/lightningdevkit/rust-lightning", rev = "d0765847c85f1c3dc753c17c3e05dbcb1d300204" } +lightning-rapid-gossip-sync = { git = "https://github.com/lightningdevkit/rust-lightning", rev = "d0765847c85f1c3dc753c17c3e05dbcb1d300204" } +lightning-block-sync = { git = "https://github.com/lightningdevkit/rust-lightning", rev = "d0765847c85f1c3dc753c17c3e05dbcb1d300204", features = ["rest-client", "rpc-client", "tokio"] } +lightning-transaction-sync = { git = "https://github.com/lightningdevkit/rust-lightning", 
rev = "d0765847c85f1c3dc753c17c3e05dbcb1d300204", features = ["esplora-async-https", "electrum-rustls-ring", "time"] } +lightning-liquidity = { git = "https://github.com/lightningdevkit/rust-lightning", rev = "d0765847c85f1c3dc753c17c3e05dbcb1d300204" } +lightning-macros = { git = "https://github.com/lightningdevkit/rust-lightning", rev = "d0765847c85f1c3dc753c17c3e05dbcb1d300204" } #lightning = { path = "../rust-lightning/lightning", features = ["std"] } #lightning-types = { path = "../rust-lightning/lightning-types" } @@ -109,7 +109,7 @@ winapi = { version = "0.3", features = ["winbase"] } [dev-dependencies] #lightning = { version = "0.1.0", features = ["std", "_test_utils"] } #lightning = { git = "https://github.com/lightningdevkit/rust-lightning", branch="main", features = ["std", "_test_utils"] } -lightning = { git = "https://github.com/lightningdevkit/rust-lightning", rev = "3e21ba37a133977d4247e86f25df983b39326994", features = ["std", "_test_utils"] } +lightning = { git = "https://github.com/lightningdevkit/rust-lightning", rev = "d0765847c85f1c3dc753c17c3e05dbcb1d300204", features = ["std", "_test_utils"] } #lightning = { path = "../rust-lightning/lightning", features = ["std", "_test_utils"] } proptest = "1.0.0" regex = "1.5.6" diff --git a/src/builder.rs b/src/builder.rs index 70db3382b..a6d834b27 100644 --- a/src/builder.rs +++ b/src/builder.rs @@ -1510,6 +1510,7 @@ fn build_with_store_internal( Arc::clone(&channel_manager), Arc::clone(&keys_manager), Arc::clone(&chain_source), + Arc::clone(&kv_store), Arc::clone(&config), Arc::clone(&logger), ); @@ -1543,7 +1544,8 @@ fn build_with_store_internal( liquidity_source_builder.lsps2_service(promise_secret, config.clone()) }); - let liquidity_source = Arc::new(liquidity_source_builder.build()); + let liquidity_source = runtime + .block_on(async move { liquidity_source_builder.build().await.map(Arc::new) })?; let custom_message_handler = Arc::new(NodeCustomMessageHandler::new_liquidity(Arc::clone(&liquidity_source))); (Some(liquidity_source), custom_message_handler) diff --git a/src/event.rs b/src/event.rs index a3ad693c0..18c660e6a 100644 --- a/src/event.rs +++ b/src/event.rs @@ -1048,7 +1048,7 @@ where LdkEvent::ProbeFailed { .. } => {}, LdkEvent::HTLCHandlingFailed { failure_type, .. 
} => { if let Some(liquidity_source) = self.liquidity_source.as_ref() { - liquidity_source.handle_htlc_handling_failed(failure_type); + liquidity_source.handle_htlc_handling_failed(failure_type).await; } }, LdkEvent::SpendableOutputs { outputs, channel_id } => { @@ -1231,40 +1231,46 @@ where claim_from_onchain_tx, outbound_amount_forwarded_msat, } => { - let read_only_network_graph = self.network_graph.read_only(); - let nodes = read_only_network_graph.nodes(); - let channels = self.channel_manager.list_channels(); - - let node_str = |channel_id: &Option| { - channel_id - .and_then(|channel_id| channels.iter().find(|c| c.channel_id == channel_id)) - .and_then(|channel| { - nodes.get(&NodeId::from_pubkey(&channel.counterparty.node_id)) - }) - .map_or("private_node".to_string(), |node| { - node.announcement_info - .as_ref() - .map_or("unnamed node".to_string(), |ann| { - format!("node {}", ann.alias()) - }) - }) - }; - let channel_str = |channel_id: &Option| { - channel_id - .map(|channel_id| format!(" with channel {}", channel_id)) - .unwrap_or_default() - }; - let from_prev_str = format!( - " from {}{}", - node_str(&prev_channel_id), - channel_str(&prev_channel_id) - ); - let to_next_str = - format!(" to {}{}", node_str(&next_channel_id), channel_str(&next_channel_id)); + { + let read_only_network_graph = self.network_graph.read_only(); + let nodes = read_only_network_graph.nodes(); + let channels = self.channel_manager.list_channels(); + + let node_str = |channel_id: &Option| { + channel_id + .and_then(|channel_id| { + channels.iter().find(|c| c.channel_id == channel_id) + }) + .and_then(|channel| { + nodes.get(&NodeId::from_pubkey(&channel.counterparty.node_id)) + }) + .map_or("private_node".to_string(), |node| { + node.announcement_info + .as_ref() + .map_or("unnamed node".to_string(), |ann| { + format!("node {}", ann.alias()) + }) + }) + }; + let channel_str = |channel_id: &Option| { + channel_id + .map(|channel_id| format!(" with channel {}", channel_id)) + .unwrap_or_default() + }; + let from_prev_str = format!( + " from {}{}", + node_str(&prev_channel_id), + channel_str(&prev_channel_id) + ); + let to_next_str = format!( + " to {}{}", + node_str(&next_channel_id), + channel_str(&next_channel_id) + ); - let fee_earned = total_fee_earned_msat.unwrap_or(0); - if claim_from_onchain_tx { - log_info!( + let fee_earned = total_fee_earned_msat.unwrap_or(0); + if claim_from_onchain_tx { + log_info!( self.logger, "Forwarded payment{}{} of {}msat, earning {}msat in fees from claiming onchain.", from_prev_str, @@ -1272,19 +1278,20 @@ where outbound_amount_forwarded_msat.unwrap_or(0), fee_earned, ); - } else { - log_info!( - self.logger, - "Forwarded payment{}{} of {}msat, earning {}msat in fees.", - from_prev_str, - to_next_str, - outbound_amount_forwarded_msat.unwrap_or(0), - fee_earned, - ); + } else { + log_info!( + self.logger, + "Forwarded payment{}{} of {}msat, earning {}msat in fees.", + from_prev_str, + to_next_str, + outbound_amount_forwarded_msat.unwrap_or(0), + fee_earned, + ); + } } if let Some(liquidity_source) = self.liquidity_source.as_ref() { - liquidity_source.handle_payment_forwarded(next_channel_id); + liquidity_source.handle_payment_forwarded(next_channel_id).await; } let event = Event::PaymentForwarded { @@ -1377,11 +1384,9 @@ where ); if let Some(liquidity_source) = self.liquidity_source.as_ref() { - liquidity_source.handle_channel_ready( - user_channel_id, - &channel_id, - &counterparty_node_id, - ); + liquidity_source + .handle_channel_ready(user_channel_id, &channel_id, 
&counterparty_node_id) + .await; } let event = Event::ChannelReady { @@ -1430,12 +1435,14 @@ where .. } => { if let Some(liquidity_source) = self.liquidity_source.as_ref() { - liquidity_source.handle_htlc_intercepted( - requested_next_hop_scid, - intercept_id, - expected_outbound_amount_msat, - payment_hash, - ); + liquidity_source + .handle_htlc_intercepted( + requested_next_hop_scid, + intercept_id, + expected_outbound_amount_msat, + payment_hash, + ) + .await; } }, LdkEvent::InvoiceReceived { .. } => { diff --git a/src/liquidity.rs b/src/liquidity.rs index 5d0bf5afe..4d521bf04 100644 --- a/src/liquidity.rs +++ b/src/liquidity.rs @@ -7,11 +7,12 @@ //! Objects related to liquidity management. +use crate::builder::BuildError; use crate::chain::ChainSource; use crate::connection::ConnectionManager; use crate::logger::{log_debug, log_error, log_info, LdkLogger, Logger}; use crate::runtime::Runtime; -use crate::types::{ChannelManager, KeysManager, LiquidityManager, PeerManager, Wallet}; +use crate::types::{ChannelManager, DynStore, KeysManager, LiquidityManager, PeerManager, Wallet}; use crate::{total_anchor_channels_reserve_sats, Config, Error}; use lightning::events::HTLCHandlingFailureType; @@ -147,6 +148,7 @@ where channel_manager: Arc, keys_manager: Arc, chain_source: Arc, + kv_store: Arc, config: Arc, logger: L, } @@ -157,7 +159,7 @@ where { pub(crate) fn new( wallet: Arc, channel_manager: Arc, keys_manager: Arc, - chain_source: Arc, config: Arc, logger: L, + chain_source: Arc, kv_store: Arc, config: Arc, logger: L, ) -> Self { let lsps1_client = None; let lsps2_client = None; @@ -170,6 +172,7 @@ where channel_manager, keys_manager, chain_source, + kv_store, config, logger, } @@ -220,7 +223,7 @@ where self } - pub(crate) fn build(self) -> LiquiditySource { + pub(crate) async fn build(self) -> Result, BuildError> { let liquidity_service_config = self.lsps2_service.as_ref().map(|s| { let lsps2_service_config = Some(s.ldk_service_config.clone()); let lsps5_service_config = None; @@ -237,17 +240,22 @@ where lsps5_client_config, }); - let liquidity_manager = Arc::new(LiquidityManager::new( - Arc::clone(&self.keys_manager), - Arc::clone(&self.keys_manager), - Arc::clone(&self.channel_manager), - Some(Arc::clone(&self.chain_source)), - None, - liquidity_service_config, - liquidity_client_config, - )); + let liquidity_manager = Arc::new( + LiquidityManager::new( + Arc::clone(&self.keys_manager), + Arc::clone(&self.keys_manager), + Arc::clone(&self.channel_manager), + Some(Arc::clone(&self.chain_source)), + None, + Arc::clone(&self.kv_store), + liquidity_service_config, + liquidity_client_config, + ) + .await + .map_err(|_| BuildError::ReadFailed)?, + ); - LiquiditySource { + Ok(LiquiditySource { lsps1_client: self.lsps1_client, lsps2_client: self.lsps2_client, lsps2_service: self.lsps2_service, @@ -258,7 +266,7 @@ where liquidity_manager, config: self.config, logger: self.logger, - } + }) } } @@ -581,14 +589,17 @@ where } } - match lsps2_service_handler.invoice_parameters_generated( - &counterparty_node_id, - request_id, - intercept_scid, - LSPS2_CHANNEL_CLTV_EXPIRY_DELTA, - LSPS2_CLIENT_TRUSTS_LSP_MODE, - user_channel_id, - ) { + match lsps2_service_handler + .invoice_parameters_generated( + &counterparty_node_id, + request_id, + intercept_scid, + LSPS2_CHANNEL_CLTV_EXPIRY_DELTA, + LSPS2_CLIENT_TRUSTS_LSP_MODE, + user_channel_id, + ) + .await + { Ok(()) => {}, Err(e) => { log_error!( @@ -1246,15 +1257,14 @@ where }) } - pub(crate) fn handle_channel_ready( + pub(crate) async fn 
handle_channel_ready( &self, user_channel_id: u128, channel_id: &ChannelId, counterparty_node_id: &PublicKey, ) { if let Some(lsps2_service_handler) = self.liquidity_manager.lsps2_service_handler() { - if let Err(e) = lsps2_service_handler.channel_ready( - user_channel_id, - channel_id, - counterparty_node_id, - ) { + if let Err(e) = lsps2_service_handler + .channel_ready(user_channel_id, channel_id, counterparty_node_id) + .await + { log_error!( self.logger, "LSPS2 service failed to handle ChannelReady event: {:?}", @@ -1264,17 +1274,20 @@ where } } - pub(crate) fn handle_htlc_intercepted( + pub(crate) async fn handle_htlc_intercepted( &self, intercept_scid: u64, intercept_id: InterceptId, expected_outbound_amount_msat: u64, payment_hash: PaymentHash, ) { if let Some(lsps2_service_handler) = self.liquidity_manager.lsps2_service_handler() { - if let Err(e) = lsps2_service_handler.htlc_intercepted( - intercept_scid, - intercept_id, - expected_outbound_amount_msat, - payment_hash, - ) { + if let Err(e) = lsps2_service_handler + .htlc_intercepted( + intercept_scid, + intercept_id, + expected_outbound_amount_msat, + payment_hash, + ) + .await + { log_error!( self.logger, "LSPS2 service failed to handle HTLCIntercepted event: {:?}", @@ -1284,9 +1297,9 @@ where } } - pub(crate) fn handle_htlc_handling_failed(&self, failure_type: HTLCHandlingFailureType) { + pub(crate) async fn handle_htlc_handling_failed(&self, failure_type: HTLCHandlingFailureType) { if let Some(lsps2_service_handler) = self.liquidity_manager.lsps2_service_handler() { - if let Err(e) = lsps2_service_handler.htlc_handling_failed(failure_type) { + if let Err(e) = lsps2_service_handler.htlc_handling_failed(failure_type).await { log_error!( self.logger, "LSPS2 service failed to handle HTLCHandlingFailed event: {:?}", @@ -1296,10 +1309,10 @@ where } } - pub(crate) fn handle_payment_forwarded(&self, next_channel_id: Option) { + pub(crate) async fn handle_payment_forwarded(&self, next_channel_id: Option) { if let Some(next_channel_id) = next_channel_id { if let Some(lsps2_service_handler) = self.liquidity_manager.lsps2_service_handler() { - if let Err(e) = lsps2_service_handler.payment_forwarded(next_channel_id) { + if let Err(e) = lsps2_service_handler.payment_forwarded(next_channel_id).await { log_error!( self.logger, "LSPS2 service failed to handle PaymentForwarded: {:?}", diff --git a/src/types.rs b/src/types.rs index eef3a3c8d..e911e3889 100644 --- a/src/types.rs +++ b/src/types.rs @@ -81,6 +81,7 @@ pub(crate) type LiquidityManager = lightning_liquidity::LiquidityManager< Arc, Arc, Arc, + Arc, Arc, >; From 7b86727c99f5abb1f0198273af07d9acbe9e3a06 Mon Sep 17 00:00:00 2001 From: Elias Rohrer Date: Thu, 25 Sep 2025 16:19:55 +0200 Subject: [PATCH 26/27] Account for dropped `Arc` for `DefaultTimeProvider` --- src/types.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/types.rs b/src/types.rs index e911e3889..c2a3eda86 100644 --- a/src/types.rs +++ b/src/types.rs @@ -82,7 +82,7 @@ pub(crate) type LiquidityManager = lightning_liquidity::LiquidityManager< Arc, Arc, Arc, - Arc, + DefaultTimeProvider, >; pub(crate) type ChannelManager = lightning::ln::channelmanager::ChannelManager< From 61290975c1d0327a44148301da6860dc35eb017d Mon Sep 17 00:00:00 2001 From: Elias Rohrer Date: Thu, 25 Sep 2025 16:20:22 +0200 Subject: [PATCH 27/27] Account for `lazy` being dropped from `KVStore::remove` --- Cargo.toml | 50 +++++++++++++++++++------------------- src/data_store.rs | 1 - src/io/sqlite_store/mod.rs | 10 ++++---- 
src/io/test_utils.rs | 4 +-- src/io/vss_store.rs | 10 ++++---- tests/common/mod.rs | 14 +++++------ 6 files changed, 43 insertions(+), 46 deletions(-) diff --git a/Cargo.toml b/Cargo.toml index be454febc..42ffb2190 100755 --- a/Cargo.toml +++ b/Cargo.toml @@ -52,29 +52,29 @@ default = [] #lightning-liquidity = { git = "https://github.com/lightningdevkit/rust-lightning", branch = "main" } #lightning-macros = { git = "https://github.com/lightningdevkit/rust-lightning", branch = "main" } -lightning = { git = "https://github.com/lightningdevkit/rust-lightning", rev = "d0765847c85f1c3dc753c17c3e05dbcb1d300204", features = ["std"] } -lightning-types = { git = "https://github.com/lightningdevkit/rust-lightning", rev = "d0765847c85f1c3dc753c17c3e05dbcb1d300204" } -lightning-invoice = { git = "https://github.com/lightningdevkit/rust-lightning", rev = "d0765847c85f1c3dc753c17c3e05dbcb1d300204", features = ["std"] } -lightning-net-tokio = { git = "https://github.com/lightningdevkit/rust-lightning", rev = "d0765847c85f1c3dc753c17c3e05dbcb1d300204" } -lightning-persister = { git = "https://github.com/lightningdevkit/rust-lightning", rev = "d0765847c85f1c3dc753c17c3e05dbcb1d300204", features = ["tokio"] } -lightning-background-processor = { git = "https://github.com/lightningdevkit/rust-lightning", rev = "d0765847c85f1c3dc753c17c3e05dbcb1d300204" } -lightning-rapid-gossip-sync = { git = "https://github.com/lightningdevkit/rust-lightning", rev = "d0765847c85f1c3dc753c17c3e05dbcb1d300204" } -lightning-block-sync = { git = "https://github.com/lightningdevkit/rust-lightning", rev = "d0765847c85f1c3dc753c17c3e05dbcb1d300204", features = ["rest-client", "rpc-client", "tokio"] } -lightning-transaction-sync = { git = "https://github.com/lightningdevkit/rust-lightning", rev = "d0765847c85f1c3dc753c17c3e05dbcb1d300204", features = ["esplora-async-https", "electrum-rustls-ring", "time"] } -lightning-liquidity = { git = "https://github.com/lightningdevkit/rust-lightning", rev = "d0765847c85f1c3dc753c17c3e05dbcb1d300204" } -lightning-macros = { git = "https://github.com/lightningdevkit/rust-lightning", rev = "d0765847c85f1c3dc753c17c3e05dbcb1d300204" } - -#lightning = { path = "../rust-lightning/lightning", features = ["std"] } -#lightning-types = { path = "../rust-lightning/lightning-types" } -#lightning-invoice = { path = "../rust-lightning/lightning-invoice", features = ["std"] } -#lightning-net-tokio = { path = "../rust-lightning/lightning-net-tokio" } -#lightning-persister = { path = "../rust-lightning/lightning-persister", features = ["tokio"] } -#lightning-background-processor = { path = "../rust-lightning/lightning-background-processor" } -#lightning-rapid-gossip-sync = { path = "../rust-lightning/lightning-rapid-gossip-sync" } -#lightning-block-sync = { path = "../rust-lightning/lightning-block-sync", features = ["rest-client", "rpc-client", "tokio"] } -#lightning-transaction-sync = { path = "../rust-lightning/lightning-transaction-sync", features = ["esplora-async-https", "electrum-rustls-ring", "time"] } -#lightning-liquidity = { path = "../rust-lightning/lightning-liquidity", features = ["std"] } -#lightning-macros = { path = "../rust-lightning/lightning-macros" } +#lightning = { git = "https://github.com/lightningdevkit/rust-lightning", rev = "d0765847c85f1c3dc753c17c3e05dbcb1d300204", features = ["std"] } +#lightning-types = { git = "https://github.com/lightningdevkit/rust-lightning", rev = "d0765847c85f1c3dc753c17c3e05dbcb1d300204" } +#lightning-invoice = { git = 
"https://github.com/lightningdevkit/rust-lightning", rev = "d0765847c85f1c3dc753c17c3e05dbcb1d300204", features = ["std"] } +#lightning-net-tokio = { git = "https://github.com/lightningdevkit/rust-lightning", rev = "d0765847c85f1c3dc753c17c3e05dbcb1d300204" } +#lightning-persister = { git = "https://github.com/lightningdevkit/rust-lightning", rev = "d0765847c85f1c3dc753c17c3e05dbcb1d300204", features = ["tokio"] } +#lightning-background-processor = { git = "https://github.com/lightningdevkit/rust-lightning", rev = "d0765847c85f1c3dc753c17c3e05dbcb1d300204" } +#lightning-rapid-gossip-sync = { git = "https://github.com/lightningdevkit/rust-lightning", rev = "d0765847c85f1c3dc753c17c3e05dbcb1d300204" } +#lightning-block-sync = { git = "https://github.com/lightningdevkit/rust-lightning", rev = "d0765847c85f1c3dc753c17c3e05dbcb1d300204", features = ["rest-client", "rpc-client", "tokio"] } +#lightning-transaction-sync = { git = "https://github.com/lightningdevkit/rust-lightning", rev = "d0765847c85f1c3dc753c17c3e05dbcb1d300204", features = ["esplora-async-https", "electrum-rustls-ring", "time"] } +#lightning-liquidity = { git = "https://github.com/lightningdevkit/rust-lightning", rev = "d0765847c85f1c3dc753c17c3e05dbcb1d300204" } +#lightning-macros = { git = "https://github.com/lightningdevkit/rust-lightning", rev = "d0765847c85f1c3dc753c17c3e05dbcb1d300204" } + +lightning = { path = "../rust-lightning/lightning", features = ["std"] } +lightning-types = { path = "../rust-lightning/lightning-types" } +lightning-invoice = { path = "../rust-lightning/lightning-invoice", features = ["std"] } +lightning-net-tokio = { path = "../rust-lightning/lightning-net-tokio" } +lightning-persister = { path = "../rust-lightning/lightning-persister", features = ["tokio"] } +lightning-background-processor = { path = "../rust-lightning/lightning-background-processor" } +lightning-rapid-gossip-sync = { path = "../rust-lightning/lightning-rapid-gossip-sync" } +lightning-block-sync = { path = "../rust-lightning/lightning-block-sync", features = ["rest-client", "rpc-client", "tokio"] } +lightning-transaction-sync = { path = "../rust-lightning/lightning-transaction-sync", features = ["esplora-async-https", "electrum-rustls-ring", "time"] } +lightning-liquidity = { path = "../rust-lightning/lightning-liquidity", features = ["std"] } +lightning-macros = { path = "../rust-lightning/lightning-macros" } bdk_chain = { version = "0.23.0", default-features = false, features = ["std"] } bdk_esplora = { version = "0.22.0", default-features = false, features = ["async-https-rustls", "tokio"]} @@ -109,8 +109,8 @@ winapi = { version = "0.3", features = ["winbase"] } [dev-dependencies] #lightning = { version = "0.1.0", features = ["std", "_test_utils"] } #lightning = { git = "https://github.com/lightningdevkit/rust-lightning", branch="main", features = ["std", "_test_utils"] } -lightning = { git = "https://github.com/lightningdevkit/rust-lightning", rev = "d0765847c85f1c3dc753c17c3e05dbcb1d300204", features = ["std", "_test_utils"] } -#lightning = { path = "../rust-lightning/lightning", features = ["std", "_test_utils"] } +#lightning = { git = "https://github.com/lightningdevkit/rust-lightning", rev = "d0765847c85f1c3dc753c17c3e05dbcb1d300204", features = ["std", "_test_utils"] } +lightning = { path = "../rust-lightning/lightning", features = ["std", "_test_utils"] } proptest = "1.0.0" regex = "1.5.6" diff --git a/src/data_store.rs b/src/data_store.rs index ba68acd4b..1b08e084c 100644 --- a/src/data_store.rs +++ b/src/data_store.rs @@ 
-104,7 +104,6 @@ where &self.primary_namespace, &self.secondary_namespace, &store_key, - false, ) .map_err(|e| { log_error!( diff --git a/src/io/sqlite_store/mod.rs b/src/io/sqlite_store/mod.rs index 7d63022f2..8418203e6 100644 --- a/src/io/sqlite_store/mod.rs +++ b/src/io/sqlite_store/mod.rs @@ -101,14 +101,14 @@ impl KVStore for SqliteStore { }) } fn remove( - &self, primary_namespace: &str, secondary_namespace: &str, key: &str, lazy: bool, + &self, primary_namespace: &str, secondary_namespace: &str, key: &str, ) -> Pin> + Send>> { let primary_namespace = primary_namespace.to_string(); let secondary_namespace = secondary_namespace.to_string(); let key = key.to_string(); let inner = Arc::clone(&self.inner); let fut = tokio::task::spawn_blocking(move || { - inner.remove_internal(&primary_namespace, &secondary_namespace, &key, lazy) + inner.remove_internal(&primary_namespace, &secondary_namespace, &key) }); Box::pin(async move { fut.await.unwrap_or_else(|e| { @@ -149,9 +149,9 @@ impl KVStoreSync for SqliteStore { } fn remove( - &self, primary_namespace: &str, secondary_namespace: &str, key: &str, lazy: bool, + &self, primary_namespace: &str, secondary_namespace: &str, key: &str, ) -> io::Result<()> { - self.inner.remove_internal(primary_namespace, secondary_namespace, key, lazy) + self.inner.remove_internal(primary_namespace, secondary_namespace, key) } fn list(&self, primary_namespace: &str, secondary_namespace: &str) -> io::Result> { @@ -324,7 +324,7 @@ impl SqliteStoreInner { } fn remove_internal( - &self, primary_namespace: &str, secondary_namespace: &str, key: &str, _lazy: bool, + &self, primary_namespace: &str, secondary_namespace: &str, key: &str, ) -> io::Result<()> { check_namespace_key_validity(primary_namespace, secondary_namespace, Some(key), "remove")?; diff --git a/src/io/test_utils.rs b/src/io/test_utils.rs index 244dd9cdc..335b1e344 100644 --- a/src/io/test_utils.rs +++ b/src/io/test_utils.rs @@ -57,7 +57,7 @@ pub(crate) fn do_read_write_remove_list_persist( let read_data = kv_store.read(primary_namespace, secondary_namespace, key).unwrap(); assert_eq!(data, &*read_data); - kv_store.remove(primary_namespace, secondary_namespace, key, false).unwrap(); + kv_store.remove(primary_namespace, secondary_namespace, key).unwrap(); let listed_keys = kv_store.list(primary_namespace, secondary_namespace).unwrap(); assert_eq!(listed_keys.len(), 0); @@ -73,7 +73,7 @@ pub(crate) fn do_read_write_remove_list_persist( let read_data = kv_store.read(&max_chars, &max_chars, &max_chars).unwrap(); assert_eq!(data, &*read_data); - kv_store.remove(&max_chars, &max_chars, &max_chars, false).unwrap(); + kv_store.remove(&max_chars, &max_chars, &max_chars).unwrap(); let listed_keys = kv_store.list(&max_chars, &max_chars).unwrap(); assert_eq!(listed_keys.len(), 0); diff --git a/src/io/vss_store.rs b/src/io/vss_store.rs index 881cc68bc..d61a7d127 100644 --- a/src/io/vss_store.rs +++ b/src/io/vss_store.rs @@ -74,9 +74,9 @@ impl KVStoreSync for VssStore { } fn remove( - &self, primary_namespace: &str, secondary_namespace: &str, key: &str, lazy: bool, + &self, primary_namespace: &str, secondary_namespace: &str, key: &str, ) -> io::Result<()> { - let fut = self.inner.remove_internal(primary_namespace, secondary_namespace, key, lazy); + let fut = self.inner.remove_internal(primary_namespace, secondary_namespace, key); self.runtime.block_on(fut) } @@ -110,14 +110,14 @@ impl KVStore for VssStore { }) } fn remove( - &self, primary_namespace: &str, secondary_namespace: &str, key: &str, lazy: bool, + &self, 
primary_namespace: &str, secondary_namespace: &str, key: &str, ) -> Pin> + Send>> { let primary_namespace = primary_namespace.to_string(); let secondary_namespace = secondary_namespace.to_string(); let key = key.to_string(); let inner = Arc::clone(&self.inner); Box::pin(async move { - inner.remove_internal(&primary_namespace, &secondary_namespace, &key, lazy).await + inner.remove_internal(&primary_namespace, &secondary_namespace, &key).await }) } fn list( @@ -277,7 +277,7 @@ impl VssStoreInner { } async fn remove_internal( - &self, primary_namespace: &str, secondary_namespace: &str, key: &str, _lazy: bool, + &self, primary_namespace: &str, secondary_namespace: &str, key: &str, ) -> io::Result<()> { check_namespace_key_validity(primary_namespace, secondary_namespace, Some(key), "remove")?; let request = DeleteObjectRequest { diff --git a/tests/common/mod.rs b/tests/common/mod.rs index 4c9acc86f..8158985a8 100644 --- a/tests/common/mod.rs +++ b/tests/common/mod.rs @@ -1238,14 +1238,14 @@ impl KVStore for TestSyncStore { }) } fn remove( - &self, primary_namespace: &str, secondary_namespace: &str, key: &str, lazy: bool, + &self, primary_namespace: &str, secondary_namespace: &str, key: &str, ) -> Pin> + Send>> { let primary_namespace = primary_namespace.to_string(); let secondary_namespace = secondary_namespace.to_string(); let key = key.to_string(); let inner = Arc::clone(&self.inner); let fut = tokio::task::spawn_blocking(move || { - inner.remove_internal(&primary_namespace, &secondary_namespace, &key, lazy) + inner.remove_internal(&primary_namespace, &secondary_namespace, &key) }); Box::pin(async move { fut.await.unwrap_or_else(|e| { @@ -1286,9 +1286,9 @@ impl KVStoreSync for TestSyncStore { } fn remove( - &self, primary_namespace: &str, secondary_namespace: &str, key: &str, lazy: bool, + &self, primary_namespace: &str, secondary_namespace: &str, key: &str, ) -> lightning::io::Result<()> { - self.inner.remove_internal(primary_namespace, secondary_namespace, key, lazy) + self.inner.remove_internal(primary_namespace, secondary_namespace, key) } fn list( @@ -1426,24 +1426,22 @@ impl TestSyncStoreInner { } fn remove_internal( - &self, primary_namespace: &str, secondary_namespace: &str, key: &str, lazy: bool, + &self, primary_namespace: &str, secondary_namespace: &str, key: &str, ) -> lightning::io::Result<()> { let _guard = self.serializer.write().unwrap(); let fs_res = - KVStoreSync::remove(&self.fs_store, primary_namespace, secondary_namespace, key, lazy); + KVStoreSync::remove(&self.fs_store, primary_namespace, secondary_namespace, key); let sqlite_res = KVStoreSync::remove( &self.sqlite_store, primary_namespace, secondary_namespace, key, - lazy, ); let test_res = KVStoreSync::remove( &self.test_store, primary_namespace, secondary_namespace, key, - lazy, ); assert!(!self
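For reference, the sync-to-async bridging applied repeatedly in this series (e.g. for `TestSyncStore` and `SqliteStore`) can be condensed into the rough sketch below. The two traits and the `BridgedStore` type are simplified stand-ins for illustration only — not the real `KVStoreSync`/`KVStore` traits from `lightning` — but the mechanics match the patches: clone owned copies of the arguments so the closure can be `'static`, off-load the blocking call with `tokio::task::spawn_blocking`, and surface a join error as an IO error.

use std::future::Future;
use std::io;
use std::pin::Pin;
use std::sync::Arc;

trait SketchStoreSync: Send + Sync + 'static {
    fn read(&self, namespace: &str, key: &str) -> io::Result<Vec<u8>>;
}

trait SketchStoreAsync {
    fn read(
        &self, namespace: &str, key: &str,
    ) -> Pin<Box<dyn Future<Output = io::Result<Vec<u8>>> + Send>>;
}

struct BridgedStore<S: SketchStoreSync> {
    inner: Arc<S>,
}

impl<S: SketchStoreSync> SketchStoreAsync for BridgedStore<S> {
    fn read(
        &self, namespace: &str, key: &str,
    ) -> Pin<Box<dyn Future<Output = io::Result<Vec<u8>>> + Send>> {
        // Clone owned copies so the spawned closure is Send + 'static.
        let namespace = namespace.to_string();
        let key = key.to_string();
        let inner = Arc::clone(&self.inner);
        let fut = tokio::task::spawn_blocking(move || inner.read(&namespace, &key));
        Box::pin(async move {
            // Surface a join error (e.g. a panicked blocking task) as an IO error.
            fut.await.unwrap_or_else(|e| {
                Err(io::Error::new(io::ErrorKind::Other, format!("join error: {}", e)))
            })
        })
    }
}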