diff --git a/.msggen.json b/.msggen.json index 12d622e1812a..33f9655a2c6c 100644 --- a/.msggen.json +++ b/.msggen.json @@ -14921,6 +14921,30 @@ "added": "pre-v0.10.1", "deprecated": "v25.09" }, + "commitment_revocation": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "commitment_revocation.channel_id": { + "added": "v0.10.2", + "deprecated": null + }, + "commitment_revocation.commitment_txid": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "commitment_revocation.commitnum": { + "added": "v0.10.2", + "deprecated": null + }, + "commitment_revocation.penalty_tx": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "commitment_revocation.result": { + "added": "pre-v0.10.1", + "deprecated": null + }, "connect": { "added": "pre-v0.10.1", "deprecated": null @@ -14965,6 +14989,38 @@ "added": "v24.02", "deprecated": null }, + "custommsg_hook": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "custommsg_hook.payload": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "custommsg_hook.peer_id": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "custommsg_hook.result": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "db_write": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "db_write.data_version": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "db_write.result": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "db_write.writes[]": { + "added": "pre-v0.10.1", + "deprecated": null + }, "deprecated_oneshot": { "added": "v24.02", "deprecated": null @@ -15021,232 +15077,780 @@ "added": "pre-v0.10.1", "deprecated": null }, - "forward_event.resolved_time": { + "forward_event.resolved_time": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "forward_event.status": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "forward_event.style": { + "added": "v23.11", + "deprecated": null + }, + "htlc_accepted": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "htlc_accepted.extra_tlvs": { + "added": 
"v25.09", + "deprecated": null + }, + "htlc_accepted.failure_message": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "htlc_accepted.failure_onion": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "htlc_accepted.forward_to": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "htlc_accepted.htlc": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "htlc_accepted.htlc.amount_msat": { + "added": "v0.12.0", + "deprecated": null + }, + "htlc_accepted.htlc.cltv_expiry": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "htlc_accepted.htlc.cltv_expiry_relative": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "htlc_accepted.htlc.extra_tlvs": { + "added": "v25.09", + "deprecated": null + }, + "htlc_accepted.htlc.id": { + "added": "v0.12.0", + "deprecated": null + }, + "htlc_accepted.htlc.payment_hash": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "htlc_accepted.htlc.short_channel_id": { + "added": "v0.12.0", + "deprecated": null + }, + "htlc_accepted.invoice_msat": { + "added": "v25.12", + "deprecated": null + }, + "htlc_accepted.onion": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "htlc_accepted.onion.forward_msat": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "htlc_accepted.onion.next_node_id": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "htlc_accepted.onion.next_onion": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "htlc_accepted.onion.outgoing_cltv_value": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "htlc_accepted.onion.payload": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "htlc_accepted.onion.payment_metadata": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "htlc_accepted.onion.payment_secret": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "htlc_accepted.onion.shared_secret": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "htlc_accepted.onion.short_channel_id": { + "added": "pre-v0.10.1", + "deprecated": null + }, + 
"htlc_accepted.onion.total_msat": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "htlc_accepted.onion.type": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "htlc_accepted.payload": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "htlc_accepted.payment_key": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "htlc_accepted.peer_id": { + "added": "v25.12", + "deprecated": null + }, + "htlc_accepted.result": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "invoice_creation": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "invoice_creation.label": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "invoice_creation.msat": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "invoice_creation.preimage": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "invoice_payment": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "invoice_payment.label": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "invoice_payment.msat": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "invoice_payment.outpoint": { + "added": "v23.11", + "deprecated": null + }, + "invoice_payment.preimage": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "invoice_payment_hook": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "invoice_payment_hook.failure_message": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "invoice_payment_hook.payment": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "invoice_payment_hook.payment.label": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "invoice_payment_hook.payment.msat": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "invoice_payment_hook.payment.preimage": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "invoice_payment_hook.result": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "log": { + "added": "v24.02", + "deprecated": null + }, + "log.level": { + "added": "v24.02", + "deprecated": null + }, + "log.log": { + "added": 
"v24.02", + "deprecated": null + }, + "log.source": { + "added": "v24.02", + "deprecated": null + }, + "log.time": { + "added": "v24.02", + "deprecated": null + }, + "log.timestamp": { + "added": "v24.02", + "deprecated": null + }, + "multifundchannel": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "multifundchannel.channel_ids[]": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "multifundchannel.channel_ids[].channel_id": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "multifundchannel.channel_ids[].channel_type": { + "added": "v24.02", + "deprecated": null + }, + "multifundchannel.channel_ids[].channel_type.bits[]": { + "added": "v24.02", + "deprecated": null + }, + "multifundchannel.channel_ids[].channel_type.names[]": { + "added": "v24.02", + "deprecated": null + }, + "multifundchannel.channel_ids[].close_to": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "multifundchannel.channel_ids[].id": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "multifundchannel.channel_ids[].outnum": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "multifundchannel.commitment_feerate": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "multifundchannel.destinations[]": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "multifundchannel.destinations[].amount": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "multifundchannel.destinations[].announce": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "multifundchannel.destinations[].close_to": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "multifundchannel.destinations[].compact_lease": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "multifundchannel.destinations[].id": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "multifundchannel.destinations[].mindepth": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "multifundchannel.destinations[].push_msat": { + "added": "pre-v0.10.1", + "deprecated": null + }, + 
"multifundchannel.destinations[].request_amt": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "multifundchannel.destinations[].reserve": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "multifundchannel.failed[]": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "multifundchannel.failed[].error": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "multifundchannel.failed[].error.code": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "multifundchannel.failed[].error.message": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "multifundchannel.failed[].id": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "multifundchannel.failed[].method": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "multifundchannel.feerate": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "multifundchannel.minchannels": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "multifundchannel.minconf": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "multifundchannel.tx": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "multifundchannel.txid": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "multifundchannel.utxos[]": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "onion_message_recv": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "onion_message_recv.onion_message": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "onion_message_recv.onion_message.invoice": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "onion_message_recv.onion_message.invoice_error": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "onion_message_recv.onion_message.invoice_request": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "onion_message_recv.onion_message.reply_blindedpath": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "onion_message_recv.onion_message.reply_blindedpath.first_node_id": { + "added": "pre-v0.10.1", + "deprecated": null + }, + 
"onion_message_recv.onion_message.reply_blindedpath.first_path_key": { + "added": "v24.11", + "deprecated": null + }, + "onion_message_recv.onion_message.reply_blindedpath.first_scid": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "onion_message_recv.onion_message.reply_blindedpath.first_scid_dir": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "onion_message_recv.onion_message.reply_blindedpath.hops[]": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "onion_message_recv.onion_message.reply_blindedpath.hops[].blinded_node_id": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "onion_message_recv.onion_message.reply_blindedpath.hops[].encrypted_recipient_data": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "onion_message_recv.onion_message.unknown_fields[]": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "onion_message_recv.onion_message.unknown_fields[].number": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "onion_message_recv.onion_message.unknown_fields[].value": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "onion_message_recv.result": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "onion_message_recv_secret": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "onion_message_recv_secret.onion_message": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "onion_message_recv_secret.onion_message.invoice": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "onion_message_recv_secret.onion_message.invoice_error": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "onion_message_recv_secret.onion_message.invoice_request": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "onion_message_recv_secret.onion_message.pathsecret": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "onion_message_recv_secret.onion_message.reply_blindedpath": { + "added": "pre-v0.10.1", + "deprecated": null + }, + 
"onion_message_recv_secret.onion_message.reply_blindedpath.first_node_id": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "onion_message_recv_secret.onion_message.reply_blindedpath.first_path_key": { + "added": "v24.11", + "deprecated": null + }, + "onion_message_recv_secret.onion_message.reply_blindedpath.first_scid": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "onion_message_recv_secret.onion_message.reply_blindedpath.first_scid_dir": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "onion_message_recv_secret.onion_message.reply_blindedpath.hops[]": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "onion_message_recv_secret.onion_message.reply_blindedpath.hops[].blinded_node_id": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "onion_message_recv_secret.onion_message.reply_blindedpath.hops[].encrypted_recipient_data": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "onion_message_recv_secret.onion_message.unknown_fields[]": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "onion_message_recv_secret.onion_message.unknown_fields[].number": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "onion_message_recv_secret.onion_message.unknown_fields[].value": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "onion_message_recv_secret.result": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "onionmessage_forward_fail": { + "added": "v24.11", + "deprecated": null + }, + "onionmessage_forward_fail.incoming": { + "added": "v24.11", + "deprecated": null + }, + "onionmessage_forward_fail.next_node_id": { + "added": "v24.11", + "deprecated": null + }, + "onionmessage_forward_fail.next_short_channel_id_dir": { + "added": "v24.11", + "deprecated": null + }, + "onionmessage_forward_fail.outgoing": { + "added": "v24.11", + "deprecated": null + }, + "onionmessage_forward_fail.path_key": { + "added": "v24.11", + "deprecated": null + }, + "onionmessage_forward_fail.source": { + "added": "v24.11", + 
"deprecated": null + }, + "openchannel": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "openchannel.close_to": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "openchannel.error_message": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "openchannel.mindepth": { + "added": "v0.12.0", + "deprecated": null + }, + "openchannel.openchannel": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "openchannel.openchannel.channel_flags": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "openchannel.openchannel.channel_reserve_msat": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "openchannel.openchannel.channel_type": { + "added": "v25.09", + "deprecated": null + }, + "openchannel.openchannel.channel_type.bits[]": { "added": "pre-v0.10.1", "deprecated": null }, - "forward_event.status": { + "openchannel.openchannel.channel_type.names[]": { "added": "pre-v0.10.1", "deprecated": null }, - "forward_event.style": { - "added": "v23.11", + "openchannel.openchannel.dust_limit_msat": { + "added": "pre-v0.10.1", "deprecated": null }, - "invoice_creation": { + "openchannel.openchannel.feerate_per_kw": { "added": "pre-v0.10.1", "deprecated": null }, - "invoice_creation.label": { + "openchannel.openchannel.funding_msat": { "added": "pre-v0.10.1", "deprecated": null }, - "invoice_creation.msat": { + "openchannel.openchannel.htlc_minimum_msat": { "added": "pre-v0.10.1", "deprecated": null }, - "invoice_creation.preimage": { + "openchannel.openchannel.id": { "added": "pre-v0.10.1", "deprecated": null }, - "invoice_payment": { + "openchannel.openchannel.max_accepted_htlcs": { "added": "pre-v0.10.1", "deprecated": null }, - "invoice_payment.label": { + "openchannel.openchannel.max_htlc_value_in_flight_msat": { "added": "pre-v0.10.1", "deprecated": null }, - "invoice_payment.msat": { + "openchannel.openchannel.push_msat": { "added": "pre-v0.10.1", "deprecated": null }, - "invoice_payment.outpoint": { - "added": "v23.11", + 
"openchannel.openchannel.shutdown_scriptpubkey": { + "added": "pre-v0.10.1", "deprecated": null }, - "invoice_payment.preimage": { + "openchannel.openchannel.to_self_delay": { "added": "pre-v0.10.1", "deprecated": null }, - "log": { - "added": "v24.02", + "openchannel.reserve": { + "added": "v22.11", "deprecated": null }, - "log.level": { - "added": "v24.02", + "openchannel.result": { + "added": "pre-v0.10.1", "deprecated": null }, - "log.log": { - "added": "v24.02", + "openchannel2": { + "added": "pre-v0.10.1", "deprecated": null }, - "log.source": { - "added": "v24.02", + "openchannel2.close_to": { + "added": "pre-v0.10.1", "deprecated": null }, - "log.time": { - "added": "v24.02", + "openchannel2.error_message": { + "added": "pre-v0.10.1", "deprecated": null }, - "log.timestamp": { - "added": "v24.02", + "openchannel2.openchannel2": { + "added": "pre-v0.10.1", "deprecated": null }, - "multifundchannel": { + "openchannel2.openchannel2.channel_flags": { "added": "pre-v0.10.1", "deprecated": null }, - "multifundchannel.channel_ids[]": { + "openchannel2.openchannel2.channel_id": { "added": "pre-v0.10.1", "deprecated": null }, - "multifundchannel.channel_ids[].channel_id": { + "openchannel2.openchannel2.channel_max_msat": { "added": "pre-v0.10.1", "deprecated": null }, - "multifundchannel.channel_ids[].channel_type": { - "added": "v24.02", + "openchannel2.openchannel2.channel_type": { + "added": "v25.09", "deprecated": null }, - "multifundchannel.channel_ids[].channel_type.bits[]": { - "added": "v24.02", + "openchannel2.openchannel2.channel_type.bits[]": { + "added": "pre-v0.10.1", "deprecated": null }, - "multifundchannel.channel_ids[].channel_type.names[]": { - "added": "v24.02", + "openchannel2.openchannel2.channel_type.names[]": { + "added": "pre-v0.10.1", "deprecated": null }, - "multifundchannel.channel_ids[].close_to": { + "openchannel2.openchannel2.commitment_feerate_per_kw": { "added": "pre-v0.10.1", "deprecated": null }, - 
"multifundchannel.channel_ids[].id": { + "openchannel2.openchannel2.dust_limit_msat": { "added": "pre-v0.10.1", "deprecated": null }, - "multifundchannel.channel_ids[].outnum": { + "openchannel2.openchannel2.feerate_our_max": { "added": "pre-v0.10.1", "deprecated": null }, - "multifundchannel.commitment_feerate": { + "openchannel2.openchannel2.feerate_our_min": { "added": "pre-v0.10.1", "deprecated": null }, - "multifundchannel.destinations[]": { + "openchannel2.openchannel2.funding_feerate_per_kw": { "added": "pre-v0.10.1", "deprecated": null }, - "multifundchannel.destinations[].amount": { + "openchannel2.openchannel2.htlc_minimum_msat": { "added": "pre-v0.10.1", "deprecated": null }, - "multifundchannel.destinations[].announce": { + "openchannel2.openchannel2.id": { "added": "pre-v0.10.1", "deprecated": null }, - "multifundchannel.destinations[].close_to": { + "openchannel2.openchannel2.lease_blockheight_start": { "added": "pre-v0.10.1", "deprecated": null }, - "multifundchannel.destinations[].compact_lease": { + "openchannel2.openchannel2.locktime": { "added": "pre-v0.10.1", "deprecated": null }, - "multifundchannel.destinations[].id": { + "openchannel2.openchannel2.max_accepted_htlcs": { "added": "pre-v0.10.1", "deprecated": null }, - "multifundchannel.destinations[].mindepth": { + "openchannel2.openchannel2.max_htlc_value_in_flight_msat": { "added": "pre-v0.10.1", "deprecated": null }, - "multifundchannel.destinations[].push_msat": { + "openchannel2.openchannel2.node_blockheight": { "added": "pre-v0.10.1", "deprecated": null }, - "multifundchannel.destinations[].request_amt": { + "openchannel2.openchannel2.requested_lease_msat": { "added": "pre-v0.10.1", "deprecated": null }, - "multifundchannel.destinations[].reserve": { - "added": "pre-v0.10.1", + "openchannel2.openchannel2.require_confirmed_inputs": { + "added": "v23.02", "deprecated": null }, - "multifundchannel.failed[]": { + "openchannel2.openchannel2.shutdown_scriptpubkey": { "added": "pre-v0.10.1", 
"deprecated": null }, - "multifundchannel.failed[].error": { + "openchannel2.openchannel2.their_funding_msat": { "added": "pre-v0.10.1", "deprecated": null }, - "multifundchannel.failed[].error.code": { + "openchannel2.openchannel2.to_self_delay": { "added": "pre-v0.10.1", "deprecated": null }, - "multifundchannel.failed[].error.message": { + "openchannel2.our_funding_msat": { "added": "pre-v0.10.1", "deprecated": null }, - "multifundchannel.failed[].id": { + "openchannel2.psbt": { "added": "pre-v0.10.1", "deprecated": null }, - "multifundchannel.failed[].method": { + "openchannel2.result": { "added": "pre-v0.10.1", "deprecated": null }, - "multifundchannel.feerate": { + "openchannel2_changed": { "added": "pre-v0.10.1", "deprecated": null }, - "multifundchannel.minchannels": { + "openchannel2_changed.openchannel2_changed": { "added": "pre-v0.10.1", "deprecated": null }, - "multifundchannel.minconf": { + "openchannel2_changed.openchannel2_changed.channel_id": { "added": "pre-v0.10.1", "deprecated": null }, - "multifundchannel.tx": { + "openchannel2_changed.openchannel2_changed.psbt": { "added": "pre-v0.10.1", "deprecated": null }, - "multifundchannel.txid": { - "added": "pre-v0.10.1", + "openchannel2_changed.openchannel2_changed.require_confirmed_inputs": { + "added": "v23.02", "deprecated": null }, - "multifundchannel.utxos[]": { + "openchannel2_changed.psbt": { "added": "pre-v0.10.1", "deprecated": null }, - "onionmessage_forward_fail": { - "added": "v24.11", + "openchannel2_changed.result": { + "added": "pre-v0.10.1", "deprecated": null }, - "onionmessage_forward_fail.incoming": { - "added": "v24.11", + "openchannel2_sign": { + "added": "pre-v0.10.1", "deprecated": null }, - "onionmessage_forward_fail.next_node_id": { - "added": "v24.11", + "openchannel2_sign.openchannel2_sign": { + "added": "pre-v0.10.1", "deprecated": null }, - "onionmessage_forward_fail.next_short_channel_id_dir": { - "added": "v24.11", + "openchannel2_sign.openchannel2_sign.channel_id": { + 
"added": "pre-v0.10.1", "deprecated": null }, - "onionmessage_forward_fail.outgoing": { - "added": "v24.11", + "openchannel2_sign.openchannel2_sign.psbt": { + "added": "pre-v0.10.1", "deprecated": null }, - "onionmessage_forward_fail.path_key": { - "added": "v24.11", + "openchannel2_sign.psbt": { + "added": "pre-v0.10.1", "deprecated": null }, - "onionmessage_forward_fail.source": { - "added": "v24.11", + "openchannel2_sign.result": { + "added": "pre-v0.10.1", "deprecated": null }, "openchannel_peer_sigs": { @@ -15357,6 +15961,42 @@ "added": "v25.09", "deprecated": null }, + "peer_connected": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "peer_connected.error_message": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "peer_connected.peer": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "peer_connected.peer.addr": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "peer_connected.peer.direction": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "peer_connected.peer.features": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "peer_connected.peer.id": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "peer_connected.peer.remote_addr": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "peer_connected.result": { + "added": "pre-v0.10.1", + "deprecated": null + }, "plugin_started": { "added": "v25.02", "deprecated": null @@ -15389,6 +16029,154 @@ "added": "v25.02", "deprecated": null }, + "rbf_channel": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "rbf_channel.error_message": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "rbf_channel.our_funding_msat": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "rbf_channel.psbt": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "rbf_channel.rbf_channel": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "rbf_channel.rbf_channel.channel_id": { + "added": "pre-v0.10.1", + "deprecated": null + }, + 
"rbf_channel.rbf_channel.channel_max_msat": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "rbf_channel.rbf_channel.feerate_our_max": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "rbf_channel.rbf_channel.feerate_our_min": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "rbf_channel.rbf_channel.funding_feerate_per_kw": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "rbf_channel.rbf_channel.id": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "rbf_channel.rbf_channel.locktime": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "rbf_channel.rbf_channel.our_last_funding_msat": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "rbf_channel.rbf_channel.requested_lease_msat": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "rbf_channel.rbf_channel.require_confirmed_inputs": { + "added": "v23.02", + "deprecated": null + }, + "rbf_channel.rbf_channel.their_funding_msat": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "rbf_channel.rbf_channel.their_last_funding_msat": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "rbf_channel.result": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "recover_hook": { + "added": "v23.08", + "deprecated": null + }, + "recover_hook.codex32": { + "added": "v23.08", + "deprecated": null + }, + "recover_hook.result": { + "added": "v23.08", + "deprecated": null + }, + "rpc_command": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "rpc_command.replace": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "rpc_command.replace.id": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "rpc_command.replace.jsonrpc": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "rpc_command.replace.method": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "rpc_command.replace.params": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "rpc_command.result": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "rpc_command.return": { + 
"added": "pre-v0.10.1", + "deprecated": null + }, + "rpc_command.return.error": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "rpc_command.return.error.code": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "rpc_command.return.error.message": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "rpc_command.return.result": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "rpc_command.rpc_command": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "rpc_command.rpc_command.id": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "rpc_command.rpc_command.method": { + "added": "pre-v0.10.1", + "deprecated": null + }, + "rpc_command.rpc_command.params": { + "added": "pre-v0.10.1", + "deprecated": null + }, "sendpay_failure": { "added": "pre-v0.10.1", "deprecated": null @@ -15613,5 +16401,63 @@ "added": "v24.02", "deprecated": null } + }, + "rpc-only-enum-map": { + "CommitmentRevocationResult": { + "continue": 0 + }, + "CustommsgHookResult": { + "continue": 0 + }, + "DbWriteResult": { + "continue": 0 + }, + "HtlcAcceptedOnionType": { + "tlv": 0 + }, + "HtlcAcceptedResult": { + "continue": 0, + "fail": 1, + "resolve": 2 + }, + "InvoicePaymentHookResult": { + "continue": 0, + "reject": 1 + }, + "Openchannel2ChangedResult": { + "continue": 0 + }, + "Openchannel2Result": { + "continue": 0, + "reject": 1 + }, + "Openchannel2SignResult": { + "continue": 0 + }, + "OpenchannelResult": { + "continue": 0, + "reject": 1 + }, + "PeerConnectedPeerDirection": { + "in": 0, + "out": 1 + }, + "PeerConnectedResult": { + "continue": 0, + "disconnect": 1 + }, + "RbfChannelResult": { + "continue": 0, + "reject": 1 + }, + "RecoverHookResult": { + "continue": 0 + }, + "RpcCommandReplaceJsonrpc": { + "2.0": 0 + }, + "RpcCommandResult": { + "continue": 0 + } } } \ No newline at end of file diff --git a/cln-grpc/Cargo.toml b/cln-grpc/Cargo.toml index 133106079c77..c43039441244 100644 --- a/cln-grpc/Cargo.toml +++ b/cln-grpc/Cargo.toml @@ -27,7 +27,6 @@ tokio 
= { version = "1.36.0", features = ["sync"] } futures-core = "0.3.30" tokio-util = "0.7.10" -[dev-dependencies] serde_json = "1.0.72" [build-dependencies] diff --git a/cln-grpc/proto/primitives.proto b/cln-grpc/proto/primitives.proto index fa62fdcfd732..b91d52926633 100644 --- a/cln-grpc/proto/primitives.proto +++ b/cln-grpc/proto/primitives.proto @@ -147,3 +147,40 @@ enum PluginSubcommand { STARTDIR = 3; LIST = 4; } + +message JsonObjectOrArray { + oneof structure { + JsonObject object = 1; + JsonArray array = 2; + } +} + +message JsonObject { + map fields = 1; +} + +message JsonArray { + repeated JsonValue values = 1; +} + +message JsonValue { + oneof kind { + bool bool_value = 1; + int64 int_value = 2; + uint64 uint_value = 3; + double double_value = 4; + string string_value = 5; + JsonArray array = 6; + JsonObject object = 7; + } +} + +message JsonScalar { + oneof scalar { + bool bool_value = 1; + int64 int_value = 2; + uint64 uint_value = 3; + double double_value = 4; + string string_value = 5; + } +} diff --git a/cln-grpc/src/pb.rs b/cln-grpc/src/pb.rs index b518703bc37a..a1935cf6b1a4 100644 --- a/cln-grpc/src/pb.rs +++ b/cln-grpc/src/pb.rs @@ -10,7 +10,8 @@ mod convert { use cln_rpc::primitives::{ Amount as JAmount, AmountOrAll as JAmountOrAll, AmountOrAny as JAmountOrAny, - Feerate as JFeerate, Outpoint as JOutpoint, OutputDesc as JOutputDesc, + Feerate as JFeerate, JsonObjectOrArray as JJsonObjectOrArray, JsonScalar as JJsonScalar, + Outpoint as JOutpoint, OutputDesc as JOutputDesc, }; impl From for Amount { @@ -281,6 +282,149 @@ mod convert { } } + impl From for JsonValue { + fn from(v: serde_json::Value) -> Self { + let kind = match v { + serde_json::Value::Null => None, + serde_json::Value::Bool(b) => Some(json_value::Kind::BoolValue(b)), + serde_json::Value::Number(n) => { + if let Some(u) = n.as_u64() { + Some(json_value::Kind::UintValue(u)) + } else if let Some(i) = n.as_i64() { + Some(json_value::Kind::IntValue(i)) + } else if let Some(f) = 
n.as_f64() { + Some(json_value::Kind::DoubleValue(f)) + } else { + let error = format!("Failed to parse number: `{}`", n); + println!( + "{}", + serde_json::json!({"jsonrpc": "2.0", + "method": "log", + "params": {"level":"warn", "message": error}}) + ); + std::process::exit(1); + } + } + serde_json::Value::String(s) => Some(json_value::Kind::StringValue(s)), + serde_json::Value::Array(arr) => Some(json_value::Kind::Array(JsonArray { + values: arr.into_iter().map(JsonValue::from).collect(), + })), + serde_json::Value::Object(obj) => Some(json_value::Kind::Object(JsonObject { + fields: obj + .into_iter() + .map(|(k, v)| (k, JsonValue::from(v))) + .collect(), + })), + }; + JsonValue { kind } + } + } + + impl From for JsonObjectOrArray { + fn from(v: JJsonObjectOrArray) -> Self { + let structure = match v { + JJsonObjectOrArray::Array(arr) => { + Some(json_object_or_array::Structure::Array(JsonArray { + values: arr.into_iter().map(JsonValue::from).collect(), + })) + } + JJsonObjectOrArray::Object(obj) => { + Some(json_object_or_array::Structure::Object(JsonObject { + fields: obj + .into_iter() + .map(|(k, v)| (k, JsonValue::from(v))) + .collect(), + })) + } + }; + JsonObjectOrArray { structure } + } + } + + impl From for serde_json::Value { + fn from(v: JsonValue) -> Self { + match v.kind { + None => serde_json::Value::Null, + Some(json_value::Kind::BoolValue(b)) => serde_json::Value::Bool(b), + Some(json_value::Kind::UintValue(u)) => serde_json::Value::Number(u.into()), + Some(json_value::Kind::IntValue(i)) => serde_json::Value::Number(i.into()), + Some(json_value::Kind::DoubleValue(f)) => match serde_json::Number::from_f64(f) { + Some(num) => serde_json::Value::Number(num), + None => { + let error = format!("Failed to parse number: `{}`", f); + println!( + "{}", + serde_json::json!({"jsonrpc": "2.0", + "method": "log", + "params": {"level":"warn", "message": error}}) + ); + std::process::exit(1); + } + }, + Some(json_value::Kind::StringValue(s)) => 
serde_json::Value::String(s), + Some(json_value::Kind::Array(arr)) => serde_json::Value::Array( + arr.values + .into_iter() + .map(serde_json::Value::from) + .collect(), + ), + Some(json_value::Kind::Object(obj)) => serde_json::Value::Object( + obj.fields + .into_iter() + .map(|(k, v)| (k, serde_json::Value::from(v))) + .collect(), + ), + } + } + } + + impl From for JJsonObjectOrArray { + fn from(v: JsonObjectOrArray) -> Self { + match v.structure { + Some(json_object_or_array::Structure::Array(arr)) => JJsonObjectOrArray::Array( + arr.values + .into_iter() + .map(serde_json::Value::from) + .collect(), + ), + Some(json_object_or_array::Structure::Object(obj)) => JJsonObjectOrArray::Object( + obj.fields + .into_iter() + .map(|(k, v)| (k, serde_json::Value::from(v))) + .collect(), + ), + None => JJsonObjectOrArray::Array(vec![]), // or handle as error + } + } + } + + impl From for JJsonScalar { + fn from(v: JsonScalar) -> Self { + match v.scalar { + None => JJsonScalar::Null, + Some(json_scalar::Scalar::BoolValue(b)) => JJsonScalar::Bool(b), + Some(json_scalar::Scalar::IntValue(i)) => JJsonScalar::Number(i.into()), + Some(json_scalar::Scalar::DoubleValue(d)) => { + match serde_json::Number::from_f64(d) { + Some(num) => JJsonScalar::Number(num), + None => { + let error = format!("Failed to parse number: `{}`", d); + println!( + "{}", + serde_json::json!({"jsonrpc": "2.0", + "method": "log", + "params": {"level":"warn", "message": error}}) + ); + std::process::exit(1); + } + } + } + Some(json_scalar::Scalar::UintValue(u)) => JJsonScalar::Number(u.into()), + Some(json_scalar::Scalar::StringValue(s)) => JJsonScalar::String(s), + } + } + } + #[cfg(test)] mod test { use super::*; diff --git a/cln-rpc/Makefile b/cln-rpc/Makefile index 808dd71cca13..342685b6509a 100644 --- a/cln-rpc/Makefile +++ b/cln-rpc/Makefile @@ -2,7 +2,7 @@ cln-rpc-wrongdir: $(MAKE) -C .. 
cln-rpc-all CLN_RPC_EXAMPLES := target/${RUST_PROFILE}/examples/cln-rpc-getinfo -CLN_RPC_GENALL = cln-rpc/src/model.rs cln-rpc/src/notifications.rs +CLN_RPC_GENALL = cln-rpc/src/model.rs cln-rpc/src/notifications.rs cln-rpc/src/hooks.rs CLN_RPC_SOURCES = $(shell find cln-rpc -name *.rs) ${CLN_RPC_GENALL} DEFAULT_TARGETS += $(CLN_RPC_EXAMPLES) $(CLN_RPC_GENALL) diff --git a/cln-rpc/src/hooks.rs b/cln-rpc/src/hooks.rs new file mode 100644 index 000000000000..61dc899b5e34 --- /dev/null +++ b/cln-rpc/src/hooks.rs @@ -0,0 +1,978 @@ +// This file is autogenerated by `msggen` +// Do not edit it manually, your changes will be overwritten + + + +use serde::{Serialize, Deserialize}; +#[derive(Clone, Debug, Deserialize, Serialize)] +pub enum Hook { + #[serde(rename = "peer_connected")] + PeerConnected(events::PeerConnectedEvent), + #[serde(rename = "recover_hook")] + RecoverHook(events::RecoverHookEvent), + #[serde(rename = "commitment_revocation")] + CommitmentRevocation(events::CommitmentRevocationEvent), + #[serde(rename = "db_write")] + DbWrite(events::DbWriteEvent), + #[serde(rename = "invoice_payment_hook")] + InvoicePaymentHook(events::InvoicePaymentHookEvent), + #[serde(rename = "openchannel")] + Openchannel(events::OpenchannelEvent), + #[serde(rename = "openchannel2")] + Openchannel2(events::Openchannel2Event), + #[serde(rename = "openchannel2_changed")] + Openchannel2Changed(events::Openchannel2ChangedEvent), + #[serde(rename = "openchannel2_sign")] + Openchannel2Sign(events::Openchannel2SignEvent), + #[serde(rename = "rbf_channel")] + RbfChannel(events::RbfChannelEvent), + #[serde(rename = "htlc_accepted")] + HtlcAccepted(events::HtlcAcceptedEvent), + #[serde(rename = "rpc_command")] + RpcCommand(events::RpcCommandEvent), + #[serde(rename = "custommsg_hook")] + CustommsgHook(events::CustommsgHookEvent), + #[serde(rename = "onion_message_recv")] + OnionMessageRecv(events::OnionMessageRecvEvent), + #[serde(rename = "onion_message_recv_secret")] + 
OnionMessageRecvSecret(events::OnionMessageRecvSecretEvent), +} + + +pub mod events{ + use crate::primitives::*; + use serde::{Serialize, Deserialize}; + + /// ['Connection direction: `in` for incoming, `out` for outgoing.'] + #[derive(Copy, Clone, Debug, Deserialize, Serialize, PartialEq, Eq)] + #[allow(non_camel_case_types)] + pub enum PeerConnectedPeerDirection { + #[serde(rename = "in")] + IN = 0, + #[serde(rename = "out")] + OUT = 1, + } + + impl TryFrom for PeerConnectedPeerDirection { + type Error = anyhow::Error; + fn try_from(c: i32) -> Result { + match c { + 0 => Ok(PeerConnectedPeerDirection::IN), + 1 => Ok(PeerConnectedPeerDirection::OUT), + o => Err(anyhow::anyhow!("Unknown variant {} for enum PeerConnectedPeerDirection", o)), + } + } + } + + impl ToString for PeerConnectedPeerDirection { + fn to_string(&self) -> String { + match self { + PeerConnectedPeerDirection::IN => "IN", + PeerConnectedPeerDirection::OUT => "OUT", + }.to_string() + } + } + + #[derive(Clone, Debug, Deserialize, Serialize)] + pub struct PeerConnectedPeer { + #[serde(skip_serializing_if = "Option::is_none")] + pub remote_addr: Option, + // Path `peer_connected.peer.direction` + pub direction: PeerConnectedPeerDirection, + pub addr: String, + pub features: String, + pub id: PublicKey, + } + + #[derive(Clone, Debug, Deserialize, Serialize)] + pub struct PeerConnectedEvent { + pub peer: PeerConnectedPeer, + } + + #[derive(Clone, Debug, Deserialize, Serialize)] + pub struct RecoverHookEvent { + pub codex32: String, + } + + #[derive(Clone, Debug, Deserialize, Serialize)] + pub struct CommitmentRevocationEvent { + pub channel_id: Sha256, + pub commitment_txid: String, + pub commitnum: u64, + pub penalty_tx: String, + } + + #[derive(Clone, Debug, Deserialize, Serialize)] + pub struct DbWriteEvent { + pub data_version: u32, + pub writes: Vec, + } + + #[derive(Clone, Debug, Deserialize, Serialize)] + pub struct InvoicePaymentHookPayment { + pub label: String, + pub msat: Amount, + pub 
preimage: Secret, + } + + #[derive(Clone, Debug, Deserialize, Serialize)] + pub struct InvoicePaymentHookEvent { + pub payment: InvoicePaymentHookPayment, + } + + #[derive(Clone, Debug, Deserialize, Serialize)] + pub struct OpenchannelOpenchannelChannelType { + pub bits: Vec, + pub names: Vec, + } + + #[derive(Clone, Debug, Deserialize, Serialize)] + pub struct OpenchannelOpenchannel { + #[serde(skip_serializing_if = "Option::is_none")] + pub channel_type: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub shutdown_scriptpubkey: Option, + pub channel_flags: u8, + pub channel_reserve_msat: Amount, + pub dust_limit_msat: Amount, + pub feerate_per_kw: u32, + pub funding_msat: Amount, + pub htlc_minimum_msat: Amount, + pub id: PublicKey, + pub max_accepted_htlcs: u32, + pub max_htlc_value_in_flight_msat: Amount, + pub push_msat: Amount, + pub to_self_delay: u32, + } + + #[derive(Clone, Debug, Deserialize, Serialize)] + pub struct OpenchannelEvent { + pub openchannel: OpenchannelOpenchannel, + } + + #[derive(Clone, Debug, Deserialize, Serialize)] + pub struct Openchannel2Openchannel2ChannelType { + pub bits: Vec, + pub names: Vec, + } + + #[derive(Clone, Debug, Deserialize, Serialize)] + pub struct Openchannel2Openchannel2 { + #[serde(skip_serializing_if = "Option::is_none")] + pub channel_type: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub lease_blockheight_start: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub node_blockheight: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub requested_lease_msat: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub shutdown_scriptpubkey: Option, + pub channel_flags: u8, + pub channel_id: Sha256, + pub channel_max_msat: Amount, + pub commitment_feerate_per_kw: u32, + pub dust_limit_msat: Amount, + pub feerate_our_max: u32, + pub feerate_our_min: u32, + pub funding_feerate_per_kw: u32, + pub htlc_minimum_msat: Amount, + pub id: PublicKey, + pub locktime: 
u32, + pub max_accepted_htlcs: u16, + pub max_htlc_value_in_flight_msat: Amount, + pub require_confirmed_inputs: bool, + pub their_funding_msat: Amount, + pub to_self_delay: u16, + } + + #[derive(Clone, Debug, Deserialize, Serialize)] + pub struct Openchannel2Event { + pub openchannel2: Openchannel2Openchannel2, + } + + #[derive(Clone, Debug, Deserialize, Serialize)] + pub struct Openchannel2ChangedOpenchannel2Changed { + pub channel_id: Sha256, + pub psbt: String, + pub require_confirmed_inputs: bool, + } + + #[derive(Clone, Debug, Deserialize, Serialize)] + pub struct Openchannel2ChangedEvent { + pub openchannel2_changed: Openchannel2ChangedOpenchannel2Changed, + } + + #[derive(Clone, Debug, Deserialize, Serialize)] + pub struct Openchannel2SignOpenchannel2Sign { + pub channel_id: Sha256, + pub psbt: String, + } + + #[derive(Clone, Debug, Deserialize, Serialize)] + pub struct Openchannel2SignEvent { + pub openchannel2_sign: Openchannel2SignOpenchannel2Sign, + } + + #[derive(Clone, Debug, Deserialize, Serialize)] + pub struct RbfChannelRbfChannel { + #[serde(skip_serializing_if = "Option::is_none")] + pub requested_lease_msat: Option, + pub channel_id: Sha256, + pub channel_max_msat: Amount, + pub feerate_our_max: u32, + pub feerate_our_min: u32, + pub funding_feerate_per_kw: u32, + pub id: PublicKey, + pub locktime: u32, + pub our_last_funding_msat: Amount, + pub require_confirmed_inputs: bool, + pub their_funding_msat: Amount, + pub their_last_funding_msat: Amount, + } + + #[derive(Clone, Debug, Deserialize, Serialize)] + pub struct RbfChannelEvent { + pub rbf_channel: RbfChannelRbfChannel, + } + + #[derive(Clone, Debug, Deserialize, Serialize)] + pub struct HtlcAcceptedHtlc { + #[serde(skip_serializing_if = "Option::is_none")] + pub extra_tlvs: Option, + pub amount_msat: Amount, + pub cltv_expiry: u32, + pub cltv_expiry_relative: u32, + pub id: u64, + pub payment_hash: Sha256, + pub short_channel_id: ShortChannelId, + } + + /// ['Indicates that the payload is 
TLV formatted.', 'Only present if the payload was successfully parsed.'] + #[derive(Copy, Clone, Debug, Deserialize, Serialize, PartialEq, Eq)] + #[allow(non_camel_case_types)] + pub enum HtlcAcceptedOnionType { + #[serde(rename = "tlv")] + TLV = 0, + } + + impl TryFrom for HtlcAcceptedOnionType { + type Error = anyhow::Error; + fn try_from(c: i32) -> Result { + match c { + 0 => Ok(HtlcAcceptedOnionType::TLV), + o => Err(anyhow::anyhow!("Unknown variant {} for enum HtlcAcceptedOnionType", o)), + } + } + } + + impl ToString for HtlcAcceptedOnionType { + fn to_string(&self) -> String { + match self { + HtlcAcceptedOnionType::TLV => "TLV", + }.to_string() + } + } + + #[derive(Clone, Debug, Deserialize, Serialize)] + pub struct HtlcAcceptedOnion { + #[serde(skip_serializing_if = "Option::is_none")] + pub forward_msat: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub item_type: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub next_node_id: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub outgoing_cltv_value: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub payment_metadata: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub payment_secret: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub short_channel_id: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub total_msat: Option, + pub next_onion: String, + pub payload: String, + pub shared_secret: Secret, + } + + #[derive(Clone, Debug, Deserialize, Serialize)] + pub struct HtlcAcceptedEvent { + #[serde(skip_serializing_if = "Option::is_none")] + pub forward_to: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub peer_id: Option, + pub htlc: HtlcAcceptedHtlc, + pub onion: HtlcAcceptedOnion, + } + + #[derive(Clone, Debug, Deserialize, Serialize)] + pub struct RpcCommandRpcCommand { + pub id: JsonScalar, + pub method: String, + pub params: JsonObjectOrArray, + } + + #[derive(Clone, Debug, 
Deserialize, Serialize)] + pub struct RpcCommandEvent { + pub rpc_command: RpcCommandRpcCommand, + } + + #[derive(Clone, Debug, Deserialize, Serialize)] + pub struct CustommsgHookEvent { + pub payload: String, + pub peer_id: PublicKey, + } + + #[derive(Clone, Debug, Deserialize, Serialize)] + pub struct OnionMessageRecvOnionMessageReplyBlindedpathHops { + pub blinded_node_id: PublicKey, + pub encrypted_recipient_data: String, + } + + #[derive(Clone, Debug, Deserialize, Serialize)] + pub struct OnionMessageRecvOnionMessageReplyBlindedpath { + #[serde(skip_serializing_if = "Option::is_none")] + pub first_node_id: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub first_path_key: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub first_scid: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub first_scid_dir: Option, + #[serde(skip_serializing_if = "crate::is_none_or_empty")] + pub hops: Option>, + } + + #[derive(Clone, Debug, Deserialize, Serialize)] + pub struct OnionMessageRecvOnionMessageUnknownFields { + pub number: u64, + pub value: String, + } + + #[derive(Clone, Debug, Deserialize, Serialize)] + pub struct OnionMessageRecvOnionMessage { + #[serde(skip_serializing_if = "Option::is_none")] + pub invoice: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub invoice_error: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub invoice_request: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub reply_blindedpath: Option, + #[serde(skip_serializing_if = "crate::is_none_or_empty")] + pub unknown_fields: Option>, + } + + #[derive(Clone, Debug, Deserialize, Serialize)] + pub struct OnionMessageRecvEvent { + pub onion_message: OnionMessageRecvOnionMessage, + } + + #[derive(Clone, Debug, Deserialize, Serialize)] + pub struct OnionMessageRecvSecretOnionMessageReplyBlindedpathHops { + pub blinded_node_id: PublicKey, + pub encrypted_recipient_data: String, + } + + #[derive(Clone, Debug, 
Deserialize, Serialize)] + pub struct OnionMessageRecvSecretOnionMessageReplyBlindedpath { + #[serde(skip_serializing_if = "Option::is_none")] + pub first_node_id: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub first_path_key: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub first_scid: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub first_scid_dir: Option, + #[serde(skip_serializing_if = "crate::is_none_or_empty")] + pub hops: Option>, + } + + #[derive(Clone, Debug, Deserialize, Serialize)] + pub struct OnionMessageRecvSecretOnionMessageUnknownFields { + pub number: u64, + pub value: String, + } + + #[derive(Clone, Debug, Deserialize, Serialize)] + pub struct OnionMessageRecvSecretOnionMessage { + #[serde(skip_serializing_if = "Option::is_none")] + pub invoice: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub invoice_error: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub invoice_request: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub reply_blindedpath: Option, + #[serde(skip_serializing_if = "crate::is_none_or_empty")] + pub unknown_fields: Option>, + pub pathsecret: Secret, + } + + #[derive(Clone, Debug, Deserialize, Serialize)] + pub struct OnionMessageRecvSecretEvent { + pub onion_message: OnionMessageRecvSecretOnionMessage, + } + +} +pub mod actions{ + use crate::primitives::*; + use serde::{Serialize, Deserialize}; + + /// ['Whether to allow the connection to proceed or disconnect the peer.'] + #[derive(Copy, Clone, Debug, Deserialize, Serialize, PartialEq, Eq)] + #[allow(non_camel_case_types)] + pub enum PeerConnectedResult { + #[serde(rename = "continue")] + CONTINUE = 0, + #[serde(rename = "disconnect")] + DISCONNECT = 1, + } + + impl TryFrom for PeerConnectedResult { + type Error = anyhow::Error; + fn try_from(c: i32) -> Result { + match c { + 0 => Ok(PeerConnectedResult::CONTINUE), + 1 => Ok(PeerConnectedResult::DISCONNECT), + o => 
Err(anyhow::anyhow!("Unknown variant {} for enum PeerConnectedResult", o)), + } + } + } + + impl ToString for PeerConnectedResult { + fn to_string(&self) -> String { + match self { + PeerConnectedResult::CONTINUE => "CONTINUE", + PeerConnectedResult::DISCONNECT => "DISCONNECT", + }.to_string() + } + } + + #[derive(Clone, Debug, Deserialize, Serialize)] + pub struct PeerConnectedAction { + #[serde(skip_serializing_if = "Option::is_none")] + pub error_message: Option, + // Path `peer_connected.result` + pub result: PeerConnectedResult, + } + + /// ['Returning "continue" resumes normal execution.'] + #[derive(Copy, Clone, Debug, Deserialize, Serialize, PartialEq, Eq)] + #[allow(non_camel_case_types)] + pub enum RecoverHookResult { + #[serde(rename = "continue")] + CONTINUE = 0, + } + + impl TryFrom for RecoverHookResult { + type Error = anyhow::Error; + fn try_from(c: i32) -> Result { + match c { + 0 => Ok(RecoverHookResult::CONTINUE), + o => Err(anyhow::anyhow!("Unknown variant {} for enum RecoverHookResult", o)), + } + } + } + + impl ToString for RecoverHookResult { + fn to_string(&self) -> String { + match self { + RecoverHookResult::CONTINUE => "CONTINUE", + }.to_string() + } + } + + #[derive(Clone, Debug, Deserialize, Serialize)] + pub struct RecoverHookAction { + // Path `recover_hook.result` + pub result: RecoverHookResult, + } + + /// ['Plugins should always return "continue", otherwise subsequent hook subscribers would not get called.'] + #[derive(Copy, Clone, Debug, Deserialize, Serialize, PartialEq, Eq)] + #[allow(non_camel_case_types)] + pub enum CommitmentRevocationResult { + #[serde(rename = "continue")] + CONTINUE = 0, + } + + impl TryFrom for CommitmentRevocationResult { + type Error = anyhow::Error; + fn try_from(c: i32) -> Result { + match c { + 0 => Ok(CommitmentRevocationResult::CONTINUE), + o => Err(anyhow::anyhow!("Unknown variant {} for enum CommitmentRevocationResult", o)), + } + } + } + + impl ToString for CommitmentRevocationResult { + fn 
to_string(&self) -> String { + match self { + CommitmentRevocationResult::CONTINUE => "CONTINUE", + }.to_string() + } + } + + #[derive(Clone, Debug, Deserialize, Serialize)] + pub struct CommitmentRevocationAction { + // Path `commitment_revocation.result` + pub result: CommitmentRevocationResult, + } + + /// ['Must be "continue" for the database commit to proceed.', 'Any other value will abort the commit and cause `lightningd` to error.'] + #[derive(Copy, Clone, Debug, Deserialize, Serialize, PartialEq, Eq)] + #[allow(non_camel_case_types)] + pub enum DbWriteResult { + #[serde(rename = "continue")] + CONTINUE = 0, + } + + impl TryFrom for DbWriteResult { + type Error = anyhow::Error; + fn try_from(c: i32) -> Result { + match c { + 0 => Ok(DbWriteResult::CONTINUE), + o => Err(anyhow::anyhow!("Unknown variant {} for enum DbWriteResult", o)), + } + } + } + + impl ToString for DbWriteResult { + fn to_string(&self) -> String { + match self { + DbWriteResult::CONTINUE => "CONTINUE", + }.to_string() + } + } + + #[derive(Clone, Debug, Deserialize, Serialize)] + pub struct DbWriteAction { + // Path `db_write.result` + pub result: DbWriteResult, + } + + /// ['Controls whether the payment is accepted or rejected.', '"continue" accepts the payment.', '"reject" fails the payment.'] + #[derive(Copy, Clone, Debug, Deserialize, Serialize, PartialEq, Eq)] + #[allow(non_camel_case_types)] + pub enum InvoicePaymentHookResult { + #[serde(rename = "continue")] + CONTINUE = 0, + #[serde(rename = "reject")] + REJECT = 1, + } + + impl TryFrom for InvoicePaymentHookResult { + type Error = anyhow::Error; + fn try_from(c: i32) -> Result { + match c { + 0 => Ok(InvoicePaymentHookResult::CONTINUE), + 1 => Ok(InvoicePaymentHookResult::REJECT), + o => Err(anyhow::anyhow!("Unknown variant {} for enum InvoicePaymentHookResult", o)), + } + } + } + + impl ToString for InvoicePaymentHookResult { + fn to_string(&self) -> String { + match self { + InvoicePaymentHookResult::CONTINUE => "CONTINUE", + 
InvoicePaymentHookResult::REJECT => "REJECT", + }.to_string() + } + } + + #[derive(Clone, Debug, Deserialize, Serialize)] + pub struct InvoicePaymentHookAction { + #[serde(skip_serializing_if = "Option::is_none")] + pub failure_message: Option, + // Path `invoice_payment_hook.result` + pub result: InvoicePaymentHookResult, + } + + /// ['Whether to accept or reject the channel opening request.'] + #[derive(Copy, Clone, Debug, Deserialize, Serialize, PartialEq, Eq)] + #[allow(non_camel_case_types)] + pub enum OpenchannelResult { + #[serde(rename = "continue")] + CONTINUE = 0, + #[serde(rename = "reject")] + REJECT = 1, + } + + impl TryFrom for OpenchannelResult { + type Error = anyhow::Error; + fn try_from(c: i32) -> Result { + match c { + 0 => Ok(OpenchannelResult::CONTINUE), + 1 => Ok(OpenchannelResult::REJECT), + o => Err(anyhow::anyhow!("Unknown variant {} for enum OpenchannelResult", o)), + } + } + } + + impl ToString for OpenchannelResult { + fn to_string(&self) -> String { + match self { + OpenchannelResult::CONTINUE => "CONTINUE", + OpenchannelResult::REJECT => "REJECT", + }.to_string() + } + } + + #[derive(Clone, Debug, Deserialize, Serialize)] + pub struct OpenchannelAction { + #[serde(skip_serializing_if = "Option::is_none")] + pub close_to: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub error_message: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub mindepth: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub reserve: Option, + // Path `openchannel.result` + pub result: OpenchannelResult, + } + + /// ['Indicates whether to accept or reject the channel proposal.', 'Returning "continue" allows the channel negotiation to proceed.', 'Returning "reject" aborts the channel opening.'] + #[derive(Copy, Clone, Debug, Deserialize, Serialize, PartialEq, Eq)] + #[allow(non_camel_case_types)] + pub enum Openchannel2Result { + #[serde(rename = "continue")] + CONTINUE = 0, + #[serde(rename = "reject")] + REJECT = 1, + } 
+ + impl TryFrom for Openchannel2Result { + type Error = anyhow::Error; + fn try_from(c: i32) -> Result { + match c { + 0 => Ok(Openchannel2Result::CONTINUE), + 1 => Ok(Openchannel2Result::REJECT), + o => Err(anyhow::anyhow!("Unknown variant {} for enum Openchannel2Result", o)), + } + } + } + + impl ToString for Openchannel2Result { + fn to_string(&self) -> String { + match self { + Openchannel2Result::CONTINUE => "CONTINUE", + Openchannel2Result::REJECT => "REJECT", + }.to_string() + } + } + + #[derive(Clone, Debug, Deserialize, Serialize)] + pub struct Openchannel2Action { + #[serde(skip_serializing_if = "Option::is_none")] + pub close_to: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub error_message: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub our_funding_msat: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub psbt: Option, + // Path `openchannel2.result` + pub result: Openchannel2Result, + } + + /// ['Must be set to `continue` to proceed with the channel opening negotiation.'] + #[derive(Copy, Clone, Debug, Deserialize, Serialize, PartialEq, Eq)] + #[allow(non_camel_case_types)] + pub enum Openchannel2ChangedResult { + #[serde(rename = "continue")] + CONTINUE = 0, + } + + impl TryFrom for Openchannel2ChangedResult { + type Error = anyhow::Error; + fn try_from(c: i32) -> Result { + match c { + 0 => Ok(Openchannel2ChangedResult::CONTINUE), + o => Err(anyhow::anyhow!("Unknown variant {} for enum Openchannel2ChangedResult", o)), + } + } + } + + impl ToString for Openchannel2ChangedResult { + fn to_string(&self) -> String { + match self { + Openchannel2ChangedResult::CONTINUE => "CONTINUE", + }.to_string() + } + } + + #[derive(Clone, Debug, Deserialize, Serialize)] + pub struct Openchannel2ChangedAction { + // Path `openchannel2_changed.result` + pub result: Openchannel2ChangedResult, + pub psbt: String, + } + + /// ['Must be set to `continue` to proceed with channel opening.'] + #[derive(Copy, Clone, Debug, 
Deserialize, Serialize, PartialEq, Eq)] + #[allow(non_camel_case_types)] + pub enum Openchannel2SignResult { + #[serde(rename = "continue")] + CONTINUE = 0, + } + + impl TryFrom for Openchannel2SignResult { + type Error = anyhow::Error; + fn try_from(c: i32) -> Result { + match c { + 0 => Ok(Openchannel2SignResult::CONTINUE), + o => Err(anyhow::anyhow!("Unknown variant {} for enum Openchannel2SignResult", o)), + } + } + } + + impl ToString for Openchannel2SignResult { + fn to_string(&self) -> String { + match self { + Openchannel2SignResult::CONTINUE => "CONTINUE", + }.to_string() + } + } + + #[derive(Clone, Debug, Deserialize, Serialize)] + pub struct Openchannel2SignAction { + // Path `openchannel2_sign.result` + pub result: Openchannel2SignResult, + pub psbt: String, + } + + /// ['Whether to accept or reject the RBF proposal.'] + #[derive(Copy, Clone, Debug, Deserialize, Serialize, PartialEq, Eq)] + #[allow(non_camel_case_types)] + pub enum RbfChannelResult { + #[serde(rename = "continue")] + CONTINUE = 0, + #[serde(rename = "reject")] + REJECT = 1, + } + + impl TryFrom for RbfChannelResult { + type Error = anyhow::Error; + fn try_from(c: i32) -> Result { + match c { + 0 => Ok(RbfChannelResult::CONTINUE), + 1 => Ok(RbfChannelResult::REJECT), + o => Err(anyhow::anyhow!("Unknown variant {} for enum RbfChannelResult", o)), + } + } + } + + impl ToString for RbfChannelResult { + fn to_string(&self) -> String { + match self { + RbfChannelResult::CONTINUE => "CONTINUE", + RbfChannelResult::REJECT => "REJECT", + }.to_string() + } + } + + #[derive(Clone, Debug, Deserialize, Serialize)] + pub struct RbfChannelAction { + #[serde(skip_serializing_if = "Option::is_none")] + pub error_message: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub our_funding_msat: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub psbt: Option, + // Path `rbf_channel.result` + pub result: RbfChannelResult, + } + + /// ['Determines how the HTLC should be handled.', 
'', '`continue` means that the plugin does not want to do anything special and lightningd should continue processing it normally,', "i.e., resolve the payment if we're the recipient, or attempt to forward it otherwise. Notice that the usual checks such as sufficient fees and CLTV deltas are still enforced.", '', 'It can also replace the onion.payload by specifying a payload in the response. Note that this is always a TLV-style payload,', 'so unlike onion.payload there is no length prefix (and it must be at least 4 hex digits long). This will be re-parsed;', "it's useful for removing onion fields which a plugin doesn't want lightningd to consider.", '', 'It can also specify forward_to in the response, replacing the destination.', 'This usually only makes sense if it wants to choose an alternate channel to the same next peer, but is useful if the payload is also replaced.', '', 'Also, it can specify extra_tlvs in the response. This will replace the TLV-stream update_add_htlc_tlvs in the update_add_htlc message for forwarded htlcs.', '', 'If the node is the final destination, the plugin can also replace the amount of the invoice that belongs to the payment_hash by specifying invoice_msat.', '', '', '`fail` will tell lightningd to fail the HTLC with a given hex-encoded `failure_message` (please refer to BOLT #4 for details: `incorrect_or_unknown_payment_details` is the most common).', '', 'Instead of `failure_message` the response can contain a hex-encoded `failure_onion` that will be used instead (please refer to the BOLT #4 for details).', "This can be used, for example, if you're writing a bridge between two Lightning Networks. 
Note that lightningd will apply the obfuscation step to the value", 'returned here with its own shared secret (and key type `ammag`) before returning it to the previous hop.', '', '', '`resolve` instructs lightningd to claim the HTLC by providing the preimage matching the `payment_hash` presented in the call.', 'Notice that the plugin must ensure that the `payment_key` really matches the `payment_hash` since lightningd will not check and the wrong value could result in the channel being closed.'] + #[derive(Copy, Clone, Debug, Deserialize, Serialize, PartialEq, Eq)] + #[allow(non_camel_case_types)] + pub enum HtlcAcceptedResult { + #[serde(rename = "continue")] + CONTINUE = 0, + #[serde(rename = "fail")] + FAIL = 1, + #[serde(rename = "resolve")] + RESOLVE = 2, + } + + impl TryFrom for HtlcAcceptedResult { + type Error = anyhow::Error; + fn try_from(c: i32) -> Result { + match c { + 0 => Ok(HtlcAcceptedResult::CONTINUE), + 1 => Ok(HtlcAcceptedResult::FAIL), + 2 => Ok(HtlcAcceptedResult::RESOLVE), + o => Err(anyhow::anyhow!("Unknown variant {} for enum HtlcAcceptedResult", o)), + } + } + } + + impl ToString for HtlcAcceptedResult { + fn to_string(&self) -> String { + match self { + HtlcAcceptedResult::CONTINUE => "CONTINUE", + HtlcAcceptedResult::FAIL => "FAIL", + HtlcAcceptedResult::RESOLVE => "RESOLVE", + }.to_string() + } + } + + #[derive(Clone, Debug, Deserialize, Serialize)] + pub struct HtlcAcceptedAction { + #[serde(skip_serializing_if = "Option::is_none")] + pub extra_tlvs: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub failure_message: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub failure_onion: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub forward_to: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub invoice_msat: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub payload: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub payment_key: Option, + // Path 
`htlc_accepted.result` + pub result: HtlcAcceptedResult, + } + + #[derive(Clone, Debug, Deserialize, Serialize)] + pub struct RpcCommandReturnError { + pub code: i64, + pub message: String, + } + + #[derive(Clone, Debug, Deserialize, Serialize)] + pub struct RpcCommandReturnResult { + } + + #[derive(Clone, Debug, Deserialize, Serialize)] + pub struct RpcCommandReturn { + #[serde(skip_serializing_if = "Option::is_none")] + pub error: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub result: Option, + } + + /// ['The JSON-RPC version.'] + #[derive(Copy, Clone, Debug, Deserialize, Serialize, PartialEq, Eq)] + #[allow(non_camel_case_types)] + pub enum RpcCommandReplaceJsonrpc { + #[serde(rename = "2.0")] + NUM_2_0 = 0, + } + + impl TryFrom for RpcCommandReplaceJsonrpc { + type Error = anyhow::Error; + fn try_from(c: i32) -> Result { + match c { + 0 => Ok(RpcCommandReplaceJsonrpc::NUM_2_0), + o => Err(anyhow::anyhow!("Unknown variant {} for enum RpcCommandReplaceJsonrpc", o)), + } + } + } + + impl ToString for RpcCommandReplaceJsonrpc { + fn to_string(&self) -> String { + match self { + RpcCommandReplaceJsonrpc::NUM_2_0 => "NUM_2_0", + }.to_string() + } + } + + #[derive(Clone, Debug, Deserialize, Serialize)] + pub struct RpcCommandReplace { + // Path `rpc_command.replace.jsonrpc` + pub jsonrpc: RpcCommandReplaceJsonrpc, + pub id: JsonScalar, + pub method: String, + pub params: JsonObjectOrArray, + } + + /// ['Indicates that lightningd should continue processing the RPC command normally.'] + #[derive(Copy, Clone, Debug, Deserialize, Serialize, PartialEq, Eq)] + #[allow(non_camel_case_types)] + pub enum RpcCommandResult { + #[serde(rename = "continue")] + CONTINUE = 0, + } + + impl TryFrom for RpcCommandResult { + type Error = anyhow::Error; + fn try_from(c: i32) -> Result { + match c { + 0 => Ok(RpcCommandResult::CONTINUE), + o => Err(anyhow::anyhow!("Unknown variant {} for enum RpcCommandResult", o)), + } + } + } + + impl ToString for RpcCommandResult { 
+ fn to_string(&self) -> String { + match self { + RpcCommandResult::CONTINUE => "CONTINUE", + }.to_string() + } + } + + #[derive(Clone, Debug, Deserialize, Serialize)] + pub struct RpcCommandAction { + #[serde(rename = "return")] + #[serde(skip_serializing_if = "Option::is_none")] + pub return_: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub replace: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub result: Option, + } + + /// ['Must always be `continue`. Any other value will cause the hook to fail.'] + #[derive(Copy, Clone, Debug, Deserialize, Serialize, PartialEq, Eq)] + #[allow(non_camel_case_types)] + pub enum CustommsgHookResult { + #[serde(rename = "continue")] + CONTINUE = 0, + } + + impl TryFrom for CustommsgHookResult { + type Error = anyhow::Error; + fn try_from(c: i32) -> Result { + match c { + 0 => Ok(CustommsgHookResult::CONTINUE), + o => Err(anyhow::anyhow!("Unknown variant {} for enum CustommsgHookResult", o)), + } + } + } + + impl ToString for CustommsgHookResult { + fn to_string(&self) -> String { + match self { + CustommsgHookResult::CONTINUE => "CONTINUE", + }.to_string() + } + } + + #[derive(Clone, Debug, Deserialize, Serialize)] + pub struct CustommsgHookAction { + // Path `custommsg_hook.result` + pub result: CustommsgHookResult, + } + + #[derive(Clone, Debug, Deserialize, Serialize)] + pub struct OnionMessageRecvAction { + pub result: String, + } + + #[derive(Clone, Debug, Deserialize, Serialize)] + pub struct OnionMessageRecvSecretAction { + pub result: String, + } + +} diff --git a/cln-rpc/src/lib.rs b/cln-rpc/src/lib.rs index 31c54622208c..9ca81b46feb7 100644 --- a/cln-rpc/src/lib.rs +++ b/cln-rpc/src/lib.rs @@ -91,11 +91,15 @@ use tokio::net::UnixStream; use tokio_util::codec::{FramedRead, FramedWrite}; pub mod codec; +pub mod hooks; pub mod jsonrpc; pub mod model; pub mod notifications; pub mod primitives; +#[cfg(test)] +mod test; + pub use crate::model::TypedRequest; pub use crate::{ model::{Request, 
Response}, @@ -328,291 +332,3 @@ where { f.as_ref().map_or(true, |value| value.is_empty()) } - -#[cfg(test)] -mod test { - use self::notifications::{BlockAddedNotification, CustomMsgNotification}; - - use super::*; - use crate::model::*; - use crate::primitives::PublicKey; - use futures_util::StreamExt; - use serde_json::json; - use std::str::FromStr; - use tokio_util::codec::{Framed, FramedRead}; - - #[tokio::test] - async fn call_raw_request() { - // Set up a pair of unix-streams - // The frame is a mock rpc-server - let (uds1, uds2) = UnixStream::pair().unwrap(); - let mut cln = ClnRpc::from_stream(uds1).unwrap(); - let mut frame = Framed::new(uds2, JsonCodec::default()); - - // Define the request and response send in the RPC-message - let rpc_request = serde_json::json!({ - "id" : 1, - "jsonrpc" : "2.0", - "params" : {}, - "method" : "some_method" - }); - let rpc_request2 = rpc_request.clone(); - - let rpc_response = serde_json::json!({ - "jsonrpc" : "2.0", - "id" : "1", - "result" : {"field_6" : 6} - }); - - // Spawn the task that performs the RPC-call - // Check that it reads the response correctly - let handle = tokio::task::spawn(async move { cln.call_raw_request(rpc_request2).await }); - - // Verify that our emulated server received a request - // and sendt the response - let read_req = dbg!(frame.next().await.unwrap().unwrap()); - assert_eq!(&rpc_request, &read_req); - frame.send(rpc_response).await.unwrap(); - - // Get the result from `call_raw_request` and verify - let actual_response: Result = handle.await.unwrap(); - let actual_response = actual_response.unwrap(); - assert_eq!(actual_response, json!({"field_6" : 6})); - } - - #[tokio::test] - async fn call_raw() { - let req = serde_json::json!({}); - let (uds1, uds2) = UnixStream::pair().unwrap(); - let mut cln = ClnRpc::from_stream(uds1).unwrap(); - - let mut read = FramedRead::new(uds2, JsonCodec::default()); - tokio::task::spawn(async move { - let _: serde_json::Value = cln.call_raw("getinfo", 
&req).await.unwrap(); - }); - - let read_req = dbg!(read.next().await.unwrap().unwrap()); - - assert_eq!( - json!({"id": 1, "method": "getinfo", "params": {}, "jsonrpc": "2.0"}), - read_req - ); - } - - #[tokio::test] - async fn test_call_enum_remote_error() { - // Set up the rpc-connection - // The frame represents a Mock rpc-server - let (uds1, uds2) = UnixStream::pair().unwrap(); - let mut cln = ClnRpc::from_stream(uds1).unwrap(); - let mut frame = Framed::new(uds2, JsonCodec::default()); - - // Construct the request and response - let req = Request::Ping(requests::PingRequest { - id: PublicKey::from_str( - "0364aeb75519be29d1af7b8cc6232dbda9fdabb79b66e4e1f6a223750954db210b", - ) - .unwrap(), - len: None, - pongbytes: None, - }); - - let mock_resp = json!({ - "id" : 1, - "jsonrpc" : "2.0", - "error" : { - "code" : 666, - "message" : "MOCK_ERROR" - } - }); - - // Spawn the task which calls the rpc - let handle = tokio::task::spawn(async move { cln.call(req).await }); - - // Ensure the mock receives the request and returns a response - let _ = dbg!(frame.next().await.unwrap().unwrap()); - frame.send(mock_resp).await.unwrap(); - - let rpc_response: Result<_, RpcError> = handle.await.unwrap(); - let rpc_error: RpcError = rpc_response.unwrap_err(); - - println!("RPC_ERROR : {:?}", rpc_error); - assert_eq!(rpc_error.code.unwrap(), 666); - assert_eq!(rpc_error.message, "MOCK_ERROR"); - } - - #[tokio::test] - async fn test_call_enum() { - // Set up the rpc-connection - // The frame represents a Mock rpc-server - let (uds1, uds2) = UnixStream::pair().unwrap(); - let mut cln = ClnRpc::from_stream(uds1).unwrap(); - let mut frame = Framed::new(uds2, JsonCodec::default()); - - // We'll use the Ping request here because both the request - // and response have few arguments - let req = Request::Ping(requests::PingRequest { - id: PublicKey::from_str( - "0364aeb75519be29d1af7b8cc6232dbda9fdabb79b66e4e1f6a223750954db210b", - ) - .unwrap(), - len: None, - pongbytes: None, - }); - 
let mock_resp = json!({ - "id" : 1, - "jsonrpc" : "2.0", - "result" : { "totlen" : 123 } - }); - - // we create a task that sends the response and returns the response - let handle = tokio::task::spawn(async move { cln.call(req).await }); - - // Ensure our mock receives the request and sends the response - let read_req = dbg!(frame.next().await.unwrap().unwrap()); - assert_eq!( - read_req, - json!({"id" : 1, "jsonrpc" : "2.0", "method" : "ping", "params" : {"id" : "0364aeb75519be29d1af7b8cc6232dbda9fdabb79b66e4e1f6a223750954db210b"}}) - ); - frame.send(mock_resp).await.unwrap(); - - // Verify that the error response is correct - let rpc_response: Result<_, RpcError> = handle.await.unwrap(); - match rpc_response.unwrap() { - Response::Ping(ping) => { - assert_eq!(ping.totlen, 123); - } - _ => panic!("A Request::Getinfo should return Response::Getinfo"), - } - } - - #[tokio::test] - async fn test_call_typed() { - // Set up the rpc-connection - // The frame represents a Mock rpc-server - let (uds1, uds2) = UnixStream::pair().unwrap(); - let mut cln = ClnRpc::from_stream(uds1).unwrap(); - let mut frame = Framed::new(uds2, JsonCodec::default()); - - // We'll use the Ping request here because both the request - // and response have few arguments - let req = requests::PingRequest { - id: PublicKey::from_str( - "0364aeb75519be29d1af7b8cc6232dbda9fdabb79b66e4e1f6a223750954db210b", - ) - .unwrap(), - len: None, - pongbytes: None, - }; - let mock_resp = json!({ - "id" : 1, - "jsonrpc" : "2.0", - "result" : { "totlen" : 123 } - }); - - // we create a task that sends the response and returns the response - let handle = tokio::task::spawn(async move { cln.call_typed(&req).await }); - - // Ensure our mock receives the request and sends the response - _ = dbg!(frame.next().await.unwrap().unwrap()); - frame.send(mock_resp).await.unwrap(); - - // Verify that the error response is correct - let rpc_response: Result<_, RpcError> = handle.await.unwrap(); - let ping_response = 
rpc_response.unwrap(); - assert_eq!(ping_response.totlen, 123); - } - - #[tokio::test] - async fn test_call_typed_remote_error() { - // Create a dummy rpc-request - let req = requests::GetinfoRequest {}; - - // Create a dummy error response - let response = json!({ - "id" : 1, - "jsonrpc" : "2.0", - "error" : { - "code" : 666, - "message" : "MOCK_ERROR", - }}); - - let (uds1, uds2) = UnixStream::pair().unwrap(); - let mut cln = ClnRpc::from_stream(uds1).unwrap(); - - // Send out the request - let mut frame = Framed::new(uds2, JsonCodec::default()); - - let handle = tokio::task::spawn(async move { cln.call_typed(&req).await }); - - // Dummy-server ensures the request has been received and send the error response - let _ = dbg!(frame.next().await.unwrap().unwrap()); - frame.send(response).await.unwrap(); - - let rpc_response = handle.await.unwrap(); - let rpc_error = rpc_response.expect_err("Must be an RPC-error response"); - - assert_eq!(rpc_error.code.unwrap(), 666); - assert_eq!(rpc_error.message, "MOCK_ERROR"); - } - - #[test] - fn serialize_custom_msg_notification() { - let msg = CustomMsgNotification { - peer_id : PublicKey::from_str("0364aeb75519be29d1af7b8cc6232dbda9fdabb79b66e4e1f6a223750954db210b").unwrap(), - payload : String::from("941746573749") - }; - - let notification = Notification::CustomMsg(msg); - - assert_eq!( - serde_json::to_value(notification).unwrap(), - serde_json::json!( - { - "custommsg" : { - "peer_id" : "0364aeb75519be29d1af7b8cc6232dbda9fdabb79b66e4e1f6a223750954db210b", - "payload" : "941746573749" - } - } - ) - ); - - } - - #[test] - fn serialize_block_added_notification() { - let block_added = BlockAddedNotification { - hash : crate::primitives::Sha256::from_str("000000000000000000000acab8abe0c67a52ed7e5a90a19c64930ff11fa84eca").unwrap(), - height : 830702 - }; - - let notification = Notification::BlockAdded(block_added); - - assert_eq!( - serde_json::to_value(notification).unwrap(), - serde_json::json!({ - "block_added" : { - "hash" 
: "000000000000000000000acab8abe0c67a52ed7e5a90a19c64930ff11fa84eca", - "height" : 830702 - } - }) - ) - } - - #[test] - fn deserialize_connect_notification() { - let connect_json = serde_json::json!({ - "connect" : { - "address" : { - "address" : "127.0.0.1", - "port" : 38012, - "type" : "ipv4" - }, - "direction" : "in", - "id" : "022d223620a359a47ff7f7ac447c85c46c923da53389221a0054c11c1e3ca31d59" - } - }); - - let _ : Notification = serde_json::from_value(connect_json).unwrap(); - } -} diff --git a/cln-rpc/src/primitives.rs b/cln-rpc/src/primitives.rs index c99c94d7d39c..adaf75393c43 100644 --- a/cln-rpc/src/primitives.rs +++ b/cln-rpc/src/primitives.rs @@ -1173,3 +1173,19 @@ impl Serialize for TlvStream { map.end() } } + +#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)] +#[serde(untagged)] +pub enum JsonObjectOrArray { + Object(serde_json::Map), + Array(Vec), +} + +#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)] +#[serde(untagged)] +pub enum JsonScalar { + String(String), + Number(serde_json::Number), + Bool(bool), + Null, +} diff --git a/cln-rpc/src/test.rs b/cln-rpc/src/test.rs new file mode 100644 index 000000000000..a5d988031832 --- /dev/null +++ b/cln-rpc/src/test.rs @@ -0,0 +1,1093 @@ +/// Verify serde round-trip: serialize to JSON, deserialize back, and +/// check the re-serialized value matches the first serialization. +macro_rules! 
assert_serde_roundtrip { + ($value:expr, $type:ty) => {{ + let v = serde_json::to_value(&$value).unwrap(); + let rt: $type = serde_json::from_value(v.clone()).unwrap(); + let v2 = serde_json::to_value(&rt).unwrap(); + assert_eq!(v, v2); + }}; +} + +use crate::{ + codec::JsonCodec, + hooks::{actions::*, events::*}, + notifications::{BlockAddedNotification, CustomMsgNotification}, + primitives::{Amount, JsonObjectOrArray, JsonScalar}, + ClnRpc, Notification, RpcError, +}; + +use super::*; +use crate::model::*; +use crate::primitives::PublicKey; +use futures_util::{SinkExt, StreamExt}; +use serde_json::json; +use std::str::FromStr; +use tokio::net::UnixStream; +use tokio_util::codec::{Framed, FramedRead}; + +#[tokio::test] +async fn call_raw_request() { + // Set up a pair of unix-streams + // The frame is a mock rpc-server + let (uds1, uds2) = UnixStream::pair().unwrap(); + let mut cln = ClnRpc::from_stream(uds1).unwrap(); + let mut frame = Framed::new(uds2, JsonCodec::default()); + + // Define the request and response send in the RPC-message + let rpc_request = serde_json::json!({ + "id" : 1, + "jsonrpc" : "2.0", + "params" : {}, + "method" : "some_method" + }); + let rpc_request2 = rpc_request.clone(); + + let rpc_response = serde_json::json!({ + "jsonrpc" : "2.0", + "id" : "1", + "result" : {"field_6" : 6} + }); + + // Spawn the task that performs the RPC-call + // Check that it reads the response correctly + let handle = tokio::task::spawn(async move { cln.call_raw_request(rpc_request2).await }); + + // Verify that our emulated server received a request + // and sendt the response + let read_req = dbg!(frame.next().await.unwrap().unwrap()); + assert_eq!(&rpc_request, &read_req); + frame.send(rpc_response).await.unwrap(); + + // Get the result from `call_raw_request` and verify + let actual_response: Result = handle.await.unwrap(); + let actual_response = actual_response.unwrap(); + assert_eq!(actual_response, json!({"field_6" : 6})); +} + +#[tokio::test] +async fn 
call_raw() { + let req = serde_json::json!({}); + let (uds1, uds2) = UnixStream::pair().unwrap(); + let mut cln = ClnRpc::from_stream(uds1).unwrap(); + + let mut read = FramedRead::new(uds2, JsonCodec::default()); + tokio::task::spawn(async move { + let _: serde_json::Value = cln.call_raw("getinfo", &req).await.unwrap(); + }); + + let read_req = dbg!(read.next().await.unwrap().unwrap()); + + assert_eq!( + json!({"id": 1, "method": "getinfo", "params": {}, "jsonrpc": "2.0"}), + read_req + ); +} + +#[tokio::test] +async fn test_call_enum_remote_error() { + // Set up the rpc-connection + // The frame represents a Mock rpc-server + let (uds1, uds2) = UnixStream::pair().unwrap(); + let mut cln = ClnRpc::from_stream(uds1).unwrap(); + let mut frame = Framed::new(uds2, JsonCodec::default()); + + // Construct the request and response + let req = Request::Ping(requests::PingRequest { + id: PublicKey::from_str( + "0364aeb75519be29d1af7b8cc6232dbda9fdabb79b66e4e1f6a223750954db210b", + ) + .unwrap(), + len: None, + pongbytes: None, + }); + + let mock_resp = json!({ + "id" : 1, + "jsonrpc" : "2.0", + "error" : { + "code" : 666, + "message" : "MOCK_ERROR" + } + }); + + // Spawn the task which calls the rpc + let handle = tokio::task::spawn(async move { cln.call(req).await }); + + // Ensure the mock receives the request and returns a response + let _ = dbg!(frame.next().await.unwrap().unwrap()); + frame.send(mock_resp).await.unwrap(); + + let rpc_response: Result<_, RpcError> = handle.await.unwrap(); + let rpc_error: RpcError = rpc_response.unwrap_err(); + + println!("RPC_ERROR : {:?}", rpc_error); + assert_eq!(rpc_error.code.unwrap(), 666); + assert_eq!(rpc_error.message, "MOCK_ERROR"); +} + +#[tokio::test] +async fn test_call_enum() { + // Set up the rpc-connection + // The frame represents a Mock rpc-server + let (uds1, uds2) = UnixStream::pair().unwrap(); + let mut cln = ClnRpc::from_stream(uds1).unwrap(); + let mut frame = Framed::new(uds2, JsonCodec::default()); + + // We'll 
use the Ping request here because both the request + // and response have few arguments + let req = Request::Ping(requests::PingRequest { + id: PublicKey::from_str( + "0364aeb75519be29d1af7b8cc6232dbda9fdabb79b66e4e1f6a223750954db210b", + ) + .unwrap(), + len: None, + pongbytes: None, + }); + let mock_resp = json!({ + "id" : 1, + "jsonrpc" : "2.0", + "result" : { "totlen" : 123 } + }); + + // we create a task that sends the response and returns the response + let handle = tokio::task::spawn(async move { cln.call(req).await }); + + // Ensure our mock receives the request and sends the response + let read_req = dbg!(frame.next().await.unwrap().unwrap()); + assert_eq!( + read_req, + json!({"id" : 1, "jsonrpc" : "2.0", "method" : "ping", "params" : {"id" : "0364aeb75519be29d1af7b8cc6232dbda9fdabb79b66e4e1f6a223750954db210b"}}) + ); + frame.send(mock_resp).await.unwrap(); + + // Verify that the error response is correct + let rpc_response: Result<_, RpcError> = handle.await.unwrap(); + match rpc_response.unwrap() { + Response::Ping(ping) => { + assert_eq!(ping.totlen, 123); + } + _ => panic!("A Request::Getinfo should return Response::Getinfo"), + } +} + +#[tokio::test] +async fn test_call_typed() { + // Set up the rpc-connection + // The frame represents a Mock rpc-server + let (uds1, uds2) = UnixStream::pair().unwrap(); + let mut cln = ClnRpc::from_stream(uds1).unwrap(); + let mut frame = Framed::new(uds2, JsonCodec::default()); + + // We'll use the Ping request here because both the request + // and response have few arguments + let req = requests::PingRequest { + id: PublicKey::from_str( + "0364aeb75519be29d1af7b8cc6232dbda9fdabb79b66e4e1f6a223750954db210b", + ) + .unwrap(), + len: None, + pongbytes: None, + }; + let mock_resp = json!({ + "id" : 1, + "jsonrpc" : "2.0", + "result" : { "totlen" : 123 } + }); + + // we create a task that sends the response and returns the response + let handle = tokio::task::spawn(async move { cln.call_typed(&req).await }); + + // 
Ensure our mock receives the request and sends the response + _ = dbg!(frame.next().await.unwrap().unwrap()); + frame.send(mock_resp).await.unwrap(); + + // Verify that the error response is correct + let rpc_response: Result<_, RpcError> = handle.await.unwrap(); + let ping_response = rpc_response.unwrap(); + assert_eq!(ping_response.totlen, 123); +} + +#[tokio::test] +async fn test_call_typed_remote_error() { + // Create a dummy rpc-request + let req = requests::GetinfoRequest {}; + + // Create a dummy error response + let response = json!({ + "id" : 1, + "jsonrpc" : "2.0", + "error" : { + "code" : 666, + "message" : "MOCK_ERROR", + }}); + + let (uds1, uds2) = UnixStream::pair().unwrap(); + let mut cln = ClnRpc::from_stream(uds1).unwrap(); + + // Send out the request + let mut frame = Framed::new(uds2, JsonCodec::default()); + + let handle = tokio::task::spawn(async move { cln.call_typed(&req).await }); + + // Dummy-server ensures the request has been received and send the error response + let _ = dbg!(frame.next().await.unwrap().unwrap()); + frame.send(response).await.unwrap(); + + let rpc_response = handle.await.unwrap(); + let rpc_error = rpc_response.expect_err("Must be an RPC-error response"); + + assert_eq!(rpc_error.code.unwrap(), 666); + assert_eq!(rpc_error.message, "MOCK_ERROR"); +} + +#[test] +fn serialize_custom_msg_notification() { + let msg = CustomMsgNotification { + peer_id: PublicKey::from_str( + "0364aeb75519be29d1af7b8cc6232dbda9fdabb79b66e4e1f6a223750954db210b", + ) + .unwrap(), + payload: String::from("941746573749"), + }; + + let notification = Notification::CustomMsg(msg); + + assert_eq!( + serde_json::to_value(notification).unwrap(), + serde_json::json!( + { + "custommsg" : { + "peer_id" : "0364aeb75519be29d1af7b8cc6232dbda9fdabb79b66e4e1f6a223750954db210b", + "payload" : "941746573749" + } + } + ) + ); +} + +#[test] +fn serialize_block_added_notification() { + let block_added = BlockAddedNotification { + hash: 
crate::primitives::Sha256::from_str( + "000000000000000000000acab8abe0c67a52ed7e5a90a19c64930ff11fa84eca", + ) + .unwrap(), + height: 830702, + }; + + let notification = Notification::BlockAdded(block_added); + + assert_eq!( + serde_json::to_value(notification).unwrap(), + serde_json::json!({ + "block_added" : { + "hash" : "000000000000000000000acab8abe0c67a52ed7e5a90a19c64930ff11fa84eca", + "height" : 830702 + } + }) + ) +} + +#[test] +fn deserialize_connect_notification() { + let connect_json = serde_json::json!({ + "connect" : { + "address" : { + "address" : "127.0.0.1", + "port" : 38012, + "type" : "ipv4" + }, + "direction" : "in", + "id" : "022d223620a359a47ff7f7ac447c85c46c923da53389221a0054c11c1e3ca31d59" + } + }); + + let _: Notification = serde_json::from_value(connect_json).unwrap(); +} + +#[test] +fn test_peer_connected_hook() { + let peer_connected_payload = serde_json::json!({ + "peer": { + "id": "03864ef025fde8fb587d989186ce6a4a186895ee44a926bfc370e2c366597a3f8f", + "direction": "in", + "addr": "34.239.230.56:9735", + "features": "" + + } + }); + let peer_connected: PeerConnectedEvent = + serde_json::from_value(peer_connected_payload).unwrap(); + assert_eq!( + peer_connected.peer.id, + PublicKey::from_str("03864ef025fde8fb587d989186ce6a4a186895ee44a926bfc370e2c366597a3f8f") + .unwrap() + ); + assert_eq!(peer_connected.peer.addr, "34.239.230.56:9735"); + assert_eq!( + peer_connected.peer.direction, + PeerConnectedPeerDirection::IN + ); + assert_eq!(peer_connected.peer.features, ""); + assert_serde_roundtrip!(peer_connected, PeerConnectedEvent); +} + +#[test] +fn test_recover_hook() { + let r = serde_json::json!( + { + "codex32": "cl10leetsllhdmn9m42vcsamx24zrxgs3qrl7ahwvhw4fnzrhve25gvezzyqqjdsjnzedu43ns" + }); + let d: RecoverHookEvent = serde_json::from_value(r).unwrap(); + assert_eq!( + d.codex32, + "cl10leetsllhdmn9m42vcsamx24zrxgs3qrl7ahwvhw4fnzrhve25gvezzyqqjdsjnzedu43ns" + ); + assert_serde_roundtrip!(d, RecoverHookEvent); +} + +#[test] +fn 
test_commitment_revocation_hook() { + let r = serde_json::json!({ + "commitment_txid": "58eea2cf538cfed79f4d6b809b920b40bb6b35962c4bb4cc81f5550a7728ab05", + "penalty_tx": "02000000000101...ac00000000", + "channel_id": "fb16398de93e8690c665873715ef590c038dfac5dd6c49a9d4b61dccfcedc2fb", + "commitnum": 21 + }); + let d: CommitmentRevocationEvent = serde_json::from_value(r).unwrap(); + assert_eq!( + d.commitment_txid, + "58eea2cf538cfed79f4d6b809b920b40bb6b35962c4bb4cc81f5550a7728ab05" + ); + assert_eq!(d.penalty_tx, "02000000000101...ac00000000"); + assert_eq!( + d.channel_id.to_string(), + "fb16398de93e8690c665873715ef590c038dfac5dd6c49a9d4b61dccfcedc2fb" + ); + assert_eq!(d.commitnum, 21); + assert_serde_roundtrip!(d, CommitmentRevocationEvent); +} + +#[test] +fn test_db_write_hook() { + let r = serde_json::json!({ + "data_version": 42, + "writes": [ + "PRAGMA foreign_keys = ON" + ] + }); + let d: DbWriteEvent = serde_json::from_value(r).unwrap(); + assert_eq!(d.data_version, 42); + assert_eq!(d.writes, vec!["PRAGMA foreign_keys = ON"]); + assert_serde_roundtrip!(d, DbWriteEvent); +} + +#[test] +fn test_invoice_payment_hook() { + let r = serde_json::json!({ + "payment": { + "label": "unique-label-for-invoice", + "preimage": "0000000000000000000000000000000000000000000000000000000000000000", + "msat": 10000 + } + }); + let d: InvoicePaymentHookEvent = serde_json::from_value(r).unwrap(); + assert_eq!(d.payment.label, "unique-label-for-invoice"); + assert_eq!( + hex::encode(d.payment.preimage.to_vec()), + "0000000000000000000000000000000000000000000000000000000000000000" + ); + assert_eq!(d.payment.msat, Amount::from_msat(10000)); + assert_serde_roundtrip!(d, InvoicePaymentHookEvent); +} + +#[test] +fn test_openchannel_hook() { + let r = serde_json::json!({ + "openchannel": { + "id": "03864ef025fde8fb587d989186ce6a4a186895ee44a926bfc370e2c366597a3f8f", + "funding_msat": 100000000, + "push_msat": 0, + "dust_limit_msat": 546000, + "max_htlc_value_in_flight_msat": 
18446744073709551615u64, + "channel_reserve_msat": 1000000, + "htlc_minimum_msat": 0, + "feerate_per_kw": 7500, + "to_self_delay": 5, + "max_accepted_htlcs": 483, + "channel_flags": 1 + } + }); + let d: OpenchannelEvent = serde_json::from_value(r).unwrap(); + assert_eq!( + d.openchannel.id.to_string(), + "03864ef025fde8fb587d989186ce6a4a186895ee44a926bfc370e2c366597a3f8f" + ); + assert_eq!(d.openchannel.funding_msat, Amount::from_msat(100000000)); + assert_eq!(d.openchannel.push_msat, Amount::from_msat(0)); + assert_eq!(d.openchannel.dust_limit_msat, Amount::from_msat(546000)); + assert_eq!( + d.openchannel.max_htlc_value_in_flight_msat, + Amount::from_msat(18446744073709551615) + ); + assert_eq!( + d.openchannel.channel_reserve_msat, + Amount::from_msat(1000000) + ); + assert_eq!(d.openchannel.htlc_minimum_msat, Amount::from_msat(0)); + assert_eq!(d.openchannel.feerate_per_kw, 7500); + assert_eq!(d.openchannel.to_self_delay, 5); + assert_eq!(d.openchannel.max_accepted_htlcs, 483); + assert_eq!(d.openchannel.channel_flags, 1); + assert_serde_roundtrip!(d, OpenchannelEvent); +} + +#[test] +fn test_openchannel2_hook() { + let r = serde_json::json!({ + "openchannel2": { + "id": "03864ef025fde8fb587d989186ce6a4a186895ee44a926bfc370e2c366597a3f8f", + "channel_id": "252d1b0a1e57895e84137f28cf19ab2c35847e284c112fefdecc7afeaa5c1de7", + "their_funding_msat": 100000000, + "dust_limit_msat": 546000, + "max_htlc_value_in_flight_msat": 18446744073709551615u64, + "htlc_minimum_msat": 0, + "funding_feerate_per_kw": 7500, + "commitment_feerate_per_kw": 7500, + "feerate_our_max": 10000, + "feerate_our_min": 253, + "to_self_delay": 5, + "max_accepted_htlcs": 483, + "channel_flags": 1, + "channel_type": {"bits": [12, 22], "names": ["static_remotekey/even", "anchors/even"]}, + "locktime": 2453, + "channel_max_msat": 16777215000u64, + "requested_lease_msat": 100000000, + "lease_blockheight_start": 683990, + "node_blockheight": 683990, + "require_confirmed_inputs": true + } + }); + let 
d: Openchannel2Event = serde_json::from_value(r).unwrap(); + assert_eq!( + d.openchannel2.id.to_string(), + "03864ef025fde8fb587d989186ce6a4a186895ee44a926bfc370e2c366597a3f8f" + ); + assert_eq!( + d.openchannel2.channel_id.to_string(), + "252d1b0a1e57895e84137f28cf19ab2c35847e284c112fefdecc7afeaa5c1de7" + ); + assert_eq!( + d.openchannel2.their_funding_msat, + Amount::from_msat(100000000) + ); + assert_eq!(d.openchannel2.dust_limit_msat, Amount::from_msat(546000)); + assert_eq!( + d.openchannel2.max_htlc_value_in_flight_msat, + Amount::from_msat(18446744073709551615) + ); + assert_eq!(d.openchannel2.htlc_minimum_msat, Amount::from_msat(0)); + assert_eq!(d.openchannel2.funding_feerate_per_kw, 7500); + assert_eq!(d.openchannel2.commitment_feerate_per_kw, 7500); + assert_eq!(d.openchannel2.feerate_our_max, 10000); + assert_eq!(d.openchannel2.feerate_our_min, 253); + assert_eq!(d.openchannel2.to_self_delay, 5); + assert_eq!(d.openchannel2.max_accepted_htlcs, 483); + assert_eq!(d.openchannel2.channel_flags, 1); + assert_eq!( + d.openchannel2.channel_type.as_ref().unwrap().bits, + vec![12, 22] + ); + assert_eq!( + d.openchannel2.channel_type.as_ref().unwrap().names, + vec!["static_remotekey/even", "anchors/even"] + ); + assert_eq!(d.openchannel2.locktime, 2453); + assert_eq!( + d.openchannel2.channel_max_msat, + Amount::from_msat(16777215000) + ); + assert_eq!( + d.openchannel2.requested_lease_msat.as_ref().unwrap(), + &Amount::from_msat(100000000) + ); + assert_eq!( + *d.openchannel2.lease_blockheight_start.as_ref().unwrap(), + 683990 + ); + assert_eq!(*d.openchannel2.node_blockheight.as_ref().unwrap(), 683990); + assert!(d.openchannel2.require_confirmed_inputs); + assert_serde_roundtrip!(d, Openchannel2Event); +} + +#[test] +fn test_openchannel2_changed_hook() { + let r = serde_json::json!({ + "openchannel2_changed": { + "channel_id": "252d1b0a1e57895e84137f28cf19ab2c35847e284c112fefdecc7afeaa5c1de7", + "psbt": "cHNidP8BADMCAAAAAQ+yBipSVZr...", + 
"require_confirmed_inputs": false + } + }); + let d: Openchannel2ChangedEvent = serde_json::from_value(r).unwrap(); + assert_eq!( + d.openchannel2_changed.channel_id.to_string(), + "252d1b0a1e57895e84137f28cf19ab2c35847e284c112fefdecc7afeaa5c1de7" + ); + assert_eq!( + d.openchannel2_changed.psbt, + "cHNidP8BADMCAAAAAQ+yBipSVZr..." + ); + assert!(!d.openchannel2_changed.require_confirmed_inputs); + assert_serde_roundtrip!(d, Openchannel2ChangedEvent); +} + +#[test] +fn test_openchannel2_sign_hook() { + let r = serde_json::json!({ + "openchannel2_sign": { + "channel_id": "252d1b0a1e57895e84137f28cf19ab2c35847e284c112fefdecc7afeaa5c1de7", + "psbt": "cHNidP8BADMCAAAAAQ+yBipSVZr..." + } + }); + let d: Openchannel2SignEvent = serde_json::from_value(r).unwrap(); + assert_eq!( + d.openchannel2_sign.channel_id.to_string(), + "252d1b0a1e57895e84137f28cf19ab2c35847e284c112fefdecc7afeaa5c1de7" + ); + assert_eq!(d.openchannel2_sign.psbt, "cHNidP8BADMCAAAAAQ+yBipSVZr..."); + assert_serde_roundtrip!(d, Openchannel2SignEvent); +} + +#[test] +fn test_rbf_channel_hook() { + let r = serde_json::json!({ + "rbf_channel": { + "id": "03864ef025fde8fb587d989186ce6a4a186895ee44a926bfc370e2c366597a3f8f", + "channel_id": "252d1b0a1e57895e84137f28cf19ab2c35847e284c112fefdecc7afeaa5c1de7", + "their_last_funding_msat": 100000000, + "their_funding_msat": 100000000, + "our_last_funding_msat": 100000000, + "funding_feerate_per_kw": 7500, + "feerate_our_max": 10000, + "feerate_our_min": 253, + "channel_max_msat": 16777215000u64, + "locktime": 2453, + "requested_lease_msat": 100000000, + "require_confirmed_inputs": true + } + }); + let d: RbfChannelEvent = serde_json::from_value(r).unwrap(); + assert_eq!( + d.rbf_channel.id.to_string(), + "03864ef025fde8fb587d989186ce6a4a186895ee44a926bfc370e2c366597a3f8f" + ); + assert_eq!( + d.rbf_channel.channel_id.to_string(), + "252d1b0a1e57895e84137f28cf19ab2c35847e284c112fefdecc7afeaa5c1de7" + ); + assert_eq!( + d.rbf_channel.their_last_funding_msat, + 
Amount::from_msat(100000000) + ); + assert_eq!( + d.rbf_channel.their_funding_msat, + Amount::from_msat(100000000) + ); + assert_eq!( + d.rbf_channel.our_last_funding_msat, + Amount::from_msat(100000000) + ); + assert_eq!(d.rbf_channel.funding_feerate_per_kw, 7500); + assert_eq!(d.rbf_channel.feerate_our_max, 10000); + assert_eq!(d.rbf_channel.feerate_our_min, 253); + assert_eq!( + d.rbf_channel.channel_max_msat, + Amount::from_msat(16777215000) + ); + assert_eq!(d.rbf_channel.locktime, 2453); + assert_eq!( + d.rbf_channel.requested_lease_msat.unwrap(), + Amount::from_msat(100000000) + ); + assert!(d.rbf_channel.require_confirmed_inputs); + assert_serde_roundtrip!(d, RbfChannelEvent); +} + +#[test] +fn test_htlc_accepted_hook() { + let r = serde_json::json!({ + "peer_id": "02df5ffe895c778e10f7742a6c5b8a0cefbe9465df58b92fadeb883752c8107c8f", + "onion": { + "payload": "", + "short_channel_id": "1x2x3", + "forward_msat": 42, + "outgoing_cltv_value": 500014, + "shared_secret": "0000000000000000000000000000000000000000000000000000000000000000", + "next_onion": "[1365bytes of serialized onion]" + }, + "htlc": { + "short_channel_id": "4x5x6", + "id": 27, + "amount_msat": 43, + "cltv_expiry": 500028, + "cltv_expiry_relative": 10, + "payment_hash": "0000000000000000000000000000000000000000000000000000000000000000", + "extra_tlvs": "fdffff012afe00010001020539" + }, + "forward_to": "0000000000000000000000000000000000000000000000000000000000000000" + }); + let d: HtlcAcceptedEvent = serde_json::from_value(r).unwrap(); + assert_eq!( + d.peer_id.unwrap().to_string(), + "02df5ffe895c778e10f7742a6c5b8a0cefbe9465df58b92fadeb883752c8107c8f" + ); + + assert_eq!(d.onion.payload, ""); + assert_eq!(d.onion.short_channel_id.unwrap().to_string(), "1x2x3"); + assert_eq!(d.onion.forward_msat.unwrap(), Amount::from_msat(42)); + assert_eq!(d.onion.outgoing_cltv_value.unwrap(), 500014); + assert_eq!( + hex::encode(d.onion.shared_secret.to_vec()), + 
"0000000000000000000000000000000000000000000000000000000000000000" + ); + assert_eq!(d.onion.next_onion, "[1365bytes of serialized onion]"); + + assert_eq!(d.htlc.short_channel_id.to_string(), "4x5x6"); + assert_eq!(d.htlc.id, 27); + assert_eq!(d.htlc.amount_msat, Amount::from_msat(43)); + assert_eq!(d.htlc.cltv_expiry, 500028); + assert_eq!(d.htlc.cltv_expiry_relative, 10); + assert_eq!( + d.htlc.payment_hash.to_string(), + "0000000000000000000000000000000000000000000000000000000000000000" + ); + assert_eq!( + d.htlc.extra_tlvs.as_ref().unwrap(), + "fdffff012afe00010001020539" + ); + + assert_eq!( + d.forward_to.unwrap().to_string(), + "0000000000000000000000000000000000000000000000000000000000000000" + ); + assert_serde_roundtrip!(d, HtlcAcceptedEvent); +} + +#[test] +fn test_rpc_command_hook() { + let r = serde_json::json!({ + "rpc_command": { + "id": "3", + "method": "method_name", + "params": { + "param_1": [], + "param_2": {}, + "param_n": "", + } + } + }); + let d: RpcCommandEvent = serde_json::from_value(r).unwrap(); + match &d.rpc_command.id { + JsonScalar::String(s) => assert_eq!(s, "3"), + _ => panic!("should be string"), + } + assert_eq!(d.rpc_command.method, "method_name"); + + let mut params = serde_json::Map::new(); + params.insert("param_1".to_string(), serde_json::Value::Array(vec![])); + params.insert( + "param_2".to_string(), + serde_json::Value::Object(serde_json::Map::new()), + ); + params.insert( + "param_n".to_string(), + serde_json::Value::String("".to_string()), + ); + assert_eq!( + d.rpc_command.params, + JsonObjectOrArray::Object(params.clone()) + ); + assert_serde_roundtrip!(d, RpcCommandEvent); + + let q = serde_json::json!({ + "replace": { + "jsonrpc": "2.0", + "id": "3", + "method": "method_name", + "params": { + "param_1": [], + "param_2": {}, + "param_n": "", + } + } + }); + let e: RpcCommandAction = serde_json::from_value(q).unwrap(); + match &e.replace.as_ref().unwrap().id { + JsonScalar::String(s) => assert_eq!(s, "3"), + _ => 
panic!("should be string"), + } + assert_eq!(e.replace.as_ref().unwrap().method, "method_name"); + assert_eq!( + e.replace.as_ref().unwrap().params, + JsonObjectOrArray::Object(params) + ); + assert_serde_roundtrip!(e, RpcCommandAction); + + let r = serde_json::json!({ + "rpc_command": { + "id": 3, + "method": "method_name", + "params": { + "param_1": [], + "param_2": {}, + "param_n": "", + } + } + }); + let d: RpcCommandEvent = serde_json::from_value(r).unwrap(); + match &d.rpc_command.id { + JsonScalar::Number(number) => assert_eq!(number.as_u64().unwrap(), 3), + _ => panic!("should be number"), + } + assert_eq!(d.rpc_command.method, "method_name"); + + let mut params = serde_json::Map::new(); + params.insert("param_1".to_string(), serde_json::Value::Array(vec![])); + params.insert( + "param_2".to_string(), + serde_json::Value::Object(serde_json::Map::new()), + ); + params.insert( + "param_n".to_string(), + serde_json::Value::String("".to_string()), + ); + assert_eq!( + d.rpc_command.params, + JsonObjectOrArray::Object(params.clone()) + ); + assert_serde_roundtrip!(d, RpcCommandEvent); + + let q = serde_json::json!({ + "replace": { + "jsonrpc": "2.0", + "id": 3, + "method": "method_name", + "params": { + "param_1": [], + "param_2": {}, + "param_n": "", + } + } + }); + let e: RpcCommandAction = serde_json::from_value(q).unwrap(); + match &e.replace.as_ref().unwrap().id { + JsonScalar::Number(number) => assert_eq!(number.as_u64().unwrap(), 3), + _ => panic!("should be number"), + } + assert_eq!(e.replace.as_ref().unwrap().method, "method_name"); + assert_eq!( + e.replace.as_ref().unwrap().params, + JsonObjectOrArray::Object(params) + ); + assert_serde_roundtrip!(e, RpcCommandAction); +} + +#[test] +fn test_custommsg_hook() { + let r = serde_json::json!({ + "peer_id": "02df5ffe895c778e10f7742a6c5b8a0cefbe9465df58b92fadeb883752c8107c8f", + "payload": "1337ffffffff" + }); + let d: CustommsgHookEvent = serde_json::from_value(r).unwrap(); + assert_eq!( + 
d.peer_id.to_string(), + "02df5ffe895c778e10f7742a6c5b8a0cefbe9465df58b92fadeb883752c8107c8f" + ); + assert_eq!(d.payload, "1337ffffffff"); + assert_serde_roundtrip!(d, CustommsgHookEvent); +} + +#[test] +fn test_onionmessage_recv() { + let r = serde_json::json!({ + "onion_message": { + "reply_blindedpath": { + "first_node_id": "02df5ffe895c778e10f7742a6c5b8a0cefbe9465df58b92fadeb883752c8107c8f", + "first_scid": "100x200x300", + "first_scid_dir": 1, + "first_path_key": "02df5ffe895c778e10f7742a6c5b8a0cefbe9465df58b92fadeb883752c8107c8f", + "hops": [ + { + "blinded_node_id": "02df5ffe895c778e10f7742a6c5b8a0cefbe9465df58b92fadeb883752c8107c8f", + "encrypted_recipient_data": "0a020d0da" + } + ] + }, + "invoice_request": "0a020d0db", + "invoice": "0a020d0dc", + "invoice_error": "0a020d0dd", + "unknown_fields": [ + { + "number": 12345, + "value": "0a020d0de" + } + ] + } + }); + let d: OnionMessageRecvEvent = serde_json::from_value(r).unwrap(); + assert_eq!( + d.onion_message + .reply_blindedpath + .as_ref() + .unwrap() + .first_node_id + .as_ref() + .unwrap() + .to_string(), + "02df5ffe895c778e10f7742a6c5b8a0cefbe9465df58b92fadeb883752c8107c8f" + ); + assert_eq!( + d.onion_message + .reply_blindedpath + .as_ref() + .unwrap() + .first_scid + .as_ref() + .unwrap() + .to_string(), + "100x200x300" + ); + assert_eq!( + d.onion_message + .reply_blindedpath + .as_ref() + .unwrap() + .first_scid_dir + .as_ref() + .unwrap(), + &1 + ); + assert_eq!( + d.onion_message + .reply_blindedpath + .as_ref() + .unwrap() + .first_path_key + .as_ref() + .unwrap() + .to_string(), + "02df5ffe895c778e10f7742a6c5b8a0cefbe9465df58b92fadeb883752c8107c8f" + ); + assert_eq!( + d.onion_message + .reply_blindedpath + .as_ref() + .unwrap() + .hops + .as_ref() + .unwrap() + .len(), + 1 + ); + assert_eq!( + d.onion_message + .reply_blindedpath + .as_ref() + .unwrap() + .hops + .as_ref() + .unwrap() + .first() + .unwrap() + .blinded_node_id + .to_string(), + 
"02df5ffe895c778e10f7742a6c5b8a0cefbe9465df58b92fadeb883752c8107c8f" + ); + assert_eq!( + d.onion_message + .reply_blindedpath + .as_ref() + .unwrap() + .hops + .as_ref() + .unwrap() + .first() + .unwrap() + .encrypted_recipient_data, + "0a020d0da" + ); + assert_eq!( + d.onion_message.invoice_request.as_ref().unwrap(), + "0a020d0db" + ); + assert_eq!(d.onion_message.invoice.as_ref().unwrap(), "0a020d0dc"); + assert_eq!(d.onion_message.invoice_error.as_ref().unwrap(), "0a020d0dd"); + assert_eq!(d.onion_message.unknown_fields.as_ref().unwrap().len(), 1); + assert_eq!( + d.onion_message + .unknown_fields + .as_ref() + .unwrap() + .first() + .unwrap() + .number, + 12345 + ); + assert_eq!( + d.onion_message + .unknown_fields + .as_ref() + .unwrap() + .first() + .unwrap() + .value, + "0a020d0de" + ); + assert_serde_roundtrip!(d, OnionMessageRecvEvent); +} + +#[test] +fn test_onionmessage_recv_secret() { + let r = serde_json::json!({ + "onion_message": { + "pathsecret": "0000000000000000000000000000000000000000000000000000000000000000", + "reply_blindedpath": { + "first_node_id": "02df5ffe895c778e10f7742a6c5b8a0cefbe9465df58b92fadeb883752c8107c8f", + "first_scid": "100x200x300", + "first_scid_dir": 1, + "first_path_key": "02df5ffe895c778e10f7742a6c5b8a0cefbe9465df58b92fadeb883752c8107c8f", + "hops": [ + { + "blinded_node_id": "02df5ffe895c778e10f7742a6c5b8a0cefbe9465df58b92fadeb883752c8107c8f", + "encrypted_recipient_data": "0a020d0da" + } + ] + }, + "invoice_request": "0a020d0db", + "invoice": "0a020d0dc", + "invoice_error": "0a020d0dd", + "unknown_fields": [ + { + "number": 12345, + "value": "0a020d0de" + } + ] + } + }); + let d: OnionMessageRecvSecretEvent = serde_json::from_value(r).unwrap(); + assert_eq!( + hex::encode(d.onion_message.pathsecret.to_vec()), + "0000000000000000000000000000000000000000000000000000000000000000" + ); + assert_eq!( + d.onion_message + .reply_blindedpath + .as_ref() + .unwrap() + .first_node_id + .as_ref() + .unwrap() + .to_string(), + 
"02df5ffe895c778e10f7742a6c5b8a0cefbe9465df58b92fadeb883752c8107c8f" + ); + assert_eq!( + d.onion_message + .reply_blindedpath + .as_ref() + .unwrap() + .first_scid + .as_ref() + .unwrap() + .to_string(), + "100x200x300" + ); + assert_eq!( + d.onion_message + .reply_blindedpath + .as_ref() + .unwrap() + .first_scid_dir + .as_ref() + .unwrap(), + &1 + ); + assert_eq!( + d.onion_message + .reply_blindedpath + .as_ref() + .unwrap() + .first_path_key + .as_ref() + .unwrap() + .to_string(), + "02df5ffe895c778e10f7742a6c5b8a0cefbe9465df58b92fadeb883752c8107c8f" + ); + assert_eq!( + d.onion_message + .reply_blindedpath + .as_ref() + .unwrap() + .hops + .as_ref() + .unwrap() + .len(), + 1 + ); + assert_eq!( + d.onion_message + .reply_blindedpath + .as_ref() + .unwrap() + .hops + .as_ref() + .unwrap() + .first() + .unwrap() + .blinded_node_id + .to_string(), + "02df5ffe895c778e10f7742a6c5b8a0cefbe9465df58b92fadeb883752c8107c8f" + ); + assert_eq!( + d.onion_message + .reply_blindedpath + .as_ref() + .unwrap() + .hops + .as_ref() + .unwrap() + .first() + .unwrap() + .encrypted_recipient_data, + "0a020d0da" + ); + assert_eq!( + d.onion_message.invoice_request.as_ref().unwrap(), + "0a020d0db" + ); + assert_eq!(d.onion_message.invoice.as_ref().unwrap(), "0a020d0dc"); + assert_eq!(d.onion_message.invoice_error.as_ref().unwrap(), "0a020d0dd"); + assert_eq!(d.onion_message.unknown_fields.as_ref().unwrap().len(), 1); + assert_eq!( + d.onion_message + .unknown_fields + .as_ref() + .unwrap() + .first() + .unwrap() + .number, + 12345 + ); + assert_eq!( + d.onion_message + .unknown_fields + .as_ref() + .unwrap() + .first() + .unwrap() + .value, + "0a020d0de" + ); + assert_serde_roundtrip!(d, OnionMessageRecvSecretEvent); +} diff --git a/contrib/msggen/msggen/__main__.py b/contrib/msggen/msggen/__main__.py index 46d0e28e4458..d637d3c02c2c 100644 --- a/contrib/msggen/msggen/__main__.py +++ b/contrib/msggen/msggen/__main__.py @@ -4,7 +4,7 @@ from pathlib import Path from msggen.gen.grpc 
import GrpcGenerator, GrpcConverterGenerator, GrpcUnconverterGenerator, GrpcServerGenerator from msggen.gen.grpc2py import Grpc2PyGenerator -from msggen.gen.rpc import RustGenerator, NotificationGenerator +from msggen.gen.rpc import RustGenerator, NotificationGenerator, HookGenerator from msggen.gen.generator import GeneratorChain from msggen.utils import load_jsonrpc_service, combine_schemas import logging @@ -54,6 +54,10 @@ def add_handler_gen_rust_jsonrpc(generator_chain: GeneratorChain, meta): dest = open(fname, "w") generator_chain.add_generator(NotificationGenerator(dest, meta)) + fname = Path("cln-rpc") / "src" / "hooks.rs" + dest = open(fname, "w") + generator_chain.add_generator(HookGenerator(dest, meta)) + def load_msggen_meta(): meta = json.load(open('.msggen.json', 'r')) diff --git a/contrib/msggen/msggen/gen/grpc/convert.py b/contrib/msggen/msggen/gen/grpc/convert.py index a65c447db4fc..a1ea3f5064c0 100644 --- a/contrib/msggen/msggen/gen/grpc/convert.py +++ b/contrib/msggen/msggen/gen/grpc/convert.py @@ -133,8 +133,13 @@ def generate_composite(self, prefix, field: CompositeField, override=None): "DecodeRoutehintList?": f"c.{name}.map(|drl| drl.into())", "string_map": f"Some(c.{name})", "string_map?": f"c.{name}.unwrap_or(HashMap::new())", + "json_object_or_array": f"Some(c.{name})", + "json_object_or_array?": f"c.{name}.map(|f| f.into())", + "json_scalar": f"Some(c.{name})", + "json_scalar?": f"c.{name}.map(|f| f.into())", }.get( - typ, f"c.{name}" # default to just assignment + typ, + f"c.{name}", # default to just assignment ) if f.deprecated: diff --git a/contrib/msggen/msggen/gen/grpc/unconvert.py b/contrib/msggen/msggen/gen/grpc/unconvert.py index 73984f75a47a..69f4d9d88965 100644 --- a/contrib/msggen/msggen/gen/grpc/unconvert.py +++ b/contrib/msggen/msggen/gen/grpc/unconvert.py @@ -134,8 +134,13 @@ def generate_composite(self, prefix, field: CompositeField, override=None) -> No "TlvStream?": f"c.{name}.map(|s| s.into())", "string_map": 
f"c.{name}.unwrap()", "string_map?": f"Some(c.{name})", + "json_object_or_array": f"c.{name}.unwrap()", + "json_object_or_array?": f"Some(c.{name})", + "json_scalar": f"c.{name}.unwrap()", + "json_scalar?": f"Some(c.{name})", }.get( - typ, f"c.{name}" # default to just assignment + typ, + f"c.{name}", # default to just assignment ) self.write(f"{name}: {rhs}, // Rule #1 for type {typ}\n", numindent=3) elif isinstance(f, CompositeField): diff --git a/contrib/msggen/msggen/gen/grpc/util.py b/contrib/msggen/msggen/gen/grpc/util.py index 267691d59be6..fcfb1512255a 100644 --- a/contrib/msggen/msggen/gen/grpc/util.py +++ b/contrib/msggen/msggen/gen/grpc/util.py @@ -36,6 +36,8 @@ "bip340sig": "string", "hash": "bytes", "string_map": "map", + "json_object_or_array": "JsonObjectOrArray", + "json_scalar": "JsonScalar", } diff --git a/contrib/msggen/msggen/gen/grpc2py.py b/contrib/msggen/msggen/gen/grpc2py.py index 9a1feba9aa4f..3f4e1809eadb 100644 --- a/contrib/msggen/msggen/gen/grpc2py.py +++ b/contrib/msggen/msggen/gen/grpc2py.py @@ -21,8 +21,8 @@ def decamelcase(c): "ListPeers.peers[].channels[].state_changes[]": None, } override_field = { - 'RoutehintList': '"routes": [[decodekeysend_routes2py(i) for i in routehints] for routehints in m.routes]', - 'DecodeRoutehintList': '"routes": [[decodepay_routes2py(i) for i in routehints] for routehints in m.routes]', + "RoutehintList": '"routes": [[decodekeysend_routes2py(i) for i in routehints] for routehints in m.routes]', + "DecodeRoutehintList": '"routes": [[decodepay_routes2py(i) for i in routehints] for routehints in m.routes]', } @@ -59,6 +59,9 @@ def __init__(self, dest: TextIO): "currency": "m.{name}", "number": "m.{name}", "outpoint": "m.{name}", + "string_map": "m.{name}", + "json_object_or_array": "m.{name}", + "json_scalar": "m.{name}", } def generate_responses(self, service): @@ -168,7 +171,10 @@ def {converter_name}(m): for f in field.fields: name = f.normalized() if isinstance(f, PrimitiveField) and f.typename in 
override_field: - self.write(f' {override_field[f.typename]}, # OverrideField in {f.typename}\n', cleanup=False) + self.write( + f" {override_field[f.typename]}, # OverrideField in {f.typename}\n", + cleanup=False, + ) elif isinstance(f, PrimitiveField): typ = f.typename diff --git a/contrib/msggen/msggen/gen/rpc/__init__.py b/contrib/msggen/msggen/gen/rpc/__init__.py index dee5ba87eede..0eaba3f813b5 100644 --- a/contrib/msggen/msggen/gen/rpc/__init__.py +++ b/contrib/msggen/msggen/gen/rpc/__init__.py @@ -1,4 +1,5 @@ +from msggen.gen.rpc.hook import HookGenerator from msggen.gen.rpc.notification import NotificationGenerator from msggen.gen.rpc.rust import RustGenerator -__all__ = [RustGenerator, NotificationGenerator] +__all__ = [RustGenerator, NotificationGenerator, HookGenerator] diff --git a/contrib/msggen/msggen/gen/rpc/hook.py b/contrib/msggen/msggen/gen/rpc/hook.py new file mode 100644 index 000000000000..5a084fb617cb --- /dev/null +++ b/contrib/msggen/msggen/gen/rpc/hook.py @@ -0,0 +1,133 @@ +import logging +from textwrap import dedent, indent +from typing import Any, Dict, Optional, TextIO, List + +from msggen.model import Service, TypeName, CompositeField, EnumField, Hook +from msggen.gen.generator import IGenerator +from msggen.gen.rpc.rust import gen_composite +from msggen.gen.grpc.proto import gather_subfields + + +class HookGenerator(IGenerator): + def __init__(self, dest: TextIO, meta: Dict[str, Any]): + self.dest = dest + self.meta = meta + self.logger = logging.getLogger(__name__) + self.meta = meta + + def write(self, text: str, numindent: Optional[int] = None) -> None: + raw = dedent(text) + if numindent is not None: + raw = indent(text, " " * numindent) + + self.dest.write(raw) + + def gather_hook_types(self, hooks: List[Hook]): + """Gather all types that might need to be defined + to represent hooks + """ + types = [] + for hook in hooks: + types.extend([hook.request, hook.response]) + for field in hook.request.fields: + 
types.extend(gather_subfields(field)) + for field in hook.response.fields: + types.extend(gather_subfields(field)) + return types + + def enumvar2number(self, typename: TypeName, variant): + """Find an existing variant number of generate a new one. + + If we don't have a variant number yet we'll just take the + largest one assigned so far and increment it by 1.""" + + typename = str(typename.name) + + m = self.meta["rpc-only-enum-map"] + variant = str(variant) + if typename not in m: + m[typename] = {} + + variants = m[typename] + if variant in variants: + return variants[variant] + + # Now find the maximum and increment once + n = max(variants.values()) if len(variants) else -1 + + m[typename][variant] = n + 1 + return m[typename][variant] + + def generate_enum(self, service: Service): + self.write("#[derive(Clone, Debug, Deserialize, Serialize)]\n") + self.write("pub enum Hook {\n") + for hook in service.hooks: + tn = hook.typename + name = hook.name + self.write(f'#[serde(rename = "{name}")]\n', numindent=1) + (self.write(f"{tn}(events::{tn}Event),\n", numindent=1),) + self.write("}\n") + + def generate_metadata(self, message: CompositeField, typename_override=None): + if message.omit(): + return + + # If override is not specified it is a function that returns itself + # This is equivalent to do not override + if typename_override is None: + typename_override = lambda x: x + + for _, f in enumerate(message.fields): + if isinstance(f, EnumField) and not f.override(): + self.logger.debug(f"Generating enum {f}") + + for i, v in self.enumerate_enum( + typename_override(f.typename), f.variants + ): + self.logger.debug(f"Generating enum variant {v}") + + def enumerate_enum(self, typename, variants): + enumerated_values = [(self.enumvar2number(typename, v), v) for v in variants] + sorted_enumerated_values = sorted(enumerated_values, key=lambda x: x[0]) + for i, v in sorted_enumerated_values: + yield (i, v) + + def generate(self, service: Service) -> None: + hook_fields 
= self.gather_hook_types(service.hooks) + for message in [f for f in hook_fields if isinstance(f, CompositeField)]: + self.generate_metadata(message, hook_typename_overrides) + + self.write("// This file is autogenerated by `msggen`\n") + self.write("// Do not edit it manually, your changes will be overwritten\n\n\n") + self.write("\n") + self.write("use serde::{Serialize, Deserialize};\n") + + self.generate_enum(service) + self.write("\n\n") + + self.write("pub mod events{\n") + self.write("use crate::primitives::*;\n", numindent=1) + self.write("use serde::{Serialize, Deserialize};\n\n", numindent=1) + for hook in service.hooks: + _, req_decl = gen_composite(hook.request, self.meta) + self.write(req_decl, numindent=1) + self.write("}\n") + + self.write("pub mod actions{\n") + self.write("use crate::primitives::*;\n", numindent=1) + self.write("use serde::{Serialize, Deserialize};\n\n", numindent=1) + for hook in service.hooks: + _, resp_decl = gen_composite( + hook.response, self.meta, hook_typename_overrides + ) + self.write(resp_decl, numindent=1) + self.write("}\n") + + +def hook_typename_overrides(typename: str): + # no overrides needed for now + # if isinstance(typename, TypeName): + # return_class = TypeName + # else: + # return_class = str + return typename diff --git a/contrib/msggen/msggen/gen/rpc/rust.py b/contrib/msggen/msggen/gen/rpc/rust.py index b2fa76251666..07b51a47b095 100644 --- a/contrib/msggen/msggen/gen/rpc/rust.py +++ b/contrib/msggen/msggen/gen/rpc/rust.py @@ -47,6 +47,8 @@ "bip340sig": "String", "integer": "i64", "string_map": "HashMap", + "json_object_or_array": "JsonObjectOrArray", + "json_scalar": "JsonScalar", } header = f""" @@ -110,12 +112,17 @@ def gen_enum(e, meta, override): m = meta["grpc-field-map"] m2 = meta["grpc-enum-map"] + m3 = meta["rpc-only-enum-map"] + + count = sum(message_name in d for d in (m, m2, m3)) + assert count <= 1 - assert not (message_name in m and message_name in m2) if message_name in m: m = m[message_name] 
elif message_name in m2: m = m2[message_name] + elif message_name in m3: + m = m3[message_name] else: m = {} @@ -221,7 +228,7 @@ def gen_primitive(p): def rename_if_necessary(original, name): if str(original) != str(name): - return f" #[serde(rename = \"{original}\")]\n" + return f' #[serde(rename = "{original}")]\n' else: return f"" @@ -253,9 +260,9 @@ def gen_array(a, meta, override=None): # Note: flake8 gets confused on these strings in f strings, hence suppression: # contrib/msggen/msggen/gen/rpc/rust.py:250:42: E226 missing whitespace around arithmetic operator if not a.optional: - defi += f" pub {name}: {'Vec<'*a.dims}{itemtype}{'>'*a.dims},\n" # noqa: E226 + defi += f" pub {name}: {'Vec<' * a.dims}{itemtype}{'>' * a.dims},\n" # noqa: E226 else: - defi += f" #[serde(skip_serializing_if = \"crate::is_none_or_empty\")]\n pub {name}: Option<{'Vec<'*a.dims}{itemtype}{'>'*a.dims}>,\n" # noqa: E226 + defi += f' #[serde(skip_serializing_if = "crate::is_none_or_empty")]\n pub {name}: Option<{"Vec<" * a.dims}{itemtype}{">" * a.dims}>,\n' # noqa: E226 return (defi, decl) @@ -282,7 +289,7 @@ def gen_composite(c, meta, override=None) -> Tuple[str, str]: if not c.optional: defi += f" pub {c.normalized()}: {c.typename},\n" else: - defi += f" #[serde(skip_serializing_if = \"Option::is_none\")]\n pub {c.normalized()}: Option<{c.typename}>,\n" + defi += f' #[serde(skip_serializing_if = "Option::is_none")]\n pub {c.normalized()}: Option<{c.typename}>,\n' return defi, r diff --git a/contrib/msggen/msggen/model.py b/contrib/msggen/msggen/model.py index a48336a97145..3925fd7f1c14 100644 --- a/contrib/msggen/msggen/model.py +++ b/contrib/msggen/msggen/model.py @@ -17,7 +17,8 @@ def __init__(self, name): def normalized(self): name = { - "type": "item_type" + "type": "item_type", + "return": "return_", }.get(self.name, self.name) name = name.replace(' ', '_').replace('-', '_').replace('[]', '').replace("/", "_") return name @@ -135,10 +136,11 @@ def override(self, default: 
Optional[str] = None) -> Optional[str]: class Service: """Top level class that wraps all the RPC methods. """ - def __init__(self, name: str, methods=None, notifications=None): + def __init__(self, name: str, methods=None, notifications=None, hooks=None): self.name: str = name self.methods: List[Method] = [] if methods is None else methods self.notifications: List[Notification] = [] if notifications is None else notifications + self.hooks: List[Hook] = [] if hooks is None else hooks # If we require linking with some external files we'll add # them here so the generator can use them. @@ -175,9 +177,24 @@ def gather_subfields(field: Field) -> List[Field]: for field in notification.response.fields: types.extend(gather_subfields(field)) + for hook in self.hooks: + types.extend([hook.request]) + for field in hook.request.fields: + types.extend(gather_subfields(field)) + for field in hook.response.fields: + types.extend(gather_subfields(field)) + return types +class Hook: + def __init__(self, name: str, typename: str, request: Field, response: Field): + self.name = name + self.typename = typename + self.request = request + self.response = response + + class Notification: def __init__(self, name: str, typename: str, request: Field, response: Field): self.name = name @@ -338,6 +355,9 @@ def __eq__(self, other): return self.variant == other.variant def normalized(self): + if self.variant.replace('.', '', 1).isdigit() or self.variant.lstrip('-').replace('.', '', 1).isdigit(): + normalized = self.variant.replace('.', '_') + return f"NUM_{normalized}" return self.variant.replace(' ', '_').replace('-', '_').replace("/", "_").upper() @@ -433,6 +453,8 @@ class PrimitiveField(Field): "bip340sig", "hash", "string_map", + "json_object_or_array", + "json_scalar" ] def __init__(self, typename, path, description, added, deprecated): @@ -521,6 +543,8 @@ def __str__(self): CheckRuneParamsField = ArrayField(itemtype=PrimitiveField("string", None, None, added=None, deprecated=None), 
dims=1, path=None, description=None, added=None, deprecated=None) ChainMovesExtraTagsField = ArrayField(itemtype=PrimitiveField("string", None, None, added=None, deprecated=None), dims=1, path=None, description=None, added=None, deprecated=None) ClnrestRegisterPathParamsField = PrimitiveField("string_map", None, None, added=None, deprecated=None) +JsonIdField = PrimitiveField("json_scalar", None, None, added=None, deprecated=None) +JsonObjectOrArrayField = PrimitiveField("json_object_or_array", None, None, added=None, deprecated=None) # TlvStreams are special, they don't have preset dict-keys, rather # they can specify `u64` keys pointing to hex payloads. So the schema @@ -557,6 +581,10 @@ def __str__(self): 'CheckRune.params': CheckRuneParamsField, "ListChainMoves.chainmoves[].extra_tags": ChainMovesExtraTagsField, "Clnrest-Register-Path.rune_restrictions.params": ClnrestRegisterPathParamsField, + "rpc_command.replace.id": JsonIdField, + "rpc_command.replace.params": JsonObjectOrArrayField, + "rpc_command.rpc_command.id": JsonIdField, + "rpc_command.rpc_command.params": JsonObjectOrArrayField } diff --git a/contrib/msggen/msggen/patch.py b/contrib/msggen/msggen/patch.py index 98c80050ed1c..cac192b2c377 100644 --- a/contrib/msggen/msggen/patch.py +++ b/contrib/msggen/msggen/patch.py @@ -50,6 +50,14 @@ def recurse(f: model.Field, inherited_added: Optional[str] = None, inherited_dep root_deprecated = root_deprecated[0] recurse(n.request, inherited_added=root_added, inherited_deprecated=root_deprecated) recurse(n.response, inherited_added=root_added, inherited_deprecated=root_deprecated) + for h in service.hooks: + root_added = getattr(h.request, 'added', None) or getattr(h, 'added', None) + root_deprecated = getattr(h.request, 'deprecated', None) or getattr(h, 'deprecated', None) + if isinstance(root_deprecated, list): + assert len(root_deprecated) == 2 + root_deprecated = root_deprecated[0] + recurse(h.request, inherited_added=root_added, 
inherited_deprecated=root_deprecated) + recurse(h.response, inherited_added=root_added, inherited_deprecated=root_deprecated) class VersionAnnotationPatch(Patch): diff --git a/contrib/msggen/msggen/schema.json b/contrib/msggen/msggen/schema.json index dc2744813c27..88b241c8e00d 100644 --- a/contrib/msggen/msggen/schema.json +++ b/contrib/msggen/msggen/schema.json @@ -39938,5 +39938,2270 @@ } } } + }, + "hooks": { + "commitment_revocation.json": { + "$schema": "../rpc-schema-draft.json", + "added": "pre-v0.10.1", + "type": "object", + "notification": "commitment_revocation", + "title": "Hook fired when a commitment transaction is revoked", + "description": [ + "The **commitment_revocation** hook is called whenever a channel state is updated, and the old state was revoked. State updates in Lightning consist of the following steps:", + "", + "1. Proposal of a new state commitment in the form of a commitment transaction", + "2. Exchange of signatures for the agreed upon commitment transaction", + "3. Verification that the signatures match the commitment transaction", + "4. Exchange of revocation secrets that could be used to penalize an eventual misbehaving party", + "", + "The `commitment_revocation` hook is used to inform the plugin about the state transition being completed, and deliver the penalty transaction.", + "The penalty transaction could then be sent to a watchtower that automatically reacts in case one party attempts to settle using a revoked commitment.", + "", + "This is a chained hook: multiple plugins may be registered." + ], + "request": { + "additionalProperties": false, + "required": [ + "commitment_txid", + "penalty_tx", + "channel_id", + "commitnum" + ], + "properties": { + "commitment_txid": { + "type": "txid", + "description": [ + "The txid of the revoked commitment transaction." 
+ ] + }, + "penalty_tx": { + "type": "hex", + "description": [ + "The penalty transaction that can spend the revoked commitment.", + "Can be sent to a watchtower for enforcement." + ] + }, + "channel_id": { + "added": "v0.10.2", + "type": "hash", + "description": [ + "The channel_id for which the revocation occurred." + ] + }, + "commitnum": { + "added": "v0.10.2", + "type": "u64", + "description": [ + "The commitment number identifying the revoked state." + ] + } + } + }, + "response": { + "additionalProperties": false, + "required": [ + "result" + ], + "properties": { + "result": { + "type": "string", + "enum": [ + "continue" + ], + "description": [ + "Plugins should always return \"continue\", otherwise subsequent hook subscribers would not get called." + ] + } + } + } + }, + "custommsg.json": { + "$schema": "../rpc-schema-draft.json", + "added": "pre-v0.10.1", + "type": "object", + "notification": "custommsg", + "title": "Hook for handling custom peer messages", + "description": [ + "The **custommsg** hook is the receiving counterpart to the sendcustommsg RPC method and is called whenever a peer sends a custom message that is not handled internally by Core Lightning.", + "", + "The goal of these two components is to allow the implementation of custom protocols or prototypes on top of a Core Lightning node, without having to change the node's implementation itself.", + "", + "Messages are restricted to odd-numbered types and must not conflict with internally handled message types.", + "These limitations are in place in order to avoid conflicts with the internal state tracking, and avoiding disconnections or channel closures, since odd-numbered message can be ignored by nodes (see \"it's ok to be odd\" in BOLT #1 for details).", + "", + "Note that if the hook registration specifies \"filters\" then that should be a JSON array of message numbers, and the hook will only be called for those.", + "Otherwise, the hook is called for all messages not handled internally. 
(added in v25.12)", + "", + "This is a chained hook and MUST return `{\"result\": \"continue\"}`." + ], + "request": { + "required": [ + "peer_id", + "payload" + ], + "additionalProperties": false, + "properties": { + "peer_id": { + "type": "pubkey", + "description": [ + "The `node_id` of the peer that sent the message." + ] + }, + "payload": { + "type": "hex", + "description": [ + "The raw message payload as a hex string.", + "", + "The first two bytes encode the message type (big-endian), followed by the message payload.", + "The plugin must implement the parsing of the message, including the type prefix, since Core Lightning does not know how to parse the message." + ] + } + } + }, + "response": { + "required": [ + "result" + ], + "additionalProperties": false, + "properties": { + "result": { + "type": "string", + "enum": [ + "continue" + ], + "description": [ + "Must always be `continue`. Any other value will cause the hook to fail." + ] + } + } + }, + "see_also": [ + "lightning-sendcustommsg(7)" + ] + }, + "db_write.json": { + "$schema": "../rpc-schema-draft.json", + "added": "pre-v0.10.1", + "type": "object", + "notification": "db_write", + "title": "Hook fired before database writes are committed", + "description": [ + "The **db_write** hook is called whenever a change is about to be committed to the database, if you are using a SQLITE3 database (the default).", + "This hook will be useless (the \"writes\" field will always be empty) if you are using a PostgreSQL database.", + "", + "This hook is extremely restricted:", + "1. A plugin registering for this hook should not perform anything that may cause a database operation in response (pretty much, anything but logging).", + "2. A plugin registering for this hook should not register for other hooks or commands, as these may become intermingled and break rule #1.", + "3. 
The hook will be called before your plugin is initialized!", + "", + "This hook is strongly synchronous: `lightningd` will halt almost all processing until all plugins have responded.", + "", + "This hook is intended for creating continuous backups. The intent is that your backup plugin maintains three pieces of information (possibly in separate files):", + "1. A snapshot of the database", + "2. A log of database queries that will bring that snapshot up-to-date", + "3. The previous `data_version`", + "", + "`data_version` is an unsigned 32-bit number that will always increment by 1 each time `db_write` is called. Note that this will wrap around on the limit of 32-bit numbers.", + "", + "`writes` is an array of strings, each string being a database query that modifies the database.", + "If the `data_version` above is validated correctly, then you can simply append this to the log of database queries.", + "", + "Your plugin MUST validate the `data_version`. It MUST keep track of the previous `data_version` it got, and:", + "1. If the new `data_version` is exactly one higher than the previous, then this is the ideal case and nothing bad happened and we should save this and continue.", + "2. If the new `data_version` is exactly the same value as the previous, then the previous set of queries was not committed.", + " Your plugin MAY overwrite the previous set of queries with the current set, or it MAY overwrite its entire backup with a new snapshot of the database and the current `writes` array", + " (treating this case as if `data_version` were two or more higher than the previous).", + "3. If the new `data_version` is less than the previous, your plugin MUST halt and catch fire, and have the operator inspect what exactly happened here.", + "4. 
Otherwise, some queries were lost and your plugin SHOULD recover by creating a new snapshot of the database: copy the database file, back up the given `writes` array, then delete", + " (or atomically rename if in a POSIX filesystem) the previous backups of the database and SQL statements, or you MAY fail the hook to abort `lightningd`.", + "", + "The \"rolling up\" of the database could be done periodically as well if the log of SQL statements has grown large.", + "", + "Any response other than `{\"result\": \"continue\"}` will cause `lightningd` to error without committing to the database! This is the expected way to halt and catch fire.", + "", + "`db_write` is a parallel-chained hook, i.e., multiple plugins can register it, and all of them will be invoked simultaneously without regard for order of registration.", + "The hook is considered handled if all registered plugins return `{\"result\": \"continue\"}`. If any plugin returns anything else, `lightningd` will error without committing to the database." + ], + "request": { + "additionalProperties": false, + "required": [ + "data_version", + "writes" + ], + "properties": { + "data_version": { + "type": "u32", + "description": [ + "A monotonically increasing 32-bit unsigned integer representing the database version.", + "Wraps around at the 32-bit limit." + ] + }, + "writes": { + "type": "array", + "description": [ + "Array of SQL statements that modify the database.", + "If using PostgreSQL, this array will always be empty.", + "Each entry is a SQL query string." + ], + "items": { + "type": "string" + } + } + } + }, + "response": { + "additionalProperties": false, + "required": [ + "result" + ], + "properties": { + "result": { + "type": "string", + "enum": [ + "continue" + ], + "description": [ + "Must be \"continue\" for the database commit to proceed.", + "Any other value will abort the commit and cause `lightningd` to error." 
+ ] + } + } + } + }, + "htlc_accepted.json": { + "$schema": "../rpc-schema-draft.json", + "added": "pre-v0.10.1", + "type": "object", + "notification": "htlc_accepted", + "title": "Hook for handling incoming HTLCs", + "description": [ + "The **htlc_accepted** hook is called whenever an incoming HTLC is accepted.", + "", + "The plugin can inspect the HTLC and decide to continue processing, fail it, or resolve it.", + "", + "lightningd will replay the HTLCs for which it doesn't have a final verdict during startup.", + "This means that, if the plugin response wasn't processed before the HTLC was forwarded, failed, or resolved,", + "then the plugin may see the same HTLC again during startup. It is therefore paramount that the plugin is idempotent if it talks to an external system.", + "", + "This is a chained hook: plugins are called in order until one returns a result other than `continue`.", + "After this the event is considered handled and the remaining plugins are skipped." + ], + "request": { + "required": [ + "onion", + "htlc" + ], + "additionalProperties": false, + "properties": { + "peer_id": { + "added": "v25.12", + "type": "pubkey", + "description": [ + "The `node_id` of the peer that offered this HTLC.", + "This field may be absent if the peer is unknown." + ] + }, + "onion": { + "type": "object", + "additionalProperties": false, + "required": [ + "payload", + "next_onion", + "shared_secret" + ], + "properties": { + "payload": { + "type": "hex", + "description": [ + "The raw unparsed onion payload received from the sender." + ] + }, + "type": { + "type": "string", + "enum": [ + "tlv" + ], + "description": [ + "Indicates that the payload is TLV formatted.", + "Only present if the payload was successfully parsed." + ] + }, + "short_channel_id": { + "type": "short_channel_id", + "description": [ + "Determines the channel that the sender is hinting should be used next.", + "Not present if this node is the final destination." 
+ ] + }, + "next_node_id": { + "type": "pubkey", + "description": [ + "The node_id of the next hop.", + "Only present if specified in the onion payload." + ] + }, + "forward_msat": { + "type": "msat", + "description": [ + "The amount to forward to the next hop." + ] + }, + "outgoing_cltv_value": { + "type": "u32", + "description": [ + "Determines what the CLTV value for the HTLC that we forward to the next hop should be." + ] + }, + "total_msat": { + "type": "msat", + "description": [ + "The total payment amount.", + "Only present for final recipients using modern TLV payloads." + ] + }, + "payment_secret": { + "type": "secret", + "description": [ + "The payment secret (which the payer should have obtained from the invoice) provided by the sender.", + "Only present for final recipients." + ] + }, + "payment_metadata": { + "type": "hex", + "description": [ + "Additional metadata provided in the onion payload.", + "Only present if included by the sender." + ] + }, + "next_onion": { + "type": "hex", + "description": [ + "The fully processed onion that we should be sending to the next hop as part of the outgoing HTLC.", + "Processed in this case means that we took the incoming onion, decrypted it, extracted the payload destined for us, and serialised the resulting onion again." + ] + }, + "shared_secret": { + "type": "secret", + "description": [ + "The shared secret used to decrypt the incoming onion.", + "It is shared with the sender that constructed the onion." + ] + } + } + }, + "htlc": { + "type": "object", + "additionalProperties": false, + "required": [ + "short_channel_id", + "id", + "amount_msat", + "cltv_expiry", + "cltv_expiry_relative", + "payment_hash" + ], + "properties": { + "short_channel_id": { + "added": "v0.12.0", + "type": "short_channel_id", + "description": [ + "The channel this HTLC is coming from." + ] + }, + "id": { + "added": "v0.12.0", + "type": "u64", + "description": [ + "The unique HTLC identifier assigned by the channel peer." 
+ ] + }, + "amount_msat": { + "added": "v0.12.0", + "type": "msat", + "description": [ + "The amount received in this HTLC.", + "This amount minus the `forward_msat` amount is the fee that will stay with us." + ] + }, + "cltv_expiry": { + "type": "u32", + "description": [ + "Determines when the HTLC reverts back to the sender.", + "`cltv_expiry` minus `outgoing_cltv_value` should be equal or larger than our `cltv_delta` setting." + ] + }, + "cltv_expiry_relative": { + "type": "u32", + "description": [ + "Hints how much time we still have to claim the HTLC.", + "It is the `cltv_expiry` minus the current blockheight and is passed along mainly to avoid the plugin having to look up the current blockheight." + ] + }, + "payment_hash": { + "type": "hash", + "description": [ + "The payment hash used to identify the payment." + ] + }, + "extra_tlvs": { + "added": "v25.09", + "type": "hex", + "description": [ + "Optional TLV stream attached to the HTLC." + ] + } + } + }, + "forward_to": { + "type": "hash", + "description": [ + "The `channel_id` we intend to forward the HTLC to.", + "Will not be present if the `short_channel_id` was invalid or we were the final destination." + ] + } + } + }, + "response": { + "required": [ + "result" + ], + "additionalProperties": false, + "properties": { + "result": { + "type": "string", + "enum": [ + "continue", + "fail", + "resolve" + ], + "description": [ + "Determines how the HTLC should be handled.", + "", + "`continue` means that the plugin does not want to do anything special and lightningd should continue processing it normally,", + "i.e., resolve the payment if we're the recipient, or attempt to forward it otherwise. Notice that the usual checks such as sufficient fees and CLTV deltas are still enforced.", + "", + "It can also replace the onion.payload by specifying a payload in the response. 
Note that this is always a TLV-style payload,", + "so unlike onion.payload there is no length prefix (and it must be at least 4 hex digits long). This will be re-parsed;", + "it's useful for removing onion fields which a plugin doesn't want lightningd to consider.", + "", + "It can also specify forward_to in the response, replacing the destination.", + "This usually only makes sense if it wants to choose an alternate channel to the same next peer, but is useful if the payload is also replaced.", + "", + "Also, it can specify extra_tlvs in the response. This will replace the TLV-stream update_add_htlc_tlvs in the update_add_htlc message for forwarded htlcs.", + "", + "If the node is the final destination, the plugin can also replace the amount of the invoice that belongs to the payment_hash by specifying invoice_msat.", + "", + "", + "`fail` will tell lightningd to fail the HTLC with a given hex-encoded `failure_message` (please refer to BOLT #4 for details: `incorrect_or_unknown_payment_details` is the most common).", + "", + "Instead of `failure_message` the response can contain a hex-encoded `failure_onion` that will be used instead (please refer to the BOLT #4 for details).", + "This can be used, for example, if you're writing a bridge between two Lightning Networks. Note that lightningd will apply the obfuscation step to the value", + "returned here with its own shared secret (and key type `ammag`) before returning it to the previous hop.", + "", + "", + "`resolve` instructs lightningd to claim the HTLC by providing the preimage matching the `payment_hash` presented in the call.", + "Notice that the plugin must ensure that the `payment_key` really matches the `payment_hash` since lightningd will not check and the wrong value could result in the channel being closed." + ] + }, + "payload": { + "type": "hex", + "description": [ + "Replacement TLV payload to use instead of the original onion payload." 
+ ] + }, + "forward_to": { + "type": "hash", + "description": [ + "Overrides the forwarding destination." + ] + }, + "extra_tlvs": { + "added": "v25.09", + "type": "hex", + "description": [ + "Replacement TLV stream for forwarded HTLCs." + ] + }, + "invoice_msat": { + "added": "v25.12", + "type": "msat", + "description": [ + "Overrides the invoice amount for final destination checks." + ] + }, + "failure_message": { + "type": "hex", + "description": [ + "Failure message to return if result is `fail`." + ] + }, + "failure_onion": { + "type": "hex", + "description": [ + "Serialized failure onion to return if result is `fail`." + ] + }, + "payment_key": { + "type": "secret", + "description": [ + "Preimage used to resolve the HTLC if result is `resolve`." + ] + } + }, + "if": { + "properties": { + "result": { + "enum": [ + "fail" + ] + } + } + }, + "then": { + "anyOf": [ + { + "required": [ + "failure_message" + ] + }, + { + "required": [ + "failure_onion" + ] + } + ] + }, + "else": { + "if": { + "properties": { + "result": { + "enum": [ + "resolve" + ] + } + } + }, + "then": { + "required": [ + "payment_key" + ] + } + } + } + }, + "invoice_payment.json": { + "$schema": "../rpc-schema-draft.json", + "added": "pre-v0.10.1", + "type": "object", + "notification": "invoice_payment", + "title": "Hook fired when a payment for an invoice is received", + "description": [ + "The **invoice_payment** hook is called whenever a valid payment for an unpaid invoice has arrived.", + "", + "The hook is deliberately sparse. 
Plugins can use `listinvoices` to retrieve additional information.", + "", + "The plugin can:", + "- accept the payment by returning {\"result\": \"continue\"}", + "- reject the payment with a generic error using {\"result\": \"reject\"}", + "- reject the payment with a custom BOLT 4 failure message using the `failure_message` field", + "", + "If `failure_message` is provided, the payment will be failed with that message.", + "If result is \"reject\" and no `failure_message` is provided, the payment fails with `incorrect_or_unknown_payment_details`.", + "`failure_message` must NOT be provided when result is \"continue\".", + "", + "Before version 23.11 the msat field was encoded as a string with an 'msat' suffix." + ], + "request": { + "additionalProperties": false, + "required": [ + "payment" + ], + "properties": { + "payment": { + "type": "object", + "additionalProperties": true, + "required": [ + "label", + "preimage", + "msat" + ], + "properties": { + "label": { + "type": "string", + "description": [ + "Unique label identifying the invoice." + ] + }, + "preimage": { + "type": "secret", + "description": [ + "The payment preimage." + ] + }, + "msat": { + "type": "msat", + "description": [ + "Amount paid in millisatoshis." + ] + } + }, + "description": [ + "Basic payment information.", + "Additional TLV-derived fields may be included when running in developer mode." + ] + } + } + }, + "response": { + "additionalProperties": false, + "required": [ + "result" + ], + "properties": { + "result": { + "type": "string", + "enum": [ + "continue", + "reject" + ], + "description": [ + "Controls whether the payment is accepted or rejected.", + "\"continue\" accepts the payment.", + "\"reject\" fails the payment." + ] + }, + "failure_message": { + "type": "hex", + "description": [ + "Optional BOLT 4 failure message.", + "Used to provide a specific failure reason when rejecting the payment." 
+ ] + } + }, + "if": { + "properties": { + "result": { + "type": "string", + "enum": [ + "reject" + ] + } + }, + "required": [ + "result" + ] + }, + "then": { + "properties": { + "failure_message": { + "type": "hex" + } + } + } + }, + "see_also": [ + "lightning-listinvoices(7)" + ] + }, + "onion_message_recv.json": { + "$schema": "../rpc-schema-draft.json", + "added": "pre-v0.10.1", + "type": "object", + "notification": "onion_message_recv", + "title": "Hook for receiving unsolicited onion messages", + "description": [ + "The **onion_message_recv** hook is used for unsolicited onion messages (where the source knows that it is sending to this node).", + "", + "Replies MUST be ignored unless they use the correct path (see onion_message_recv_secret).", + "", + "Returning anything other than {\"result\": \"continue\"} prevents further hook processing." + ], + "request": { + "required": [ + "onion_message" + ], + "additionalProperties": false, + "properties": { + "onion_message": { + "type": "object", + "additionalProperties": false, + "properties": { + "reply_blindedpath": { + "type": "object", + "description": [ + "A blinded return path provided by the sender.", + "", + "This allows replying without revealing the recipient's identity or network position.", + "If present, plugins must use this path if they construct a reply onion message." + ], + "additionalProperties": false, + "properties": { + "first_node_id": { + "type": "pubkey", + "description": [ + "The introduction node of the blinded path.", + "This is the first hop to which the reply should be sent.", + "", + "Only one of `first_node_id` or the pair `first_scid` and `first_scid_dir` is present." + ] + }, + "first_scid": { + "type": "short_channel_id", + "description": [ + "Alternative to `first_node_id`: identifies the introduction point via a channel.", + "", + "Only one of `first_node_id` or the pair `first_scid` and `first_scid_dir` is present." 
+ ] + }, + "first_scid_dir": { + "type": "u32", + "description": [ + "Direction of the `short_channel_id` (0 or 1).", + "", + "Only one of `first_node_id` or the pair `first_scid` and `first_scid_dir` is present." + ] + }, + "first_path_key": { + "added": "v24.11", + "type": "pubkey", + "description": [ + "Initial public key used to derive shared secrets with the first hop.", + "", + "This key allows each hop to derive per-hop encryption keys and blinding factors." + ] + }, + "hops": { + "type": "array", + "description": [ + "Sequence of blinded hops forming the path.", + "", + "Each hop contains a blinded node identifier and encrypted routing instructions." + ], + "items": { + "type": "object", + "required": [ + "blinded_node_id", + "encrypted_recipient_data" + ], + "additionalProperties": false, + "properties": { + "blinded_node_id": { + "type": "pubkey", + "description": [ + "Blinded public key representing the hop.", + "", + "The actual node identity is hidden using a blinding factor." + ] + }, + "encrypted_recipient_data": { + "type": "hex", + "description": [ + "Encrypted TLV payload for this hop.", + "", + "Contains instructions (e.g., next hop) encrypted with a shared secret derived from the path key." + ] + } + } + } + } + } + }, + "invoice_request": { + "type": "hex", + "description": [ + "BOLT #12 `invoice_request` payload." + ] + }, + "invoice": { + "type": "hex", + "description": [ + "BOLT #12 `invoice` payload." + ] + }, + "invoice_error": { + "type": "hex", + "description": [ + "BOLT #12 `invoice_error` payload." + ] + }, + "unknown_fields": { + "type": "array", + "description": [ + "Unknown or unparsed TLV fields from the onion message.", + "", + "Plugins may inspect these for experimental or custom extensions." + ], + "items": { + "type": "object", + "required": [ + "number", + "value" + ], + "additionalProperties": false, + "properties": { + "number": { + "type": "u64", + "description": [ + "TLV type number." 
+ ] + }, + "value": { + "type": "hex", + "description": [ + "Raw TLV value." + ] + } + } + } + } + } + } + } + }, + "response": { + "required": [ + "result" + ], + "additionalProperties": false, + "properties": { + "result": { + "type": "string", + "description": [ + "Return \"continue\" to pass the message to the next plugin.", + "Returning any other value stops further hook processing." + ] + } + } + } + }, + "onion_message_recv_secret.json": { + "$schema": "../rpc-schema-draft.json", + "added": "pre-v0.10.1", + "type": "object", + "notification": "onion_message_recv_secret", + "title": "Hook for receiving onion messages via blinded paths", + "description": [ + "The **onion_message_recv_secret** hook is used when an onion message is received via a blinded path previously provided by this node.", + "", + "The presence of `pathsecret` allows the plugin to authenticate that the message used the intended return path.", + "", + "Replies MUST only be sent when the `pathsecret` matches expectations.", + "", + "Returning anything other than {\"result\": \"continue\"} prevents further hook processing." + ], + "request": { + "required": [ + "onion_message" + ], + "additionalProperties": false, + "properties": { + "onion_message": { + "type": "object", + "required": [ + "pathsecret" + ], + "additionalProperties": false, + "properties": { + "pathsecret": { + "type": "secret", + "description": [ + "Shared secret identifying the blinded path.", + "", + "Used to verify that the sender used a path previously provided by this node.", + "This prevents probing attacks and unauthorized replies." + ] + }, + "reply_blindedpath": { + "type": "object", + "description": [ + "A blinded return path provided by the sender.", + "", + "This allows replying without revealing the recipient's identity or network position.", + "If present, plugins must use this path if they construct a reply onion message." 
+ ], + "additionalProperties": false, + "properties": { + "first_node_id": { + "type": "pubkey", + "description": [ + "The introduction node of the blinded path.", + "This is the first hop to which the reply should be sent.", + "", + "Only one of `first_node_id` or the pair `first_scid` and `first_scid_dir` is present." + ] + }, + "first_scid": { + "type": "short_channel_id", + "description": [ + "Alternative to `first_node_id`: identifies the introduction point via a channel.", + "", + "Only one of `first_node_id` or the pair `first_scid` and `first_scid_dir` is present." + ] + }, + "first_scid_dir": { + "type": "u32", + "description": [ + "Direction of the `short_channel_id` (0 or 1).", + "", + "Only one of `first_node_id` or the pair `first_scid` and `first_scid_dir` is present." + ] + }, + "first_path_key": { + "added": "v24.11", + "type": "pubkey", + "description": [ + "Initial public key used to derive shared secrets with the first hop.", + "", + "This key allows each hop to derive per-hop encryption keys and blinding factors." + ] + }, + "hops": { + "type": "array", + "description": [ + "Sequence of blinded hops forming the path.", + "", + "Each hop contains a blinded node identifier and encrypted routing instructions." + ], + "items": { + "type": "object", + "required": [ + "blinded_node_id", + "encrypted_recipient_data" + ], + "additionalProperties": false, + "properties": { + "blinded_node_id": { + "type": "pubkey", + "description": [ + "Blinded public key representing the hop.", + "", + "The actual node identity is hidden using a blinding factor." + ] + }, + "encrypted_recipient_data": { + "type": "hex", + "description": [ + "Encrypted TLV payload for this hop.", + "", + "Contains instructions (e.g., next hop) encrypted with a shared secret derived from the path key." + ] + } + } + } + } + } + }, + "invoice_request": { + "type": "hex", + "description": [ + "BOLT #12 `invoice_request` payload." 
+ ] + }, + "invoice": { + "type": "hex", + "description": [ + "BOLT #12 `invoice` payload." + ] + }, + "invoice_error": { + "type": "hex", + "description": [ + "BOLT #12 `invoice_error` payload." + ] + }, + "unknown_fields": { + "type": "array", + "description": [ + "Unknown or unparsed TLV fields from the onion message.", + "", + "Plugins may inspect these for experimental or custom extensions." + ], + "items": { + "type": "object", + "required": [ + "number", + "value" + ], + "additionalProperties": false, + "properties": { + "number": { + "type": "u64", + "description": [ + "TLV type number." + ] + }, + "value": { + "type": "hex", + "description": [ + "Raw TLV value." + ] + } + } + } + } + } + } + } + }, + "response": { + "required": [ + "result" + ], + "additionalProperties": false, + "properties": { + "result": { + "type": "string", + "description": [ + "Return \"continue\" to pass the message to the next plugin.", + "Returning any other value stops further hook processing." + ] + } + } + } + }, + "openchannel.json": { + "$schema": "../rpc-schema-draft.json", + "added": "pre-v0.10.1", + "type": "object", + "notification": "openchannel", + "title": "Hook fired when a peer proposes opening a channel using v1 protocol", + "description": [ + "The **openchannel** hook is called whenever a remote peer tries to fund a channel using the v1 protocol, after passing basic sanity checks.", + "", + "The payload mirrors the BOLT #2 `open_channel` message and may include additional fields defined by the protocol.", + "", + "Plugins can reject the channel or modify certain parameters before accepting it.", + "", + "This is a chained hook: the first plugin returning a non-\"continue\" result terminates the chain.", + "Mutation fields (`close_to`, `mindepth`, `reserve`) are only applied from the first plugin that sets them.", + "Additional fields may be present in the request as defined by BOLT #2.", + "Providing invalid values (e.g., invalid `close_to` address) will cause 
 lightningd to exit." + ], + "request": { + "additionalProperties": false, + "required": [ + "openchannel" + ], + "properties": { + "openchannel": { + "type": "object", + "additionalProperties": true, + "required": [ + "id", + "funding_msat", + "push_msat", + "dust_limit_msat", + "max_htlc_value_in_flight_msat", + "channel_reserve_msat", + "htlc_minimum_msat", + "feerate_per_kw", + "to_self_delay", + "max_accepted_htlcs", + "channel_flags", + "channel_type" + ], + "properties": { + "id": { + "type": "pubkey", + "description": [ + "The peer's node_id." + ] + }, + "funding_msat": { + "type": "msat", + "description": [ + "Funding amount proposed by the peer." + ] + }, + "push_msat": { + "type": "msat", + "description": [ + "Amount pushed to us at channel open." + ] + }, + "dust_limit_msat": { + "type": "msat", + "description": [ + "Dust limit for outputs." + ] + }, + "max_htlc_value_in_flight_msat": { + "type": "msat", + "description": [ + "Maximum HTLC value allowed in flight." + ] + }, + "channel_reserve_msat": { + "type": "msat", + "description": [ + "Channel reserve required by the peer." + ] + }, + "htlc_minimum_msat": { + "type": "msat", + "description": [ + "Minimum HTLC value." + ] + }, + "feerate_per_kw": { + "type": "u32", + "description": [ + "Feerate in satoshis per kw." + ] + }, + "to_self_delay": { + "type": "u32", + "description": [ + "The number of blocks before they can take their funds if they close unilaterally." + ] + }, + "max_accepted_htlcs": { + "type": "u32", + "description": [ + "Maximum number of HTLCs the remote is allowed to offer at once." + ] + }, + "channel_flags": { + "type": "u8", + "description": [ + "Channel flags as defined in BOLT #7." + ] + }, + "shutdown_scriptpubkey": { + "type": "hex", + "description": [ + "Optional shutdown scriptPubKey proposed by the peer." 
+ ] + }, + "channel_type": { + "added": "v25.09", + "type": "object", + "additionalProperties": false, + "required": [ + "bits", + "names" + ], + "properties": { + "bits": { + "type": "array", + "description": [ + "List of feature bit numbers that define the negotiated channel type.", + "Each value represents a feature bit as defined in BOLT #2." + ], + "items": { + "type": "u32", + "description": [ + "Feature bit number." + ] + } + }, + "names": { + "type": "array", + "description": [ + "Human-readable names corresponding to each feature bit.", + "Names are implementation-defined and may evolve over time." + ], + "items": { + "type": "string", + "description": [ + "Name of the feature bit." + ] + } + } + } + } + } + } + } + }, + "response": { + "additionalProperties": false, + "required": [ + "result" + ], + "properties": { + "result": { + "type": "string", + "enum": [ + "continue", + "reject" + ], + "description": [ + "Whether to accept or reject the channel opening request." + ] + }, + "error_message": { + "type": "string", + "description": [ + "Optional error message sent to the peer when rejecting." + ] + }, + "close_to": { + "type": "string", + "description": [ + "Bitcoin address for mutual close output.", + "Must be valid for the current chain or lightningd will exit with an error." + ] + }, + "mindepth": { + "added": "v0.12.0", + "type": "u32", + "description": [ + "`mindepth` is the number of confirmations to require before making the channel usable.", + "Notice that setting this to 0 (zeroconf) or some other low value might expose you to double-spending issues,", + "so only lower this value from the default if you trust the peer not to double-spend, or you reject incoming payments,", + "including forwards, until the funding is confirmed." 
+ ] + }, + "reserve": { + "added": "v22.11", + "type": "sat", + "description": [ + "`reserve` is an absolute value for the amount (in satoshi) in the channel that the peer must keep on their side.", + "This ensures that they always have something to lose, so only lower this below the 1% of funding amount if you trust the peer.", + "The protocol requires this to be larger than the dust limit, hence it will be adjusted to be the dust limit if the specified value is below." + ] + } + }, + "if": { + "properties": { + "result": { + "type": "string", + "enum": [ + "reject" + ] + } + }, + "required": [ + "result" + ] + }, + "then": { + "properties": { + "error_message": { + "type": "string" + } + } + } + } + }, + "openchannel2.json": { + "$schema": "../rpc-schema-draft.json", + "added": "pre-v0.10.1", + "type": "object", + "notification": "openchannel2", + "title": "Hook fired when a peer proposes opening a channel using v2 protocol", + "description": [ + "The **openchannel2** hook is called whenever a remote peer tries to fund a channel using the v2 (dual-funding) protocol, after passing basic sanity checks.", + "", + "The payload mirrors the BOLT #2 `open_channel` message and dual-funding extensions.", + "There may be additional fields present depending on negotiated features.", + "", + "`requested_lease_msat`, `lease_blockheight_start`, and `node_blockheight` are only present if the peer requested a funding lease (`option_will_fund`).", + "", + "The plugin can reject the channel, accept it, or contribute funds via a PSBT when accepting.", + "", + "See `plugins/funder.c` for an example of how to use this hook to contribute funds to a channel open.", + "", + "This is a chained hook: multiple plugins may be invoked.", + "Returning any result other than \"continue\" terminates the chain.", + "Only the first plugin that sets mutation fields (e.g. 
`close_to`) will have them applied.", + "Invalid `close_to` addresses will cause lightningd to exit.", + "The PSBT must be consistent with the funding transaction and respect feerate constraints." + ], + "request": { + "additionalProperties": false, + "required": [ + "openchannel2" + ], + "properties": { + "openchannel2": { + "type": "object", + "additionalProperties": true, + "required": [ + "id", + "channel_id", + "their_funding_msat", + "dust_limit_msat", + "max_htlc_value_in_flight_msat", + "htlc_minimum_msat", + "funding_feerate_per_kw", + "commitment_feerate_per_kw", + "feerate_our_max", + "feerate_our_min", + "to_self_delay", + "max_accepted_htlcs", + "channel_flags", + "locktime", + "channel_max_msat", + "require_confirmed_inputs", + "channel_type" + ], + "properties": { + "id": { + "type": "pubkey", + "description": [ + "The `node_id` of the peer proposing the channel." + ] + }, + "channel_id": { + "type": "hash", + "description": [ + "Temporary `channel_id` assigned for this channel negotiation." + ] + }, + "their_funding_msat": { + "type": "msat", + "description": [ + "Amount contributed by the remote peer to the channel funding transaction." + ] + }, + "dust_limit_msat": { + "type": "msat", + "description": [ + "Minimum output value below which outputs are considered dust." + ] + }, + "max_htlc_value_in_flight_msat": { + "type": "msat", + "description": [ + "Maximum total value of outstanding HTLCs allowed in the channel at any time." + ] + }, + "htlc_minimum_msat": { + "type": "msat", + "description": [ + "Minimum HTLC value the peer will accept." + ] + }, + "funding_feerate_per_kw": { + "type": "u32", + "description": [ + "Feerate (per kw) used for the funding transaction." + ] + }, + "commitment_feerate_per_kw": { + "type": "u32", + "description": [ + "Feerate (per kw) used for commitment transactions." + ] + }, + "feerate_our_max": { + "type": "u32", + "description": [ + "Maximum feerate we are willing to accept for commitment transactions." 
 + ] + }, + "feerate_our_min": { + "type": "u32", + "description": [ + "Minimum feerate we are willing to accept for commitment transactions." + ] + }, + "to_self_delay": { + "type": "u16", + "description": [ + "The number of blocks before they can take their funds if they close unilaterally." + ] + }, + "max_accepted_htlcs": { + "type": "u16", + "description": [ + "Maximum number of HTLCs the remote is allowed to offer at once." + ] + }, + "channel_flags": { + "type": "u8", + "description": [ + "Channel flags as defined in BOLT #7." + ] + }, + "locktime": { + "type": "u32", + "description": [ + "Locktime to be used in the funding transaction." + ] + }, + "shutdown_scriptpubkey": { + "type": "hex", + "description": [ + "Optional shutdown scriptPubKey provided by the peer for cooperative close." + ] + }, + "channel_max_msat": { + "type": "msat", + "description": [ + "Maximum capacity this channel is allowed to reach." + ] + }, + "requested_lease_msat": { + "type": "msat", + "description": [ + "Amount of liquidity the peer is requesting us to lease to them.", + "Only present if `option_will_fund` is negotiated." + ] + }, + "lease_blockheight_start": { + "type": "u32", + "description": [ + "Blockheight at which the lease period begins.", + "Only present if `requested_lease_msat` is present." + ] + }, + "node_blockheight": { + "type": "u32", + "description": [ + "Current blockheight of the node.", + "Used in conjunction with lease parameters.", + "Only present if `requested_lease_msat` is present." + ] + }, + "require_confirmed_inputs": { + "added": "v23.02", + "type": "boolean", + "description": [ + "Indicates whether the peer requires all funding inputs to be confirmed." 
+ ] + }, + "channel_type": { + "added": "v25.09", + "type": "object", + "additionalProperties": false, + "required": [ + "bits", + "names" + ], + "properties": { + "bits": { + "type": "array", + "description": [ + "List of feature bit numbers that define the negotiated channel type.", + "Each value represents a feature bit as defined in BOLT #2." + ], + "items": { + "type": "u32", + "description": [ + "Feature bit number." + ] + } + }, + "names": { + "type": "array", + "description": [ + "Human-readable names corresponding to each feature bit.", + "Names are implementation-defined and may evolve over time." + ], + "items": { + "type": "string", + "description": [ + "Name of the feature bit." + ] + } + } + } + } + } + } + } + }, + "response": { + "additionalProperties": false, + "required": [ + "result" + ], + "properties": { + "result": { + "type": "string", + "enum": [ + "continue", + "reject" + ], + "description": [ + "Indicates whether to accept or reject the channel proposal.", + "Returning \"continue\" allows the channel negotiation to proceed.", + "Returning \"reject\" aborts the channel opening." + ] + }, + "error_message": { + "type": "string", + "description": [ + "Error message sent to the peer when rejecting the channel.", + "Only valid if result is \"reject\"." + ] + }, + "close_to": { + "type": "string", + "description": [ + "Bitcoin address to which funds will be sent on cooperative close.", + "Must be valid for the current chain or lightningd will exit with an error." + ] + }, + "psbt": { + "type": "string", + "description": [ + "Partially Signed Bitcoin Transaction contributing inputs and outputs for the funding transaction.", + "Used when the plugin contributes funds to the channel." + ] + }, + "our_funding_msat": { + "type": "msat", + "description": [ + "Amount we contribute to the channel funding.", + "This amount must NOT be included in any outputs in the provided PSBT.", + "Change outputs must be included separately." 
+ ] + } + }, + "if": { + "properties": { + "result": { + "type": "string", + "enum": [ + "reject" + ] + } + }, + "required": [ + "result" + ] + }, + "then": { + "properties": { + "error_message": { + "type": "string" + } + } + } + } + }, + "openchannel2_changed.json": { + "$schema": "../rpc-schema-draft.json", + "added": "pre-v0.10.1", + "type": "object", + "notification": "openchannel2_changed", + "title": "Hook for handling updates to the dual-funding PSBT", + "description": [ + "The **openchannel2_changed** hook is called when the peer sends an updated PSBT during dual-funding channel negotiation.", + "", + "This allows plugins to inspect and modify the PSBT before it is sent back to the peer.", + "", + "The negotiation continues until neither side makes further changes to the PSBT, at which point commitment transactions are exchanged.", + "", + "See `plugins/funder.c` for an example of how to use this hook to continue a v2 channel open." + ], + "request": { + "required": [ + "openchannel2_changed" + ], + "additionalProperties": false, + "properties": { + "openchannel2_changed": { + "type": "object", + "additionalProperties": false, + "required": [ + "channel_id", + "psbt", + "require_confirmed_inputs" + ], + "properties": { + "channel_id": { + "type": "hash", + "description": [ + "The temporary channel_id identifying the channel being negotiated." + ] + }, + "psbt": { + "type": "string", + "description": [ + "The current Partially Signed Bitcoin Transaction (PSBT) representing the funding transaction.", + "This PSBT includes contributions from both peers and may be modified." + ] + }, + "require_confirmed_inputs": { + "added": "v23.02", + "type": "boolean", + "description": [ + "Indicates whether the remote peer requires all inputs in the PSBT to be confirmed.", + "If true, the plugin must avoid adding unconfirmed inputs." 
+ ] + } + } + } + } + }, + "response": { + "required": [ + "result", + "psbt" + ], + "additionalProperties": false, + "properties": { + "result": { + "type": "string", + "enum": [ + "continue" + ], + "description": [ + "Must be set to `continue` to proceed with the channel opening negotiation." + ] + }, + "psbt": { + "type": "string", + "description": [ + "The updated PSBT to send back to the peer.", + "If no modifications are made, this should be identical to the input PSBT." + ] + } + } + }, + "examples": [ + { + "request": { + "id": "example:openchannel2_changed#1", + "method": "openchannel2_changed", + "params": { + "openchannel2_changed": { + "channel_id": "252d1b0a1e57895e841...", + "psbt": "cHNidP8BADMCAAAAAQ+yBipSVZr...", + "require_confirmed_inputs": true + } + } + }, + "response": { + "result": "continue", + "psbt": "cHNidP8BADMCAAAAAQ+yBipSVZr..." + } + } + ] + }, + "openchannel2_sign.json": { + "$schema": "../rpc-schema-draft.json", + "added": "pre-v0.10.1", + "type": "object", + "notification": "openchannel2_sign", + "title": "Hook for signing the dual-funding PSBT", + "description": [ + "The **openchannel2_sign** hook is called after commitment transactions have been received during dual-funding channel establishment.", + "", + "The plugin is expected to sign any inputs it owns in the provided PSBT and return the updated PSBT.", + "", + "If no inputs need to be signed, the original PSBT should be returned unchanged.", + "", + "Once both sides have provided signatures, the funding transaction will be broadcast.", + "", + "See `plugins/funder.c` for an example of how to use this hook to sign a funding transaction." 
+ ], + "request": { + "required": [ + "openchannel2_sign" + ], + "additionalProperties": false, + "properties": { + "openchannel2_sign": { + "type": "object", + "additionalProperties": false, + "required": [ + "channel_id", + "psbt" + ], + "properties": { + "channel_id": { + "type": "hash", + "description": [ + "The temporary `channel_id` identifying the channel being negotiated." + ] + }, + "psbt": { + "type": "string", + "description": [ + "The Partially Signed Bitcoin Transaction (PSBT) representing the funding transaction.", + "The plugin should add signatures for any inputs it controls." + ] + } + } + } + } + }, + "response": { + "required": [ + "result", + "psbt" + ], + "additionalProperties": false, + "properties": { + "result": { + "type": "string", + "enum": [ + "continue" + ], + "description": [ + "Must be set to `continue` to proceed with channel opening." + ] + }, + "psbt": { + "type": "string", + "description": [ + "The PSBT including any added signatures.", + "If no inputs were signed, this should be identical to the input PSBT." + ] + } + } + }, + "example_notifications": [ + { + "method": "openchannel2_sign", + "params": { + "openchannel2_sign": { + "channel_id": "252d1b0a1e57895e841...", + "psbt": "cHNidP8BADMCAAAAAQ+yBipSVZr..." + } + } + } + ], + "examples": [ + { + "request": { + "id": "example:openchannel2_sign#1", + "method": "openchannel2_sign", + "params": { + "openchannel2_sign": { + "channel_id": "252d1b0a1e57895e841...", + "psbt": "cHNidP8BADMCAAAAAQ+yBipSVZr..." + } + } + }, + "response": { + "result": "continue", + "psbt": "cHNidP8BADMCAAAAAQ+yBipSVZr..." 
+ } + } + ] + }, + "peer_connected.json": { + "$schema": "../rpc-schema-draft.json", + "added": "pre-v0.10.1", + "type": "object", + "notification": "peer_connected", + "title": "Hook fired when a peer connects and completes handshake", + "description": [ + "The **peer_connected** hook is called whenever a peer has connected and successfully completed the cryptographic handshake.", + "", + "This is a chained hook: the first plugin returning \"disconnect\" stops further processing.", + "Plugins can call `listpeers` to retrieve additional information about the peer." + ], + "request": { + "additionalProperties": false, + "required": [ + "peer" + ], + "properties": { + "peer": { + "type": "object", + "additionalProperties": false, + "required": [ + "id", + "direction", + "addr", + "features" + ], + "properties": { + "id": { + "type": "pubkey", + "description": [ + "The node_id of the connected peer." + ] + }, + "direction": { + "type": "string", + "enum": [ + "in", + "out" + ], + "description": [ + "Connection direction: `in` for incoming, `out` for outgoing." + ] + }, + "addr": { + "type": "string", + "description": [ + "The `addr` field shows the address that we are connected to ourselves, not the gossiped list of known addresses.", + "In particular this means that the port for incoming connections is an ephemeral port, that may not be available for reconnections." + ] + }, + "remote_addr": { + "type": "string", + "description": [ + "Our own address as reported by the remote peer. Helps with detecting our own IPv4 changes behind NAT." + ] + }, + "features": { + "type": "hex", + "description": [ + "Feature bits advertised by the peer, encoded as hex." + ] + } + } + } + } + }, + "response": { + "additionalProperties": false, + "required": [ + "result" + ], + "properties": { + "result": { + "type": "string", + "enum": [ + "continue", + "disconnect" + ], + "description": [ + "Whether to allow the connection to proceed or disconnect the peer." 
+ ] + }, + "error_message": { + "type": "string", + "description": [ + "Optional error message sent to the peer before disconnection.", + "Only used if result is \"disconnect\"." + ] + } + } + }, + "see_also": [ + "lightning-listpeers(7)" + ] + }, + "rbf_channel.json": { + "$schema": "../rpc-schema-draft.json", + "added": "pre-v0.10.1", + "type": "object", + "notification": "rbf_channel", + "title": "Hook for handling RBF channel funding requests", + "description": [ + "The **rbf_channel** hook is called when a peer proposes replacing the funding transaction of an existing channel using Replace-By-Fee (RBF).", + "", + "The plugin can choose to reject or continue the negotiation.", + "", + "If continuing, the plugin may contribute additional inputs and outputs by returning a PSBT and specifying an `our_funding_msat` amount.", + "", + "The `our_funding_msat` value must not be included in any output in the PSBT. Change outputs should be included and calculated using the provided `funding_feerate_per_kw`." + ], + "request": { + "required": [ + "rbf_channel" + ], + "additionalProperties": false, + "properties": { + "rbf_channel": { + "type": "object", + "additionalProperties": false, + "required": [ + "id", + "channel_id", + "their_last_funding_msat", + "their_funding_msat", + "our_last_funding_msat", + "funding_feerate_per_kw", + "feerate_our_max", + "feerate_our_min", + "channel_max_msat", + "locktime", + "require_confirmed_inputs" + ], + "properties": { + "id": { + "type": "pubkey", + "description": [ + "The `node_id` of the peer proposing the RBF." + ] + }, + "channel_id": { + "type": "hash", + "description": [ + "The `channel_id` of the channel being modified." + ] + }, + "their_last_funding_msat": { + "type": "msat", + "description": [ + "The peer's previous contribution to the funding transaction." + ] + }, + "their_funding_msat": { + "type": "msat", + "description": [ + "The peer's proposed new funding contribution." 
+ ] + }, + "our_last_funding_msat": { + "type": "msat", + "description": [ + "Our previous contribution to the funding transaction." + ] + }, + "funding_feerate_per_kw": { + "type": "u32", + "description": [ + "The feerate to use for the updated funding transaction, in satoshis per kw." + ] + }, + "feerate_our_max": { + "type": "u32", + "description": [ + "The maximum feerate we are willing to accept for the funding transaction." + ] + }, + "feerate_our_min": { + "type": "u32", + "description": [ + "The minimum feerate we are willing to accept for the funding transaction." + ] + }, + "channel_max_msat": { + "type": "msat", + "description": [ + "The maximum total channel capacity allowed for this channel." + ] + }, + "locktime": { + "type": "u32", + "description": [ + "The locktime to use for the funding transaction." + ] + }, + "requested_lease_msat": { + "type": "msat", + "description": [ + "If present, the amount of liquidity the peer is requesting us to lease.", + "This field is optional and only included if the peer requested a lease." + ] + }, + "require_confirmed_inputs": { + "added": "v23.02", + "type": "boolean", + "description": [ + "Indicates whether the remote peer requires all inputs in the PSBT to be confirmed.", + "If true, the plugin must avoid adding unconfirmed inputs." + ] + } + } + } + } + }, + "response": { + "required": [ + "result" + ], + "additionalProperties": false, + "properties": { + "result": { + "type": "string", + "enum": [ + "continue", + "reject" + ], + "description": [ + "Whether to accept or reject the RBF proposal." + ] + }, + "psbt": { + "type": "string", + "description": [ + "A PSBT containing additional inputs and outputs to contribute to the funding transaction.", + "Only valid if `result` is `continue`." + ] + }, + "our_funding_msat": { + "type": "msat", + "description": [ + "The amount we are contributing to the new funding transaction.", + "Must not be included in any output in the PSBT." 
+ ] + }, + "error_message": { + "type": "string", + "description": [ + "An error message explaining the rejection.", + "Only used if `result` is `reject` and will be sent to the peer." + ] + } + } + } + }, + "recover.json": { + "$schema": "../rpc-schema-draft.json", + "added": "v23.08", + "type": "object", + "notification": "recover", + "title": "Hook fired when node starts in recovery mode", + "description": [ + "The **recover** hook is called whenever the node is started using the --recovery flag.", + "It provides the codex32 secret used to derive the HSM secret.", + "Plugins can use this to reconnect to peers who keep your peer storage backups with them and recover state or funds.", + "", + "This hook is informational and does not allow altering execution flow.", + "Plugins are expected to perform recovery-related side effects such as reconnecting to peers." + ], + "request": { + "additionalProperties": false, + "required": [ + "codex32" + ], + "properties": { + "codex32": { + "type": "string", + "description": [ + "The codex32-encoded secret provided via --recover.", + "Used to reconstruct the node's HSM secret." + ] + } + } + }, + "response": { + "additionalProperties": false, + "required": [ + "result" + ], + "properties": { + "result": { + "type": "string", + "enum": [ + "continue" + ], + "description": [ + "Returning \"continue\" resumes normal execution." + ] + } + } + } + }, + "rpc_command.json": { + "$schema": "../rpc-schema-draft.json", + "added": "pre-v0.10.1", + "type": "object", + "notification": "rpc_command", + "title": "Hook for intercepting and modifying RPC commands", + "description": [ + "The **rpc_command** hook allows a plugin to take over any RPC command.", + "", + "You can optionally specify a `filters` array, containing the command names you want to intercept: without this, all commands will be sent to this hook. 
(added in v25.12)", + "", + "The plugin receives the full JSON-RPC request and may choose to continue, replace the request, or return a custom result or error.", + "", + "This is a chained hook: only the first plugin that modifies the request or response will take effect. Other plugins will then be ignored and a warning will be logged." + ], + "request": { + "required": [ + "rpc_command" + ], + "additionalProperties": false, + "properties": { + "rpc_command": { + "type": "object", + "description": [ + "The original JSON-RPC request object." + ], + "additionalProperties": true, + "required": [ + "id", + "method", + "params" + ], + "properties": { + "id": { + "oneOf": [ + { + "type": "string" + }, + { + "type": "number" + }, + { + "type": "null" + } + ], + "description": [ + "The JSON-RPC request id." + ] + }, + "method": { + "type": "string", + "description": [ + "The RPC method name." + ] + }, + "params": { + "oneOf": [ + { + "type": "object", + "additionalProperties": true + }, + { + "type": "array", + "items": {} + } + ], + "description": [ + "The parameters passed to the RPC method." + ] + } + } + } + } + }, + "response": { + "additionalProperties": false, + "properties": { + "result": { + "type": "string", + "enum": [ + "continue" + ], + "description": [ + "Indicates that lightningd should continue processing the RPC command normally." + ] + }, + "replace": { + "type": "object", + "description": [ + "Replaces the original JSON-RPC request with a new one." + ], + "additionalProperties": true, + "required": [ + "jsonrpc", + "id", + "method", + "params" + ], + "properties": { + "jsonrpc": { + "type": "string", + "enum": [ + "2.0" + ], + "description": [ + "The JSON-RPC version." + ] + }, + "id": { + "oneOf": [ + { + "type": "string" + }, + { + "type": "number" + }, + { + "type": "null" + } + ], + "description": [ + "The JSON-RPC request id." + ] + }, + "method": { + "type": "string", + "description": [ + "The RPC method name." 
+ ] + }, + "params": { + "oneOf": [ + { + "type": "object", + "additionalProperties": true + }, + { + "type": "array", + "items": {} + } + ], + "description": [ + "The parameters passed to the RPC method." + ] + } + } + }, + "return": { + "type": "object", + "description": [ + "Returns a custom JSON-RPC response to the caller." + ], + "additionalProperties": false, + "properties": { + "result": { + "type": "object", + "description": [ + "Custom result object to return to the caller." + ] + }, + "error": { + "type": "object", + "description": [ + "Custom error object to return to the caller." + ], + "additionalProperties": true, + "properties": { + "code": { + "type": "integer", + "description": [ + "JSON-RPC error code." + ] + }, + "message": { + "type": "string", + "description": [ + "Human-readable error message." + ] + } + }, + "required": [ + "code", + "message" + ] + } + } + } + }, + "oneOf": [ + { + "required": [ + "result" + ] + }, + { + "required": [ + "replace" + ] + }, + { + "required": [ + "return" + ], + "properties": { + "return": { + "required": [ + "result" + ] + } + } + }, + { + "required": [ + "return" + ], + "properties": { + "return": { + "required": [ + "error" + ] + } + } + } + ] + } + } } } \ No newline at end of file diff --git a/contrib/msggen/msggen/utils/utils.py b/contrib/msggen/msggen/utils/utils.py index eb8b025df9a3..bee62921d70e 100644 --- a/contrib/msggen/msggen/utils/utils.py +++ b/contrib/msggen/msggen/utils/utils.py @@ -4,7 +4,7 @@ from importlib import resources from pathlib import Path -from msggen.model import CompositeField, Method, Notification, Service, TypeName +from msggen.model import CompositeField, Method, Notification, Hook, Service, TypeName grpc_method_names = [ "Getinfo", @@ -195,12 +195,32 @@ }, ] +hook_names = [ + {"name": "peer_connected", "typename": "PeerConnected"}, + {"name": "recover_hook", "schema_name": "recover", "typename": "RecoverHook"}, + {"name": "commitment_revocation", "typename": 
"CommitmentRevocation"}, + {"name": "db_write", "typename": "DbWrite"}, + {"name": "invoice_payment_hook", "schema_name": "invoice_payment", "typename": "InvoicePaymentHook"}, + {"name": "openchannel", "typename": "Openchannel"}, + {"name": "openchannel2", "typename": "Openchannel2"}, + {"name": "openchannel2_changed", "typename": "Openchannel2Changed"}, + {"name": "openchannel2_sign", "typename": "Openchannel2Sign"}, + {"name": "rbf_channel", "typename": "RbfChannel"}, + {"name": "htlc_accepted", "typename": "HtlcAccepted"}, + {"name": "rpc_command", "typename": "RpcCommand"}, + {"name": "custommsg_hook", "schema_name": "custommsg", "typename": "CustommsgHook"}, + {"name": "onion_message_recv", "typename": "OnionMessageRecv"}, + {"name": "onion_message_recv_secret", "typename": "OnionMessageRecvSecret"}, + +] + def combine_schemas(schema_dir: Path, dest: Path): """Enumerate all schema files, and combine it into a single JSON file.""" bundle = OrderedDict() methods = OrderedDict() notifications = OrderedDict() + hooks = OrderedDict() # Parse methods files = sorted(list(schema_dir.iterdir())) @@ -218,8 +238,17 @@ def combine_schemas(schema_dir: Path, dest: Path): continue notifications[f.name] = json.load(f.open()) + # Parse hooks + hooks_dir = schema_dir / "hook" + files = sorted(list(hooks_dir.iterdir())) + for f in files: + if not f.name.endswith("json"): + continue + hooks[f.name] = json.load(f.open()) + bundle["methods"] = methods bundle["notifications"] = notifications + bundle["hooks"] = hooks with dest.open(mode="w") as f: json.dump( @@ -314,6 +343,36 @@ def load_notification(name, typename: TypeName, schema_name=None): return Notification(name, TypeName(typename), request, response) +def load_hook(name, typename: TypeName, schema_name=None): + """Load a hook that can be used by a plug-in""" + typename = str(typename) + + hooks = get_schema_bundle()["hooks"] + if schema_name is None: + schema_name = name + hook_name = f"{schema_name.lower()}.json" + + 
root_added = hooks[hook_name].get("added", None) + root_deprecated = hooks[hook_name].get("deprecated", None) + + request = CompositeField.from_js(hooks[hook_name]["request"], path=name) + response = CompositeField.from_js(hooks[hook_name]["response"], path=name) + + if request.added is None: + request.added = root_added + if request.deprecated is None: + request.deprecated = root_deprecated + if response.added is None: + response.added = root_added + if response.deprecated is None: + response.deprecated = root_deprecated + + request.typename += "Event" + response.typename += "Action" + + return Hook(name, TypeName(typename), request, response) + + def load_jsonrpc_service(): methods = [load_jsonrpc_method(name) for name in grpc_method_names] notifications = [ @@ -324,7 +383,17 @@ def load_jsonrpc_service(): ) for names in grpc_notification_names ] - service = Service(name="Node", methods=methods, notifications=notifications) + hooks = [ + load_hook( + name=names["name"], + typename=names["typename"], + schema_name=names.get("schema_name"), + ) + for names in hook_names + ] + service = Service( + name="Node", methods=methods, notifications=notifications, hooks=hooks + ) service.includes = [ "primitives.proto" ] # Make sure we have the primitives included. 
diff --git a/contrib/pyln-grpc-proto/pyln/grpc/primitives_pb2.py b/contrib/pyln-grpc-proto/pyln/grpc/primitives_pb2.py index 97a670b9d4d5..3666bde88a84 100644 --- a/contrib/pyln-grpc-proto/pyln/grpc/primitives_pb2.py +++ b/contrib/pyln-grpc-proto/pyln/grpc/primitives_pb2.py @@ -24,25 +24,27 @@ -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x10primitives.proto\x12\x03\x63ln\"\x16\n\x06\x41mount\x12\x0c\n\x04msat\x18\x01 \x01(\x04\"D\n\x0b\x41mountOrAll\x12\x1d\n\x06\x61mount\x18\x01 \x01(\x0b\x32\x0b.cln.AmountH\x00\x12\r\n\x03\x61ll\x18\x02 \x01(\x08H\x00\x42\x07\n\x05value\"D\n\x0b\x41mountOrAny\x12\x1d\n\x06\x61mount\x18\x01 \x01(\x0b\x32\x0b.cln.AmountH\x00\x12\r\n\x03\x61ny\x18\x02 \x01(\x08H\x00\x42\x07\n\x05value\"(\n\x08Outpoint\x12\x0c\n\x04txid\x18\x01 \x01(\x0c\x12\x0e\n\x06outnum\x18\x02 \x01(\r\"h\n\x07\x46\x65\x65rate\x12\x0e\n\x04slow\x18\x01 \x01(\x08H\x00\x12\x10\n\x06normal\x18\x02 \x01(\x08H\x00\x12\x10\n\x06urgent\x18\x03 \x01(\x08H\x00\x12\x0f\n\x05perkb\x18\x04 \x01(\rH\x00\x12\x0f\n\x05perkw\x18\x05 \x01(\rH\x00\x42\x07\n\x05style\":\n\nOutputDesc\x12\x0f\n\x07\x61\x64\x64ress\x18\x01 \x01(\t\x12\x1b\n\x06\x61mount\x18\x02 \x01(\x0b\x32\x0b.cln.Amount\"h\n\x08RouteHop\x12\n\n\x02id\x18\x01 \x01(\x0c\x12\x0c\n\x04scid\x18\x02 \x01(\t\x12\x1c\n\x07\x66\x65\x65\x62\x61se\x18\x03 \x01(\x0b\x32\x0b.cln.Amount\x12\x0f\n\x07\x66\x65\x65prop\x18\x04 \x01(\r\x12\x13\n\x0b\x65xpirydelta\x18\x05 \x01(\r\"(\n\tRoutehint\x12\x1b\n\x04hops\x18\x01 \x03(\x0b\x32\r.cln.RouteHop\".\n\rRoutehintList\x12\x1d\n\x05hints\x18\x02 \x03(\x0b\x32\x0e.cln.Routehint\"\x9e\x01\n\x0e\x44\x65\x63odeRouteHop\x12\x0e\n\x06pubkey\x18\x01 \x01(\x0c\x12\x18\n\x10short_channel_id\x18\x02 \x01(\t\x12\"\n\rfee_base_msat\x18\x03 \x01(\x0b\x32\x0b.cln.Amount\x12#\n\x1b\x66\x65\x65_proportional_millionths\x18\x04 \x01(\r\x12\x19\n\x11\x63ltv_expiry_delta\x18\x05 \x01(\r\"4\n\x0f\x44\x65\x63odeRoutehint\x12!\n\x04hops\x18\x01 
\x03(\x0b\x32\x13.cln.DecodeRouteHop\":\n\x13\x44\x65\x63odeRoutehintList\x12#\n\x05hints\x18\x02 \x03(\x0b\x32\x14.cln.DecodeRoutehint\"\'\n\x08TlvEntry\x12\x0c\n\x04type\x18\x01 \x01(\x04\x12\r\n\x05value\x18\x02 \x01(\x0c\"+\n\tTlvStream\x12\x1e\n\x07\x65ntries\x18\x01 \x03(\x0b\x32\r.cln.TlvEntry*$\n\x0b\x43hannelSide\x12\t\n\x05LOCAL\x10\x00\x12\n\n\x06REMOTE\x10\x01*\xe7\x02\n\x0c\x43hannelState\x12\x0c\n\x08Openingd\x10\x00\x12\x1a\n\x16\x43hanneldAwaitingLockin\x10\x01\x12\x12\n\x0e\x43hanneldNormal\x10\x02\x12\x18\n\x14\x43hanneldShuttingDown\x10\x03\x12\x17\n\x13\x43losingdSigexchange\x10\x04\x12\x14\n\x10\x43losingdComplete\x10\x05\x12\x16\n\x12\x41waitingUnilateral\x10\x06\x12\x14\n\x10\x46undingSpendSeen\x10\x07\x12\x0b\n\x07Onchain\x10\x08\x12\x15\n\x11\x44ualopendOpenInit\x10\t\x12\x1b\n\x17\x44ualopendAwaitingLockin\x10\n\x12\x1a\n\x16\x43hanneldAwaitingSplice\x10\x0b\x12\x1a\n\x16\x44ualopendOpenCommitted\x10\x0c\x12\x1d\n\x19\x44ualopendOpenCommittReady\x10\r\x12\n\n\x06\x43losed\x10\x0e*\xd5\x03\n\tHtlcState\x12\x0f\n\x0bSentAddHtlc\x10\x00\x12\x11\n\rSentAddCommit\x10\x01\x12\x15\n\x11RcvdAddRevocation\x10\x02\x12\x14\n\x10RcvdAddAckCommit\x10\x03\x12\x18\n\x14SentAddAckRevocation\x10\x04\x12\x18\n\x14RcvdAddAckRevocation\x10\x05\x12\x12\n\x0eRcvdRemoveHtlc\x10\x06\x12\x14\n\x10RcvdRemoveCommit\x10\x07\x12\x18\n\x14SentRemoveRevocation\x10\x08\x12\x17\n\x13SentRemoveAckCommit\x10\t\x12\x1b\n\x17RcvdRemoveAckRevocation\x10\n\x12\x0f\n\x0bRcvdAddHtlc\x10\x0b\x12\x11\n\rRcvdAddCommit\x10\x0c\x12\x15\n\x11SentAddRevocation\x10\r\x12\x14\n\x10SentAddAckCommit\x10\x0e\x12\x12\n\x0eSentRemoveHtlc\x10\x0f\x12\x14\n\x10SentRemoveCommit\x10\x10\x12\x18\n\x14RcvdRemoveRevocation\x10\x11\x12\x17\n\x13RcvdRemoveAckCommit\x10\x12\x12\x1b\n\x17SentRemoveAckRevocation\x10\x13*\xa2\x01\n\x0f\x43hannelTypeName\x12\x19\n\x15static_remotekey_even\x10\x00\x12\x17\n\x13\x61nchor_outputs_even\x10\x01\x12!\n\x1d\x61nchors_zero_fee_htlc_tx_even\x10\x02\x12\x13\n\x0fscid_
alias_even\x10\x03\x12\x11\n\rzeroconf_even\x10\x04\x12\x10\n\x0c\x61nchors_even\x10\x05*\x89\x01\n\x12\x41utocleanSubsystem\x12\x15\n\x11SUCCEEDEDFORWARDS\x10\x00\x12\x12\n\x0e\x46\x41ILEDFORWARDS\x10\x01\x12\x11\n\rSUCCEEDEDPAYS\x10\x02\x12\x0e\n\nFAILEDPAYS\x10\x03\x12\x10\n\x0cPAIDINVOICES\x10\x04\x12\x13\n\x0f\x45XPIREDINVOICES\x10\x05*K\n\x10PluginSubcommand\x12\t\n\x05START\x10\x00\x12\x08\n\x04STOP\x10\x01\x12\n\n\x06RESCAN\x10\x02\x12\x0c\n\x08STARTDIR\x10\x03\x12\x08\n\x04LIST\x10\x04\x62\x06proto3') +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x10primitives.proto\x12\x03\x63ln\"\x16\n\x06\x41mount\x12\x0c\n\x04msat\x18\x01 \x01(\x04\"D\n\x0b\x41mountOrAll\x12\x1d\n\x06\x61mount\x18\x01 \x01(\x0b\x32\x0b.cln.AmountH\x00\x12\r\n\x03\x61ll\x18\x02 \x01(\x08H\x00\x42\x07\n\x05value\"D\n\x0b\x41mountOrAny\x12\x1d\n\x06\x61mount\x18\x01 \x01(\x0b\x32\x0b.cln.AmountH\x00\x12\r\n\x03\x61ny\x18\x02 \x01(\x08H\x00\x42\x07\n\x05value\"(\n\x08Outpoint\x12\x0c\n\x04txid\x18\x01 \x01(\x0c\x12\x0e\n\x06outnum\x18\x02 \x01(\r\"h\n\x07\x46\x65\x65rate\x12\x0e\n\x04slow\x18\x01 \x01(\x08H\x00\x12\x10\n\x06normal\x18\x02 \x01(\x08H\x00\x12\x10\n\x06urgent\x18\x03 \x01(\x08H\x00\x12\x0f\n\x05perkb\x18\x04 \x01(\rH\x00\x12\x0f\n\x05perkw\x18\x05 \x01(\rH\x00\x42\x07\n\x05style\":\n\nOutputDesc\x12\x0f\n\x07\x61\x64\x64ress\x18\x01 \x01(\t\x12\x1b\n\x06\x61mount\x18\x02 \x01(\x0b\x32\x0b.cln.Amount\"h\n\x08RouteHop\x12\n\n\x02id\x18\x01 \x01(\x0c\x12\x0c\n\x04scid\x18\x02 \x01(\t\x12\x1c\n\x07\x66\x65\x65\x62\x61se\x18\x03 \x01(\x0b\x32\x0b.cln.Amount\x12\x0f\n\x07\x66\x65\x65prop\x18\x04 \x01(\r\x12\x13\n\x0b\x65xpirydelta\x18\x05 \x01(\r\"(\n\tRoutehint\x12\x1b\n\x04hops\x18\x01 \x03(\x0b\x32\r.cln.RouteHop\".\n\rRoutehintList\x12\x1d\n\x05hints\x18\x02 \x03(\x0b\x32\x0e.cln.Routehint\"\x9e\x01\n\x0e\x44\x65\x63odeRouteHop\x12\x0e\n\x06pubkey\x18\x01 \x01(\x0c\x12\x18\n\x10short_channel_id\x18\x02 \x01(\t\x12\"\n\rfee_base_msat\x18\x03 
\x01(\x0b\x32\x0b.cln.Amount\x12#\n\x1b\x66\x65\x65_proportional_millionths\x18\x04 \x01(\r\x12\x19\n\x11\x63ltv_expiry_delta\x18\x05 \x01(\r\"4\n\x0f\x44\x65\x63odeRoutehint\x12!\n\x04hops\x18\x01 \x03(\x0b\x32\x13.cln.DecodeRouteHop\":\n\x13\x44\x65\x63odeRoutehintList\x12#\n\x05hints\x18\x02 \x03(\x0b\x32\x14.cln.DecodeRoutehint\"\'\n\x08TlvEntry\x12\x0c\n\x04type\x18\x01 \x01(\x04\x12\r\n\x05value\x18\x02 \x01(\x0c\"+\n\tTlvStream\x12\x1e\n\x07\x65ntries\x18\x01 \x03(\x0b\x32\r.cln.TlvEntry\"d\n\x11JsonObjectOrArray\x12!\n\x06object\x18\x01 \x01(\x0b\x32\x0f.cln.JsonObjectH\x00\x12\x1f\n\x05\x61rray\x18\x02 \x01(\x0b\x32\x0e.cln.JsonArrayH\x00\x42\x0b\n\tstructure\"x\n\nJsonObject\x12+\n\x06\x66ields\x18\x01 \x03(\x0b\x32\x1b.cln.JsonObject.FieldsEntry\x1a=\n\x0b\x46ieldsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x1d\n\x05value\x18\x02 \x01(\x0b\x32\x0e.cln.JsonValue:\x02\x38\x01\"+\n\tJsonArray\x12\x1e\n\x06values\x18\x01 \x03(\x0b\x32\x0e.cln.JsonValue\"\xc8\x01\n\tJsonValue\x12\x14\n\nbool_value\x18\x01 \x01(\x08H\x00\x12\x13\n\tint_value\x18\x02 \x01(\x03H\x00\x12\x14\n\nuint_value\x18\x03 \x01(\x04H\x00\x12\x16\n\x0c\x64ouble_value\x18\x04 \x01(\x01H\x00\x12\x16\n\x0cstring_value\x18\x05 \x01(\tH\x00\x12\x1f\n\x05\x61rray\x18\x06 \x01(\x0b\x32\x0e.cln.JsonArrayH\x00\x12!\n\x06object\x18\x07 \x01(\x0b\x32\x0f.cln.JsonObjectH\x00\x42\x06\n\x04kind\"\x87\x01\n\nJsonScalar\x12\x14\n\nbool_value\x18\x01 \x01(\x08H\x00\x12\x13\n\tint_value\x18\x02 \x01(\x03H\x00\x12\x14\n\nuint_value\x18\x03 \x01(\x04H\x00\x12\x16\n\x0c\x64ouble_value\x18\x04 \x01(\x01H\x00\x12\x16\n\x0cstring_value\x18\x05 
\x01(\tH\x00\x42\x08\n\x06scalar*$\n\x0b\x43hannelSide\x12\t\n\x05LOCAL\x10\x00\x12\n\n\x06REMOTE\x10\x01*\xe7\x02\n\x0c\x43hannelState\x12\x0c\n\x08Openingd\x10\x00\x12\x1a\n\x16\x43hanneldAwaitingLockin\x10\x01\x12\x12\n\x0e\x43hanneldNormal\x10\x02\x12\x18\n\x14\x43hanneldShuttingDown\x10\x03\x12\x17\n\x13\x43losingdSigexchange\x10\x04\x12\x14\n\x10\x43losingdComplete\x10\x05\x12\x16\n\x12\x41waitingUnilateral\x10\x06\x12\x14\n\x10\x46undingSpendSeen\x10\x07\x12\x0b\n\x07Onchain\x10\x08\x12\x15\n\x11\x44ualopendOpenInit\x10\t\x12\x1b\n\x17\x44ualopendAwaitingLockin\x10\n\x12\x1a\n\x16\x43hanneldAwaitingSplice\x10\x0b\x12\x1a\n\x16\x44ualopendOpenCommitted\x10\x0c\x12\x1d\n\x19\x44ualopendOpenCommittReady\x10\r\x12\n\n\x06\x43losed\x10\x0e*\xd5\x03\n\tHtlcState\x12\x0f\n\x0bSentAddHtlc\x10\x00\x12\x11\n\rSentAddCommit\x10\x01\x12\x15\n\x11RcvdAddRevocation\x10\x02\x12\x14\n\x10RcvdAddAckCommit\x10\x03\x12\x18\n\x14SentAddAckRevocation\x10\x04\x12\x18\n\x14RcvdAddAckRevocation\x10\x05\x12\x12\n\x0eRcvdRemoveHtlc\x10\x06\x12\x14\n\x10RcvdRemoveCommit\x10\x07\x12\x18\n\x14SentRemoveRevocation\x10\x08\x12\x17\n\x13SentRemoveAckCommit\x10\t\x12\x1b\n\x17RcvdRemoveAckRevocation\x10\n\x12\x0f\n\x0bRcvdAddHtlc\x10\x0b\x12\x11\n\rRcvdAddCommit\x10\x0c\x12\x15\n\x11SentAddRevocation\x10\r\x12\x14\n\x10SentAddAckCommit\x10\x0e\x12\x12\n\x0eSentRemoveHtlc\x10\x0f\x12\x14\n\x10SentRemoveCommit\x10\x10\x12\x18\n\x14RcvdRemoveRevocation\x10\x11\x12\x17\n\x13RcvdRemoveAckCommit\x10\x12\x12\x1b\n\x17SentRemoveAckRevocation\x10\x13*\xa2\x01\n\x0f\x43hannelTypeName\x12\x19\n\x15static_remotekey_even\x10\x00\x12\x17\n\x13\x61nchor_outputs_even\x10\x01\x12!\n\x1d\x61nchors_zero_fee_htlc_tx_even\x10\x02\x12\x13\n\x0fscid_alias_even\x10\x03\x12\x11\n\rzeroconf_even\x10\x04\x12\x10\n\x0c\x61nchors_even\x10\x05*\x89\x01\n\x12\x41utocleanSubsystem\x12\x15\n\x11SUCCEEDEDFORWARDS\x10\x00\x12\x12\n\x0e\x46\x41ILEDFORWARDS\x10\x01\x12\x11\n\rSUCCEEDEDPAYS\x10\x02\x12\x0e\n\nFAILEDPAYS\x10\x03\
x12\x10\n\x0cPAIDINVOICES\x10\x04\x12\x13\n\x0f\x45XPIREDINVOICES\x10\x05*K\n\x10PluginSubcommand\x12\t\n\x05START\x10\x00\x12\x08\n\x04STOP\x10\x01\x12\n\n\x06RESCAN\x10\x02\x12\x0c\n\x08STARTDIR\x10\x03\x12\x08\n\x04LIST\x10\x04\x62\x06proto3') _globals = globals() _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) _builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'primitives_pb2', _globals) if not _descriptor._USE_C_DESCRIPTORS: DESCRIPTOR._loaded_options = None - _globals['_CHANNELSIDE']._serialized_start=954 - _globals['_CHANNELSIDE']._serialized_end=990 - _globals['_CHANNELSTATE']._serialized_start=993 - _globals['_CHANNELSTATE']._serialized_end=1352 - _globals['_HTLCSTATE']._serialized_start=1355 - _globals['_HTLCSTATE']._serialized_end=1824 - _globals['_CHANNELTYPENAME']._serialized_start=1827 - _globals['_CHANNELTYPENAME']._serialized_end=1989 - _globals['_AUTOCLEANSUBSYSTEM']._serialized_start=1992 - _globals['_AUTOCLEANSUBSYSTEM']._serialized_end=2129 - _globals['_PLUGINSUBCOMMAND']._serialized_start=2131 - _globals['_PLUGINSUBCOMMAND']._serialized_end=2206 + _globals['_JSONOBJECT_FIELDSENTRY']._loaded_options = None + _globals['_JSONOBJECT_FIELDSENTRY']._serialized_options = b'8\001' + _globals['_CHANNELSIDE']._serialized_start=1564 + _globals['_CHANNELSIDE']._serialized_end=1600 + _globals['_CHANNELSTATE']._serialized_start=1603 + _globals['_CHANNELSTATE']._serialized_end=1962 + _globals['_HTLCSTATE']._serialized_start=1965 + _globals['_HTLCSTATE']._serialized_end=2434 + _globals['_CHANNELTYPENAME']._serialized_start=2437 + _globals['_CHANNELTYPENAME']._serialized_end=2599 + _globals['_AUTOCLEANSUBSYSTEM']._serialized_start=2602 + _globals['_AUTOCLEANSUBSYSTEM']._serialized_end=2739 + _globals['_PLUGINSUBCOMMAND']._serialized_start=2741 + _globals['_PLUGINSUBCOMMAND']._serialized_end=2816 _globals['_AMOUNT']._serialized_start=25 _globals['_AMOUNT']._serialized_end=47 _globals['_AMOUNTORALL']._serialized_start=49 @@ -71,4 +73,16 @@ 
_globals['_TLVENTRY']._serialized_end=907 _globals['_TLVSTREAM']._serialized_start=909 _globals['_TLVSTREAM']._serialized_end=952 + _globals['_JSONOBJECTORARRAY']._serialized_start=954 + _globals['_JSONOBJECTORARRAY']._serialized_end=1054 + _globals['_JSONOBJECT']._serialized_start=1056 + _globals['_JSONOBJECT']._serialized_end=1176 + _globals['_JSONOBJECT_FIELDSENTRY']._serialized_start=1115 + _globals['_JSONOBJECT_FIELDSENTRY']._serialized_end=1176 + _globals['_JSONARRAY']._serialized_start=1178 + _globals['_JSONARRAY']._serialized_end=1221 + _globals['_JSONVALUE']._serialized_start=1224 + _globals['_JSONVALUE']._serialized_end=1424 + _globals['_JSONSCALAR']._serialized_start=1427 + _globals['_JSONSCALAR']._serialized_end=1562 # @@protoc_insertion_point(module_scope) diff --git a/contrib/pyln-testing/pyln/testing/fixtures.py b/contrib/pyln-testing/pyln/testing/fixtures.py index e04d6d8e7929..641d4bd7087d 100644 --- a/contrib/pyln-testing/pyln/testing/fixtures.py +++ b/contrib/pyln-testing/pyln/testing/fixtures.py @@ -1,6 +1,16 @@ from concurrent import futures from pyln.testing.db import SqliteDbProvider, PostgresDbProvider -from pyln.testing.utils import NodeFactory, BitcoinD, ElementsD, env, LightningNode, TEST_DEBUG, TEST_NETWORK, SLOW_MACHINE, VALGRIND +from pyln.testing.utils import ( + NodeFactory, + BitcoinD, + ElementsD, + env, + LightningNode, + TEST_DEBUG, + TEST_NETWORK, + SLOW_MACHINE, + VALGRIND, +) from pyln.client import Millisatoshi from typing import Dict from pathlib import Path @@ -28,7 +38,7 @@ def test_base_dir(): d = os.getenv("TEST_DIR", "/tmp") - directory = tempfile.mkdtemp(prefix='ltests-', dir=d) + directory = tempfile.mkdtemp(prefix="ltests-", dir=d) print("Running tests in {}".format(directory)) yield directory @@ -36,13 +46,19 @@ def test_base_dir(): # Now check if any test directory is left because the corresponding test # failed. If there are no such tests we can clean up the root test # directory. 
- contents = [d for d in os.listdir(directory) if os.path.isdir(os.path.join(directory, d)) and d.startswith('test_')] + contents = [ + d + for d in os.listdir(directory) + if os.path.isdir(os.path.join(directory, d)) and d.startswith("test_") + ] if contents == []: shutil.rmtree(directory) else: - print("Leaving base_dir {} intact, it still has test sub-directories with failure details: {}".format( - directory, contents - )) + print( + "Leaving base_dir {} intact, it still has test sub-directories with failure details: {}".format( + directory, contents + ) + ) @pytest.fixture(autouse=True) @@ -66,7 +82,7 @@ def setup_logging(): loggers = [logging.getLogger()] + list(logging.Logger.manager.loggerDict.values()) for logger in loggers: - handlers = getattr(logger, 'handlers', []) + handlers = getattr(logger, "handlers", []) for handler in handlers: logger.removeHandler(handler) @@ -80,7 +96,9 @@ def directory(request, test_base_dir, test_name): """ # Auto set value if it isn't in the dict yet __attempts[test_name] = __attempts.get(test_name, 0) + 1 - directory = os.path.join(test_base_dir, "{}_{}".format(test_name, __attempts[test_name])) + directory = os.path.join( + test_base_dir, "{}_{}".format(test_name, __attempts[test_name]) + ) request.node.has_errors = False if not os.path.exists(directory): @@ -92,9 +110,9 @@ def directory(request, test_base_dir, test_name): # determine whether we succeeded or failed. Outcome can be None if the # failure occurs during the setup phase, hence the use to getattr instead # of accessing it directly. 
- rep_call = getattr(request.node, 'rep_call', None) - outcome = 'passed' if rep_call is None else rep_call.outcome - failed = not outcome or request.node.has_errors or outcome != 'passed' + rep_call = getattr(request.node, "rep_call", None) + outcome = "passed" if rep_call is None else rep_call.outcome + failed = not outcome or request.node.has_errors or outcome != "passed" if not failed: try: @@ -102,13 +120,19 @@ def directory(request, test_base_dir, test_name): except OSError: # Usually, this means that e.g. valgrind is still running. Wait # a little and retry. - files = [os.path.join(dp, f) for dp, dn, fn in os.walk(directory) for f in fn] + files = [ + os.path.join(dp, f) for dp, dn, fn in os.walk(directory) for f in fn + ] print("Directory still contains files: ", files) print("... sleeping then retrying") time.sleep(10) shutil.rmtree(directory) else: - logging.debug("Test execution failed, leaving the test directory {} intact.".format(directory)) + logging.debug( + "Test execution failed, leaving the test directory {} intact.".format( + directory + ) + ) @pytest.fixture @@ -117,8 +141,8 @@ def test_name(request): network_daemons = { - 'regtest': BitcoinD, - 'liquid-regtest': ElementsD, + "regtest": BitcoinD, + "liquid-regtest": ElementsD, } @@ -129,12 +153,12 @@ def node_cls(): @pytest.fixture def bitcoind(request, directory, teardown_checks): - chaind = network_daemons[env('TEST_NETWORK', 'regtest')] + chaind = network_daemons[env("TEST_NETWORK", "regtest")] bitcoind = chaind(bitcoin_dir=directory) # @pytest.mark.parametrize('bitcoind', [False], indirect=True) if you don't # want bitcoind started! 
- if getattr(request, 'param', True): + if getattr(request, "param", True): try: bitcoind.start() except Exception: @@ -145,21 +169,25 @@ def bitcoind(request, directory, teardown_checks): # FIXME: include liquid-regtest in this check after elementsd has been # updated - if info['version'] < 200100 and env('TEST_NETWORK') != 'liquid-regtest': + if info["version"] < 200100 and env("TEST_NETWORK") != "liquid-regtest": bitcoind.rpc.stop() - raise ValueError("bitcoind is too old. At least version 20100 (v0.20.1)" - " is needed, current version is {}".format(info['version'])) - elif info['version'] < 160000: + raise ValueError( + "bitcoind is too old. At least version 20100 (v0.20.1)" + " is needed, current version is {}".format(info["version"]) + ) + elif info["version"] < 160000: bitcoind.rpc.stop() - raise ValueError("elementsd is too old. At least version 160000 (v0.16.0)" - " is needed, current version is {}".format(info['version'])) + raise ValueError( + "elementsd is too old. At least version 160000 (v0.16.0)" + " is needed, current version is {}".format(info["version"]) + ) info = bitcoind.rpc.getblockchaininfo() # Make sure we have some spendable funds - if info['blocks'] < 101: - bitcoind.generate_block(101 - info['blocks']) - elif bitcoind.rpc.getwalletinfo()['balance'] < 1: + if info["blocks"] < 101: + bitcoind.generate_block(101 - info["blocks"]) + elif bitcoind.rpc.getwalletinfo()["balance"] < 1: logging.debug("Insufficient balance, generating 1 block") bitcoind.generate_block(1) @@ -218,6 +246,7 @@ def teardown_checks(request): def _extra_validator(is_request: bool): """JSON Schema validator with additions for our specialized types""" + def is_hex(checker, instance): """Hex string""" if not checker.is_type(instance, "string"): @@ -272,9 +301,14 @@ def is_short_channel_id(checker, instance): # 2. the next 3 bytes: indicating the transaction index within the block # 3. the least significant 2 bytes: indicating the output index that pays to the # channel. 
- return (blocknum >= 0 and blocknum < 2**24 - and txnum >= 0 and txnum < 2**24 - and outnum >= 0 and outnum < 2**16) + return ( + blocknum >= 0 + and blocknum < 2**24 + and txnum >= 0 + and txnum < 2**24 + and outnum >= 0 + and outnum < 2**16 + ) def is_short_channel_id_dir(checker, instance): """Short channel id with direction""" @@ -307,7 +341,16 @@ def is_feerate(checker, instance): return False if instance in ("urgent", "normal", "slow", "minimum"): return True - if instance in ("opening", "mutual_close", "unilateral_close", "delayed_to_us", "htlc_resolution", "penalty", "min_acceptable", "max_acceptable"): + if instance in ( + "opening", + "mutual_close", + "unilateral_close", + "delayed_to_us", + "htlc_resolution", + "penalty", + "min_acceptable", + "max_acceptable", + ): return True if not instance.endswith("perkw") and not instance.endswith("perkb"): return False @@ -409,7 +452,7 @@ def is_sat_or_all(checker, instance): def is_currency(checker, instance): """currency including currency code""" - pattern = re.compile(r'^\d+(\.\d+)?[A-Z][A-Z][A-Z]$') + pattern = re.compile(r"^\d+(\.\d+)?[A-Z][A-Z][A-Z]$") if checker.is_type(instance, "string") and pattern.match(instance): return True return False @@ -425,67 +468,93 @@ def is_string_map(checker, instance): return False return True + def is_json_object_or_array(checker, instance): + """rpc method params can be either an object or an array""" + return checker.is_type(instance, "object") or checker.is_type(instance, "array") + + def is_json_scalar(checker, instance): + """rpc id can be either a string, number, or null""" + return checker.is_type(instance, "string") or checker.is_type(instance, "number") or checker.is_type(instance, "null") + # "msat" for request can be many forms if is_request: is_msat = is_msat_request else: is_msat = is_msat_response - type_checker = jsonschema.Draft7Validator.TYPE_CHECKER.redefine_many({ - "hex": is_hex, - "hash": is_32byte_hex, - "secret": is_32byte_hex, - "u64": is_u64, - 
"u32": is_u32, - "u16": is_u16, - "u8": is_u8, - "pubkey": is_pubkey, - "sat": is_sat, - "sat_or_all": is_sat_or_all, - "msat": is_msat, - "msat_or_all": is_msat_or_all, - "msat_or_any": is_msat_or_any, - "currency": is_currency, - "txid": is_txid, - "signature": is_signature, - "bip340sig": is_bip340sig, - "short_channel_id": is_short_channel_id, - "short_channel_id_dir": is_short_channel_id_dir, - "outpoint": is_outpoint, - "feerate": is_feerate, - "outputdesc": is_outputdesc, - "string_map": is_string_map, - }) - - return jsonschema.validators.extend(jsonschema.Draft7Validator, - type_checker=type_checker) + type_checker = jsonschema.Draft7Validator.TYPE_CHECKER.redefine_many( + { + "hex": is_hex, + "hash": is_32byte_hex, + "secret": is_32byte_hex, + "u64": is_u64, + "u32": is_u32, + "u16": is_u16, + "u8": is_u8, + "pubkey": is_pubkey, + "sat": is_sat, + "sat_or_all": is_sat_or_all, + "msat": is_msat, + "msat_or_all": is_msat_or_all, + "msat_or_any": is_msat_or_any, + "currency": is_currency, + "txid": is_txid, + "signature": is_signature, + "bip340sig": is_bip340sig, + "short_channel_id": is_short_channel_id, + "short_channel_id_dir": is_short_channel_id_dir, + "outpoint": is_outpoint, + "feerate": is_feerate, + "outputdesc": is_outputdesc, + "string_map": is_string_map, + "json_object_or_array": is_json_object_or_array, + "json_scalar": is_json_scalar, + } + ) + + return jsonschema.validators.extend( + jsonschema.Draft7Validator, type_checker=type_checker + ) def _load_schema(filename): """Load the schema from @filename and create a validator for it""" - with open(filename, 'r') as f: + with open(filename, "r") as f: data = json.load(f) - return [_extra_validator(True)(data.get('request', {})), _extra_validator(False)(data.get('response', {}))] + return [ + _extra_validator(True)(data.get("request", {})), + _extra_validator(False)(data.get("response", {})), + ] @pytest.fixture(autouse=True) def jsonschemas(): """Load schema file if it exist: returns 
request/response schemas by pairs""" try: - schemafiles = os.listdir('doc/schemas') + schemafiles = os.listdir("doc/schemas") except FileNotFoundError: schemafiles = [] schemas = {} for fname in schemafiles: - if fname.endswith('.json'): - base = fname.replace('lightning-', '').replace('.json', '') + if fname.endswith(".json"): + base = fname.replace("lightning-", "").replace(".json", "") # Request is 0 and Response is 1 - schemas[base] = _load_schema(os.path.join('doc/schemas', fname)) + schemas[base] = _load_schema(os.path.join("doc/schemas", fname)) return schemas @pytest.fixture -def node_factory(request, directory, test_name, bitcoind, executor, db_provider, teardown_checks, node_cls, jsonschemas): +def node_factory( + request, + directory, + test_name, + bitcoind, + executor, + db_provider, + teardown_checks, + node_cls, + jsonschemas, +): nf = NodeFactory( request, test_name, @@ -514,7 +583,11 @@ def map_node_error(nodes, f, msg): map_node_error(nf.nodes, printValgrindErrors, "reported valgrind errors") map_node_error(nf.nodes, printCrashLog, "had crash.log files") map_node_error(nf.nodes, checkBroken, "had BROKEN or That's weird messages") - map_node_error(nf.nodes, lambda n: not n.allow_warning and n.daemon.is_in_log(r' WARNING:'), "had warning messages") + map_node_error( + nf.nodes, + lambda n: not n.allow_warning and n.daemon.is_in_log(r" WARNING:"), + "had warning messages", + ) map_node_error(nf.nodes, checkReconnect, "had unexpected reconnections") map_node_error(nf.nodes, checkPluginJSON, "had malformed hooks/notifications") @@ -523,13 +596,29 @@ def map_node_error(nodes, f, msg): for n in nf.nodes: dumpGossipStore(n) - map_node_error(nf.nodes, lambda n: n.daemon.is_in_log('Bad reestablish'), "had bad reestablish") - map_node_error(nf.nodes, lambda n: n.daemon.is_in_log('bad hsm request'), "had bad hsm requests") - map_node_error(nf.nodes, lambda n: n.daemon.is_in_log(r'Accessing a null column'), "Accessing a null column") + map_node_error( + 
nf.nodes, lambda n: n.daemon.is_in_log("Bad reestablish"), "had bad reestablish" + ) + map_node_error( + nf.nodes, + lambda n: n.daemon.is_in_log("bad hsm request"), + "had bad hsm requests", + ) + map_node_error( + nf.nodes, + lambda n: n.daemon.is_in_log(r"Accessing a null column"), + "Accessing a null column", + ) map_node_error(nf.nodes, checkMemleak, "had memleak messages") - map_node_error(nf.nodes, lambda n: n.rc != 0 and not n.may_fail, "Node exited with return code {n.rc}") + map_node_error( + nf.nodes, + lambda n: n.rc != 0 and not n.may_fail, + "Node exited with return code {n.rc}", + ) if not ok: - map_node_error(nf.nodes, prinErrlog, "some node failed unexpected, non-empty errlog file") + map_node_error( + nf.nodes, prinErrlog, "some node failed unexpected, non-empty errlog file" + ) for n in nf.nodes: n.daemon.cleanup_files() @@ -539,7 +628,7 @@ def getErrlog(node): for error_file in os.listdir(node.daemon.lightning_dir): if not re.fullmatch(r"errlog", error_file): continue - with open(os.path.join(node.daemon.lightning_dir, error_file), 'r') as f: + with open(os.path.join(node.daemon.lightning_dir, error_file), "r") as f: errors = f.read().strip() if errors: return errors, error_file @@ -549,7 +638,11 @@ def getErrlog(node): def prinErrlog(node): errors, fname = getErrlog(node) if errors: - print("-" * 31, "stderr of node {} captured in {} file".format(node.daemon.prefix, fname), "-" * 32) + print( + "-" * 31, + "stderr of node {} captured in {} file".format(node.daemon.prefix, fname), + "-" * 32, + ) print(errors) print("-" * 80) return 1 if errors else 0 @@ -559,7 +652,7 @@ def getValgrindErrors(node): for error_file in os.listdir(node.daemon.lightning_dir): if not re.fullmatch(r"valgrind-errors.\d+", error_file): continue - with open(os.path.join(node.daemon.lightning_dir, error_file), 'r') as f: + with open(os.path.join(node.daemon.lightning_dir, error_file), "r") as f: errors = f.read().strip() if errors: return errors, error_file @@ -580,8 
+673,8 @@ def getCrashLog(node): if node.may_fail: return None, None try: - crashlog = os.path.join(node.daemon.lightning_dir, 'crash.log') - with open(crashlog, 'r') as f: + crashlog = os.path.join(node.daemon.lightning_dir, "crash.log") + with open(crashlog, "r") as f: return f.readlines(), crashlog except Exception: return None, None @@ -599,15 +692,17 @@ def printCrashLog(node): def checkReconnect(node): if node.may_reconnect: return 0 - if node.daemon.is_in_log('Peer has reconnected'): + if node.daemon.is_in_log("Peer has reconnected"): return 1 return 0 def dumpGossipStore(node): - gs_path = os.path.join(node.daemon.lightning_dir, TEST_NETWORK, 'gossip_store') - gs = subprocess.run(['devtools/dump-gossipstore', '--print-deleted', gs_path], - stdout=subprocess.PIPE) + gs_path = os.path.join(node.daemon.lightning_dir, TEST_NETWORK, "gossip_store") + gs = subprocess.run( + ["devtools/dump-gossipstore", "--print-deleted", gs_path], + stdout=subprocess.PIPE, + ) print("GOSSIP STORE CONTENTS for {}:\n".format(node.daemon.prefix)) print(gs.stdout.decode()) @@ -616,22 +711,24 @@ def checkBadGossip(node): if node.allow_bad_gossip: return 0 # We can get bad gossip order from inside error msgs. - if node.daemon.is_in_log('Bad gossip order:'): + if node.daemon.is_in_log("Bad gossip order:"): # This can happen if a node sees a node_announce after a channel # is deleted, however. - if node.daemon.is_in_log('Deleting channel'): + if node.daemon.is_in_log("Deleting channel"): return 0 return 1 # Other 'Bad' messages shouldn't happen. 
- if node.daemon.is_in_log(r'gossipd.*Bad (?!gossip order from error)'): + if node.daemon.is_in_log(r"gossipd.*Bad (?!gossip order from error)"): return 1 return 0 def checkBroken(node): node.daemon.logs_catchup() - broken_lines = [l for l in node.daemon.logs if '**BROKEN**' in l or "That's weird: " in l] + broken_lines = [ + l for l in node.daemon.logs if "**BROKEN**" in l or "That's weird: " in l + ] if node.broken_log: ex = re.compile(node.broken_log) broken_lines = [l for l in broken_lines if not ex.search(l)] @@ -650,16 +747,18 @@ def checkPluginJSON(node): return 0 try: - notificationfiles = os.listdir('doc/schemas/notification') + notificationfiles = os.listdir("doc/schemas/notification") except FileNotFoundError: notificationfiles = [] notifications = {} for fname in notificationfiles: - if fname.endswith('.json'): - base = fname.replace('.json', '') + if fname.endswith(".json"): + base = fname.replace(".json", "") # Request is 0 and Response is 1 - notifications[base] = _load_schema(os.path.join('doc/schemas/notification', fname)) + notifications[base] = _load_schema( + os.path.join("doc/schemas/notification", fname) + ) # FIXME: add doc/schemas/hook/ hooks = {} @@ -667,24 +766,27 @@ def checkPluginJSON(node): for f in (Path(node.daemon.lightning_dir) / "plugin-io").iterdir(): # e.g. hook_in-peer_connected-124567-358 io = json.loads(f.read_text()) - parts = f.name.split('-') - if parts[0] == 'hook_in': + parts = f.name.split("-") + if parts[0] == "hook_in": schema = hooks.get(parts[1]) - req = io['result'] + req = io["result"] direction = 1 - elif parts[0] == 'hook_out': + elif parts[0] == "hook_out": schema = hooks.get(parts[1]) - req = io['params'] + req = io["params"] direction = 0 else: - assert parts[0] == 'notification_out' + assert parts[0] == "notification_out" schema = notifications.get(parts[1]) # The notification is wrapped in an object of its own name. 
- req = io['params'][parts[1]] + req = io["params"][parts[1]] direction = 1 # Until v26.09, with channel_state_changed.null_scid, that notification will be non-schema compliant. - if f.name.startswith('notification_out-channel_state_changed-') and node.daemon.opts.get('allow-deprecated-apis', True) is True: + if ( + f.name.startswith("notification_out-channel_state_changed-") + and node.daemon.opts.get("allow-deprecated-apis", True) is True + ): continue if schema is not None: @@ -697,33 +799,33 @@ def checkPluginJSON(node): def checkBadReestablish(node): - if node.daemon.is_in_log('Bad reestablish'): + if node.daemon.is_in_log("Bad reestablish"): return 1 return 0 def checkBadHSMRequest(node): - if node.daemon.is_in_log('bad hsm request'): + if node.daemon.is_in_log("bad hsm request"): return 1 return 0 def checkMemleak(node): - if node.daemon.is_in_log('MEMLEAK:'): + if node.daemon.is_in_log("MEMLEAK:"): return 1 return 0 # Mapping from TEST_DB_PROVIDER env variable to class to be used providers = { - 'sqlite3': SqliteDbProvider, - 'postgres': PostgresDbProvider, + "sqlite3": SqliteDbProvider, + "postgres": PostgresDbProvider, } @pytest.fixture def db_provider(test_base_dir): - provider = providers[os.getenv('TEST_DB_PROVIDER', 'sqlite3')](test_base_dir) + provider = providers[os.getenv("TEST_DB_PROVIDER", "sqlite3")](test_base_dir) provider.start() yield provider provider.stop() @@ -740,29 +842,29 @@ def executor(teardown_checks): def chainparams(): """Return the chainparams for the TEST_NETWORK. - - chain_hash is in network byte order, not the RPC return order. - - example_addr doesn't belong to any node in the test (randomly generated) + - chain_hash is in network byte order, not the RPC return order. 
+ - example_addr doesn't belong to any node in the test (randomly generated) """ chainparams = { - 'regtest': { + "regtest": { "bip173_prefix": "bcrt", "elements": False, "name": "regtest", - "p2sh_prefix": '2', + "p2sh_prefix": "2", "example_addr": "bcrt1qeyyk6sl5pr49ycpqyckvmttus5ttj25pd0zpvg", "feeoutput": False, - "chain_hash": '06226e46111a0b59caaf126043eb5bbf28c34f3a5e332a1fc7b2b73cf188910f', + "chain_hash": "06226e46111a0b59caaf126043eb5bbf28c34f3a5e332a1fc7b2b73cf188910f", }, - 'liquid-regtest': { + "liquid-regtest": { "bip173_prefix": "ert", "elements": True, "name": "liquid-regtest", - "p2sh_prefix": 'X', + "p2sh_prefix": "X", "example_addr": "ert1qjsesxflhs3632syhcz7llpfx20p5tr0kpllfve", "feeoutput": True, "chain_hash": "9f87eb580b9e5f11dc211e9fb66abb3699999044f8fe146801162393364286c6", - } + }, } - return chainparams[env('TEST_NETWORK', 'regtest')] + return chainparams[env("TEST_NETWORK", "regtest")] diff --git a/doc/Makefile b/doc/Makefile index 5f1b8306234f..fbf4b64d749b 100644 --- a/doc/Makefile +++ b/doc/Makefile @@ -188,7 +188,7 @@ doc/schemas/sql.json: doc/schemas/sql-template.json plugins/sql doc-all: $(MANPAGES) doc/index.rst -SCHEMAS := $(wildcard doc/schemas/*.json) $(wildcard doc/schemas/notification/*.json) +SCHEMAS := $(wildcard doc/schemas/*.json) $(wildcard doc/schemas/notification/*.json) $(wildcard doc/schemas/hook/*.json) # Don't try to build sql.json tables with plugins/sql if we don't have sqlite3 ifeq ($(HAVE_SQLITE3),0) diff --git a/doc/schemas/hook/commitment_revocation.json b/doc/schemas/hook/commitment_revocation.json new file mode 100644 index 000000000000..f655a076a36b --- /dev/null +++ b/doc/schemas/hook/commitment_revocation.json @@ -0,0 +1,75 @@ +{ + "$schema": "../rpc-schema-draft.json", + "added": "pre-v0.10.1", + "type": "object", + "notification": "commitment_revocation", + "title": "Hook fired when a commitment transaction is revoked", + "description": [ + "The **commitment_revocation** hook is called whenever a 
channel state is updated, and the old state was revoked. State updates in Lightning consist of the following steps:", + "", + "1. Proposal of a new state commitment in the form of a commitment transaction", + "2. Exchange of signatures for the agreed upon commitment transaction", + "3. Verification that the signatures match the commitment transaction", + "4. Exchange of revocation secrets that could be used to penalize an eventual misbehaving party", + "", + "The `commitment_revocation` hook is used to inform the plugin about the state transition being completed, and deliver the penalty transaction.", + "The penalty transaction could then be sent to a watchtower that automatically reacts in case one party attempts to settle using a revoked commitment.", + "", + "This is a chained hook: multiple plugins may be registered." + ], + "request": { + "additionalProperties": false, + "required": [ + "commitment_txid", + "penalty_tx", + "channel_id", + "commitnum" + ], + "properties": { + "commitment_txid": { + "type": "txid", + "description": [ + "The txid of the revoked commitment transaction." + ] + }, + "penalty_tx": { + "type": "hex", + "description": [ + "The penalty transaction that can spend the revoked commitment.", + "Can be sent to a watchtower for enforcement." + ] + }, + "channel_id": { + "added": "v0.10.2", + "type": "hash", + "description": [ + "The channel_id for which the revocation occurred." + ] + }, + "commitnum": { + "added": "v0.10.2", + "type": "u64", + "description": [ + "The commitment number identifying the revoked state." + ] + } + } + }, + "response": { + "additionalProperties": false, + "required": [ + "result" + ], + "properties": { + "result": { + "type": "string", + "enum": [ + "continue" + ], + "description": [ + "Plugins should always return \"continue\", otherwise subsequent hook subscribers would not get called." 
+ ] + } + } + } +} diff --git a/doc/schemas/hook/custommsg.json b/doc/schemas/hook/custommsg.json new file mode 100644 index 000000000000..b1077a1515ff --- /dev/null +++ b/doc/schemas/hook/custommsg.json @@ -0,0 +1,64 @@ +{ + "$schema": "../rpc-schema-draft.json", + "added": "pre-v0.10.1", + "type": "object", + "notification": "custommsg", + "title": "Hook for handling custom peer messages", + "description": [ + "The **custommsg** hook is the receiving counterpart to the sendcustommsg RPC method and is called whenever a peer sends a custom message that is not handled internally by Core Lightning.", + "", + "The goal of these two components is to allow the implementation of custom protocols or prototypes on top of a Core Lightning node, without having to change the node's implementation itself.", + "", + "Messages are restricted to odd-numbered types and must not conflict with internally handled message types.", + "These limitations are in place in order to avoid conflicts with the internal state tracking, and to avoid disconnections or channel closures, since odd-numbered messages can be ignored by nodes (see \"it's ok to be odd\" in BOLT #1 for details).", + "", + "Note that if the hook registration specifies \"filters\" then that should be a JSON array of message numbers, and the hook will only be called for those.", + "Otherwise, the hook is called for all messages not handled internally. (added in v25.12)", + "", + "This is a chained hook and MUST return `{\"result\": \"continue\"}`." + ], + "request": { + "required": [ + "peer_id", + "payload" + ], + "additionalProperties": false, + "properties": { + "peer_id": { + "type": "pubkey", + "description": [ + "The `node_id` of the peer that sent the message."
+ ] + }, + "payload": { + "type": "hex", + "description": [ + "The raw message payload as a hex string.", + "", + "The first two bytes encode the message type (big-endian), followed by the message payload.", + "The plugin must implement the parsing of the message, including the type prefix, since Core Lightning does not know how to parse the message." + ] + } + } + }, + "response": { + "required": [ + "result" + ], + "additionalProperties": false, + "properties": { + "result": { + "type": "string", + "enum": [ + "continue" + ], + "description": [ + "Must always be `continue`. Any other value will cause the hook to fail." + ] + } + } + }, + "see_also": [ + "lightning-sendcustommsg(7)" + ] +} diff --git a/doc/schemas/hook/db_write.json b/doc/schemas/hook/db_write.json new file mode 100644 index 000000000000..317faff7984c --- /dev/null +++ b/doc/schemas/hook/db_write.json @@ -0,0 +1,89 @@ +{ + "$schema": "../rpc-schema-draft.json", + "added": "pre-v0.10.1", + "type": "object", + "notification": "db_write", + "title": "Hook fired before database writes are committed", + "description": [ + "The **db_write** hook is called whenever a change is about to be committed to the database, if you are using a SQLITE3 database (the default).", + "This hook will be useless (the \"writes\" field will always be empty) if you are using a PostgreSQL database.", + "", + "This hook is extremely restricted:", + "1. A plugin registering for this hook should not perform anything that may cause a database operation in response (pretty much, anything but logging).", + "2. A plugin registering for this hook should not register for other hooks or commands, as these may become intermingled and break rule #1.", + "3. The hook will be called before your plugin is initialized!", + "", + "This hook is strongly synchronous: `lightningd` will halt almost all processing until all plugins have responded.", + "", + "This hook is intended for creating continuous backups. 
The intent is that your backup plugin maintains three pieces of information (possibly in separate files):", + "1. A snapshot of the database", + "2. A log of database queries that will bring that snapshot up-to-date", + "3. The previous `data_version`", + "", + "`data_version` is an unsigned 32-bit number that will always increment by 1 each time `db_write` is called. Note that this will wrap around on the limit of 32-bit numbers.", + "", + "`writes` is an array of strings, each string being a database query that modifies the database.", + "If the `data_version` above is validated correctly, then you can simply append this to the log of database queries.", + "", + "Your plugin MUST validate the `data_version`. It MUST keep track of the previous `data_version` it got, and:", + "1. If the new `data_version` is exactly one higher than the previous, then this is the ideal case and nothing bad happened and we should save this and continue.", + "2. If the new `data_version` is exactly the same value as the previous, then the previous set of queries was not committed.", + " Your plugin MAY overwrite the previous set of queries with the current set, or it MAY overwrite its entire backup with a new snapshot of the database and the current `writes` array", + " (treating this case as if `data_version` were two or more higher than the previous).", + "3. If the new `data_version` is less than the previous, your plugin MUST halt and catch fire, and have the operator inspect what exactly happened here.", + "4. 
Otherwise, some queries were lost and your plugin SHOULD recover by creating a new snapshot of the database: copy the database file, back up the given `writes` array, then delete", + " (or atomically rename if in a POSIX filesystem) the previous backups of the database and SQL statements, or you MAY fail the hook to abort `lightningd`.", + "", + "The \"rolling up\" of the database could be done periodically as well if the log of SQL statements has grown large.", + "", + "Any response other than `{\"result\": \"continue\"}` will cause `lightningd` to error without committing to the database! This is the expected way to halt and catch fire.", + "", + "`db_write` is a parallel-chained hook, i.e., multiple plugins can register it, and all of them will be invoked simultaneously without regard for order of registration.", + "The hook is considered handled if all registered plugins return `{\"result\": \"continue\"}`. If any plugin returns anything else, `lightningd` will error without committing to the database." + ], + "request": { + "additionalProperties": false, + "required": [ + "data_version", + "writes" + ], + "properties": { + "data_version": { + "type": "u32", + "description": [ + "A monotonically increasing 32-bit unsigned integer representing the database version.", + "Wraps around at the 32-bit limit." + ] + }, + "writes": { + "type": "array", + "description": [ + "Array of SQL statements that modify the database.", + "If using PostgreSQL, this array will always be empty.", + "Each entry is a SQL query string." + ], + "items": { + "type": "string" + } + } + } + }, + "response": { + "additionalProperties": false, + "required": [ + "result" + ], + "properties": { + "result": { + "type": "string", + "enum": [ + "continue" + ], + "description": [ + "Must be \"continue\" for the database commit to proceed.", + "Any other value will abort the commit and cause `lightningd` to error." 
+ ] + } + } + } +} diff --git a/doc/schemas/hook/htlc_accepted.json b/doc/schemas/hook/htlc_accepted.json new file mode 100644 index 000000000000..bbba2a6cc4d2 --- /dev/null +++ b/doc/schemas/hook/htlc_accepted.json @@ -0,0 +1,321 @@ +{ + "$schema": "../rpc-schema-draft.json", + "added": "pre-v0.10.1", + "type": "object", + "notification": "htlc_accepted", + "title": "Hook for handling incoming HTLCs", + "description": [ + "The **htlc_accepted** hook is called whenever an incoming HTLC is accepted.", + "", + "The plugin can inspect the HTLC and decide to continue processing, fail it, or resolve it.", + "", + "lightningd will replay the HTLCs for which it doesn't have a final verdict during startup.", + "This means that, if the plugin response wasn't processed before the HTLC was forwarded, failed, or resolved,", + "then the plugin may see the same HTLC again during startup. It is therefore paramount that the plugin is idempotent if it talks to an external system.", + "", + "This is a chained hook: plugins are called in order until one returns a result other than `continue`.", + "After this the event is considered handled and the remaining plugins are skipped." + ], + "request": { + "required": [ + "onion", + "htlc" + ], + "additionalProperties": false, + "properties": { + "peer_id": { + "added": "v25.12", + "type": "pubkey", + "description": [ + "The `node_id` of the peer that offered this HTLC.", + "This field may be absent if the peer is unknown." + ] + }, + "onion": { + "type": "object", + "additionalProperties": false, + "required": [ + "payload", + "next_onion", + "shared_secret" + ], + "properties": { + "payload": { + "type": "hex", + "description": [ + "The raw unparsed onion payload received from the sender." + ] + }, + "type": { + "type": "string", + "enum": [ + "tlv" + ], + "description": [ + "Indicates that the payload is TLV formatted.", + "Only present if the payload was successfully parsed." 
+ ] + }, + "short_channel_id": { + "type": "short_channel_id", + "description": [ + "Determines the channel that the sender is hinting should be used next.", + "Not present if this node is the final destination." + ] + }, + "next_node_id": { + "type": "pubkey", + "description": [ + "The node_id of the next hop.", + "Only present if specified in the onion payload." + ] + }, + "forward_msat": { + "type": "msat", + "description": [ + "The amount to forward to the next hop." + ] + }, + "outgoing_cltv_value": { + "type": "u32", + "description": [ + "Determines what the CLTV value for the HTLC that we forward to the next hop should be." + ] + }, + "total_msat": { + "type": "msat", + "description": [ + "The total payment amount.", + "Only present for final recipients using modern TLV payloads." + ] + }, + "payment_secret": { + "type": "secret", + "description": [ + "The payment secret (which the payer should have obtained from the invoice) provided by the sender.", + "Only present for final recipients." + ] + }, + "payment_metadata": { + "type": "hex", + "description": [ + "Additional metadata provided in the onion payload.", + "Only present if included by the sender." + ] + }, + "next_onion": { + "type": "hex", + "description": [ + "The fully processed onion that we should be sending to the next hop as part of the outgoing HTLC.", + "Processed in this case means that we took the incoming onion, decrypted it, extracted the payload destined for us, and serialised the resulting onion again." + ] + }, + "shared_secret": { + "type": "secret", + "description": [ + "The shared secret used to decrypt the incoming onion.", + "It is shared with the sender that constructed the onion." 
+ ] + } + } + }, + "htlc": { + "type": "object", + "additionalProperties": false, + "required": [ + "short_channel_id", + "id", + "amount_msat", + "cltv_expiry", + "cltv_expiry_relative", + "payment_hash" + ], + "properties": { + "short_channel_id": { + "added": "v0.12.0", + "type": "short_channel_id", + "description": [ + "The channel this HTLC is coming from." + ] + }, + "id": { + "added": "v0.12.0", + "type": "u64", + "description": [ + "The unique HTLC identifier assigned by the channel peer." + ] + }, + "amount_msat": { + "added": "v0.12.0", + "type": "msat", + "description": [ + "The amount received in this HTLC.", + "This amount minus the `forward_msat` amount is the fee that will stay with us." + ] + }, + "cltv_expiry": { + "type": "u32", + "description": [ + "Determines when the HTLC reverts back to the sender.", + "`cltv_expiry` minus `outgoing_cltv_value` should be equal or larger than our `cltv_delta` setting." + ] + }, + "cltv_expiry_relative": { + "type": "u32", + "description": [ + "Hints how much time we still have to claim the HTLC.", + "It is the `cltv_expiry` minus the current blockheight and is passed along mainly to avoid the plugin having to look up the current blockheight." + ] + }, + "payment_hash": { + "type": "hash", + "description": [ + "The payment hash used to identify the payment." + ] + }, + "extra_tlvs": { + "added": "v25.09", + "type": "hex", + "description": [ + "Optional TLV stream attached to the HTLC." + ] + } + } + }, + "forward_to": { + "type": "hash", + "description": [ + "The `channel_id` we intend to forward the HTLC to.", + "Will not be present if the `short_channel_id` was invalid or we were the final destination." 
+ ] + } + } + }, + "response": { + "required": [ + "result" + ], + "additionalProperties": false, + "properties": { + "result": { + "type": "string", + "enum": [ + "continue", + "fail", + "resolve" + ], + "description": [ + "Determines how the HTLC should be handled.", + "", + "`continue` means that the plugin does not want to do anything special and lightningd should continue processing it normally,", + "i.e., resolve the payment if we're the recipient, or attempt to forward it otherwise. Notice that the usual checks such as sufficient fees and CLTV deltas are still enforced.", + "", + "It can also replace the onion.payload by specifying a payload in the response. Note that this is always a TLV-style payload,", + "so unlike onion.payload there is no length prefix (and it must be at least 4 hex digits long). This will be re-parsed;", + "it's useful for removing onion fields which a plugin doesn't want lightningd to consider.", + "", + "It can also specify forward_to in the response, replacing the destination.", + "This usually only makes sense if it wants to choose an alternate channel to the same next peer, but is useful if the payload is also replaced.", + "", + "Also, it can specify extra_tlvs in the response. This will replace the TLV-stream update_add_htlc_tlvs in the update_add_htlc message for forwarded htlcs.", + "", + "If the node is the final destination, the plugin can also replace the amount of the invoice that belongs to the payment_hash by specifying invoice_msat.", + "", + "", + "`fail` will tell lightningd to fail the HTLC with a given hex-encoded `failure_message` (please refer to BOLT #4 for details: `incorrect_or_unknown_payment_details` is the most common).", + "", + "Instead of `failure_message` the response can contain a hex-encoded `failure_onion` that will be used instead (please refer to the BOLT #4 for details).", + "This can be used, for example, if you're writing a bridge between two Lightning Networks. 
Note that lightningd will apply the obfuscation step to the value", + "returned here with its own shared secret (and key type `ammag`) before returning it to the previous hop.", + "", + "", + "`resolve` instructs lightningd to claim the HTLC by providing the preimage matching the `payment_hash` presented in the call.", + "Notice that the plugin must ensure that the `payment_key` really matches the `payment_hash` since lightningd will not check and the wrong value could result in the channel being closed." + ] + }, + "payload": { + "type": "hex", + "description": [ + "Replacement TLV payload to use instead of the original onion payload." + ] + }, + "forward_to": { + "type": "hash", + "description": [ + "Overrides the forwarding destination." + ] + }, + "extra_tlvs": { + "added": "v25.09", + "type": "hex", + "description": [ + "Replacement TLV stream for forwarded HTLCs." + ] + }, + "invoice_msat": { + "added": "v25.12", + "type": "msat", + "description": [ + "Overrides the invoice amount for final destination checks." + ] + }, + "failure_message": { + "type": "hex", + "description": [ + "Failure message to return if result is `fail`." + ] + }, + "failure_onion": { + "type": "hex", + "description": [ + "Serialized failure onion to return if result is `fail`." + ] + }, + "payment_key": { + "type": "secret", + "description": [ + "Preimage used to resolve the HTLC if result is `resolve`." 
+ ] + } + }, + "if": { + "properties": { + "result": { + "enum": [ + "fail" + ] + } + } + }, + "then": { + "anyOf": [ + { + "required": [ + "failure_message" + ] + }, + { + "required": [ + "failure_onion" + ] + } + ] + }, + "else": { + "if": { + "properties": { + "result": { + "enum": [ + "resolve" + ] + } + } + }, + "then": { + "required": [ + "payment_key" + ] + } + } + } +} diff --git a/doc/schemas/hook/invoice_payment.json b/doc/schemas/hook/invoice_payment.json new file mode 100644 index 000000000000..3b9657cebab7 --- /dev/null +++ b/doc/schemas/hook/invoice_payment.json @@ -0,0 +1,114 @@ +{ + "$schema": "../rpc-schema-draft.json", + "added": "pre-v0.10.1", + "type": "object", + "notification": "invoice_payment", + "title": "Hook fired when a payment for an invoice is received", + "description": [ + "The **invoice_payment** hook is called whenever a valid payment for an unpaid invoice has arrived.", + "", + "The hook is deliberately sparse. Plugins can use `listinvoices` to retrieve additional information.", + "", + "The plugin can:", + "- accept the payment by returning {\"result\": \"continue\"}", + "- reject the payment with a generic error using {\"result\": \"reject\"}", + "- reject the payment with a custom BOLT 4 failure message using the `failure_message` field", + "", + "If `failure_message` is provided, the payment will be failed with that message.", + "If result is \"reject\" and no `failure_message` is provided, the payment fails with `incorrect_or_unknown_payment_details`.", + "`failure_message` must NOT be provided when result is \"continue\".", + "", + "Before version 23.11 the msat field was encoded as a string with an 'msat' suffix." 
+ ], + "request": { + "additionalProperties": false, + "required": [ + "payment" + ], + "properties": { + "payment": { + "type": "object", + "additionalProperties": true, + "required": [ + "label", + "preimage", + "msat" + ], + "properties": { + "label": { + "type": "string", + "description": [ + "Unique label identifying the invoice." + ] + }, + "preimage": { + "type": "secret", + "description": [ + "The payment preimage." + ] + }, + "msat": { + "type": "msat", + "description": [ + "Amount paid in millisatoshis." + ] + } + }, + "description": [ + "Basic payment information.", + "Additional TLV-derived fields may be included when running in developer mode." + ] + } + } + }, + "response": { + "additionalProperties": false, + "required": [ + "result" + ], + "properties": { + "result": { + "type": "string", + "enum": [ + "continue", + "reject" + ], + "description": [ + "Controls whether the payment is accepted or rejected.", + "\"continue\" accepts the payment.", + "\"reject\" fails the payment." + ] + }, + "failure_message": { + "type": "hex", + "description": [ + "Optional BOLT 4 failure message.", + "Used to provide a specific failure reason when rejecting the payment." 
+ ] + } + }, + "if": { + "properties": { + "result": { + "type": "string", + "enum": [ + "reject" + ] + } + }, + "required": [ + "result" + ] + }, + "then": { + "properties": { + "failure_message": { + "type": "hex" + } + } + } + }, + "see_also": [ + "lightning-listinvoices(7)" + ] +} diff --git a/doc/schemas/hook/onion_message_recv.json b/doc/schemas/hook/onion_message_recv.json new file mode 100644 index 000000000000..c5c837370405 --- /dev/null +++ b/doc/schemas/hook/onion_message_recv.json @@ -0,0 +1,171 @@ +{ + "$schema": "../rpc-schema-draft.json", + "added": "pre-v0.10.1", + "type": "object", + "notification": "onion_message_recv", + "title": "Hook for receiving unsolicited onion messages", + "description": [ + "The **onion_message_recv** hook is used for unsolicited onion messages (where the source knows that it is sending to this node).", + "", + "Replies MUST be ignored unless they use the correct path (see onion_message_recv_secret).", + "", + "Returning anything other than {\"result\": \"continue\"} prevents further hook processing." + ], + "request": { + "required": [ + "onion_message" + ], + "additionalProperties": false, + "properties": { + "onion_message": { + "type": "object", + "additionalProperties": false, + "properties": { + "reply_blindedpath": { + "type": "object", + "description": [ + "A blinded return path provided by the sender.", + "", + "This allows replying without revealing the recipient's identity or network position.", + "If present, plugins must use this path if they construct a reply onion message." + ], + "additionalProperties": false, + "properties": { + "first_node_id": { + "type": "pubkey", + "description": [ + "The introduction node of the blinded path.", + "This is the first hop to which the reply should be sent.", + "", + "Only one of `first_node_id` or the pair `first_scid` and `first_scid_dir` is present." 
+ ] + }, + "first_scid": { + "type": "short_channel_id", + "description": [ + "Alternative to `first_node_id`: identifies the introduction point via a channel.", + "", + "Only one of `first_node_id` or the pair `first_scid` and `first_scid_dir` is present." + ] + }, + "first_scid_dir": { + "type": "u32", + "description": [ + "Direction of the `short_channel_id` (0 or 1).", + "", + "Only one of `first_node_id` or the pair `first_scid` and `first_scid_dir` is present." + ] + }, + "first_path_key": { + "added": "v24.11", + "type": "pubkey", + "description": [ + "Initial public key used to derive shared secrets with the first hop.", + "", + "This key allows each hop to derive per-hop encryption keys and blinding factors." + ] + }, + "hops": { + "type": "array", + "description": [ + "Sequence of blinded hops forming the path.", + "", + "Each hop contains a blinded node identifier and encrypted routing instructions." + ], + "items": { + "type": "object", + "required": [ + "blinded_node_id", + "encrypted_recipient_data" + ], + "additionalProperties": false, + "properties": { + "blinded_node_id": { + "type": "pubkey", + "description": [ + "Blinded public key representing the hop.", + "", + "The actual node identity is hidden using a blinding factor." + ] + }, + "encrypted_recipient_data": { + "type": "hex", + "description": [ + "Encrypted TLV payload for this hop.", + "", + "Contains instructions (e.g., next hop) encrypted with a shared secret derived from the path key." + ] + } + } + } + } + } + }, + "invoice_request": { + "type": "hex", + "description": [ + "BOLT #12 `invoice_request` payload." + ] + }, + "invoice": { + "type": "hex", + "description": [ + "BOLT #12 `invoice` payload." + ] + }, + "invoice_error": { + "type": "hex", + "description": [ + "BOLT #12 `invoice_error` payload." 
+ ] + }, + "unknown_fields": { + "type": "array", + "description": [ + "Unknown or unparsed TLV fields from the onion message.", + "", + "Plugins may inspect these for experimental or custom extensions." + ], + "items": { + "type": "object", + "required": [ + "number", + "value" + ], + "additionalProperties": false, + "properties": { + "number": { + "type": "u64", + "description": [ + "TLV type number." + ] + }, + "value": { + "type": "hex", + "description": [ + "Raw TLV value." + ] + } + } + } + } + } + } + } + }, + "response": { + "required": [ + "result" + ], + "additionalProperties": false, + "properties": { + "result": { + "type": "string", + "description": [ + "Return \"continue\" to pass the message to the next plugin.", + "Returning any other value stops further hook processing." + ] + } + } + } +} diff --git a/doc/schemas/hook/onion_message_recv_secret.json b/doc/schemas/hook/onion_message_recv_secret.json new file mode 100644 index 000000000000..55ada6b4a45b --- /dev/null +++ b/doc/schemas/hook/onion_message_recv_secret.json @@ -0,0 +1,185 @@ +{ + "$schema": "../rpc-schema-draft.json", + "added": "pre-v0.10.1", + "type": "object", + "notification": "onion_message_recv_secret", + "title": "Hook for receiving onion messages via blinded paths", + "description": [ + "The **onion_message_recv_secret** hook is used when an onion message is received via a blinded path previously provided by this node.", + "", + "The presence of `pathsecret` allows the plugin to authenticate that the message used the intended return path.", + "", + "Replies MUST only be sent when the `pathsecret` matches expectations.", + "", + "Returning anything other than {\"result\": \"continue\"} prevents further hook processing." 
+ ], + "request": { + "required": [ + "onion_message" + ], + "additionalProperties": false, + "properties": { + "onion_message": { + "type": "object", + "required": [ + "pathsecret" + ], + "additionalProperties": false, + "properties": { + "pathsecret": { + "type": "secret", + "description": [ + "Shared secret identifying the blinded path.", + "", + "Used to verify that the sender used a path previously provided by this node.", + "This prevents probing attacks and unauthorized replies." + ] + }, + "reply_blindedpath": { + "type": "object", + "description": [ + "A blinded return path provided by the sender.", + "", + "This allows replying without revealing the recipient's identity or network position.", + "If present, plugins must use this path if they construct a reply onion message." + ], + "additionalProperties": false, + "properties": { + "first_node_id": { + "type": "pubkey", + "description": [ + "The introduction node of the blinded path.", + "This is the first hop to which the reply should be sent.", + "", + "Only one of `first_node_id` or the pair `first_scid` and `first_scid_dir` is present." + ] + }, + "first_scid": { + "type": "short_channel_id", + "description": [ + "Alternative to `first_node_id`: identifies the introduction point via a channel.", + "", + "Only one of `first_node_id` or the pair `first_scid` and `first_scid_dir` is present." + ] + }, + "first_scid_dir": { + "type": "u32", + "description": [ + "Direction of the `short_channel_id` (0 or 1).", + "", + "Only one of `first_node_id` or the pair `first_scid` and `first_scid_dir` is present." + ] + }, + "first_path_key": { + "added": "v24.11", + "type": "pubkey", + "description": [ + "Initial public key used to derive shared secrets with the first hop.", + "", + "This key allows each hop to derive per-hop encryption keys and blinding factors." 
+ ] + }, + "hops": { + "type": "array", + "description": [ + "Sequence of blinded hops forming the path.", + "", + "Each hop contains a blinded node identifier and encrypted routing instructions." + ], + "items": { + "type": "object", + "required": [ + "blinded_node_id", + "encrypted_recipient_data" + ], + "additionalProperties": false, + "properties": { + "blinded_node_id": { + "type": "pubkey", + "description": [ + "Blinded public key representing the hop.", + "", + "The actual node identity is hidden using a blinding factor." + ] + }, + "encrypted_recipient_data": { + "type": "hex", + "description": [ + "Encrypted TLV payload for this hop.", + "", + "Contains instructions (e.g., next hop) encrypted with a shared secret derived from the path key." + ] + } + } + } + } + } + }, + "invoice_request": { + "type": "hex", + "description": [ + "BOLT #12 `invoice_request` payload." + ] + }, + "invoice": { + "type": "hex", + "description": [ + "BOLT #12 `invoice` payload." + ] + }, + "invoice_error": { + "type": "hex", + "description": [ + "BOLT #12 `invoice_error` payload." + ] + }, + "unknown_fields": { + "type": "array", + "description": [ + "Unknown or unparsed TLV fields from the onion message.", + "", + "Plugins may inspect these for experimental or custom extensions." + ], + "items": { + "type": "object", + "required": [ + "number", + "value" + ], + "additionalProperties": false, + "properties": { + "number": { + "type": "u64", + "description": [ + "TLV type number." + ] + }, + "value": { + "type": "hex", + "description": [ + "Raw TLV value." + ] + } + } + } + } + } + } + } + }, + "response": { + "required": [ + "result" + ], + "additionalProperties": false, + "properties": { + "result": { + "type": "string", + "description": [ + "Return \"continue\" to pass the message to the next plugin.", + "Returning any other value stops further hook processing." 
+ ] + } + } + } +} diff --git a/doc/schemas/hook/openchannel.json b/doc/schemas/hook/openchannel.json new file mode 100644 index 000000000000..260614de7a99 --- /dev/null +++ b/doc/schemas/hook/openchannel.json @@ -0,0 +1,226 @@ +{ + "$schema": "../rpc-schema-draft.json", + "added": "pre-v0.10.1", + "type": "object", + "notification": "openchannel", + "title": "Hook fired when a peer proposes opening a channel using v1 protocol", + "description": [ + "The **openchannel** hook is called whenever a remote peer tries to fund a channel using the v1 protocol, after passing basic sanity checks.", + "", + "The payload mirrors the BOLT #2 `open_channel` message and may include additional fields defined by the protocol.", + "", + "Plugins can reject the channel or modify certain parameters before accepting it.", + "", + "This is a chained hook: the first plugin returning a non-\"continue\" result terminates the chain.", + "Mutation fields (`close_to`, `mindepth`, `reserve`) are only applied from the first plugin that sets them.", + "Additional fields may be present in the request as defined by BOLT #2.", + "Providing invalid values (e.g., invalid `close_to` address) will cause lightningd to exit." + ], + "request": { + "additionalProperties": false, + "required": [ + "openchannel" + ], + "properties": { + "openchannel": { + "type": "object", + "additionalProperties": true, + "required": [ + "id", + "funding_msat", + "push_msat", + "dust_limit_msat", + "max_htlc_value_in_flight_msat", + "channel_reserve_msat", + "htlc_minimum_msat", + "feerate_per_kw", + "to_self_delay", + "max_accepted_htlcs", + "channel_flags", + "channel_type" + ], + "properties": { + "id": { + "type": "pubkey", + "description": [ + "The peer's node_id." + ] + }, + "funding_msat": { + "type": "msat", + "description": [ + "Funding amount proposed by the peer." + ] + }, + "push_msat": { + "type": "msat", + "description": [ + "Amount pushed to us at channel open." 
+ ] + }, + "dust_limit_msat": { + "type": "msat", + "description": [ + "Dust limit for outputs." + ] + }, + "max_htlc_value_in_flight_msat": { + "type": "msat", + "description": [ + "Maximum HTLC value allowed in flight." + ] + }, + "channel_reserve_msat": { + "type": "msat", + "description": [ + "Channel reserve required by the peer." + ] + }, + "htlc_minimum_msat": { + "type": "msat", + "description": [ + "Minimum HTLC value." + ] + }, + "feerate_per_kw": { + "type": "u32", + "description": [ + "Feerate in satoshi per kw." + ] + }, + "to_self_delay": { + "type": "u32", + "description": [ + "The number of blocks before they can take their funds if they unilaterally close." + ] + }, + "max_accepted_htlcs": { + "type": "u32", + "description": [ + "Maximum number of HTLCs the remote is allowed to offer at once." + ] + }, + "channel_flags": { + "type": "u8", + "description": [ + "Channel flags as defined in BOLT #2." + ] + }, + "shutdown_scriptpubkey": { + "type": "hex", + "description": [ + "Optional shutdown scriptPubKey proposed by the peer." + ] + }, + "channel_type": { + "added": "v25.09", + "type": "object", + "additionalProperties": false, + "required": [ + "bits", + "names" + ], + "properties": { + "bits": { + "type": "array", + "description": [ + "List of feature bit numbers that define the negotiated channel type.", + "Each value represents a feature bit as defined in BOLT #2." + ], + "items": { + "type": "u32", + "description": [ + "Feature bit number." + ] + } + }, + "names": { + "type": "array", + "description": [ + "Human-readable names corresponding to each feature bit.", + "Names are implementation-defined and may evolve over time." + ], + "items": { + "type": "string", + "description": [ + "Name of the feature bit."
+ ] + } + } + } + } + } + } + } + }, + "response": { + "additionalProperties": false, + "required": [ + "result" + ], + "properties": { + "result": { + "type": "string", + "enum": [ + "continue", + "reject" + ], + "description": [ + "Whether to accept or reject the channel opening request." + ] + }, + "error_message": { + "type": "string", + "description": [ + "Optional error message sent to the peer when rejecting." + ] + }, + "close_to": { + "type": "string", + "description": [ + "Bitcoin address for mutual close output.", + "Must be valid for the current chain or lightningd will exit with an error." + ] + }, + "mindepth": { + "added": "v0.12.0", + "type": "u32", + "description": [ + "`mindepth` is the number of confirmations to require before making the channel usable.", + "Notice that setting this to 0 (zeroconf) or some other low value might expose you to double-spending issues,", + "so only lower this value from the default if you trust the peer not to double-spend, or you reject incoming payments,", + "including forwards, until the funding is confirmed." + ] + }, + "reserve": { + "added": "v22.11", + "type": "sat", + "description": [ + "`reserve` is an absolute value for the amount (in satoshi) in the channel that the peer must keep on their side.", + "This ensures that they always have something to lose, so only lower this below the 1% of funding amount if you trust the peer.", + "The protocol requires this to be larger than the dust limit, hence it will be adjusted to be the dust limit if the specified value is below." 
+ ] + } + }, + "if": { + "properties": { + "result": { + "type": "string", + "enum": [ + "reject" + ] + } + }, + "required": [ + "result" + ] + }, + "then": { + "properties": { + "error_message": { + "type": "string" + } + } + } + } +} diff --git a/doc/schemas/hook/openchannel2.json b/doc/schemas/hook/openchannel2.json new file mode 100644 index 000000000000..4b551102bc56 --- /dev/null +++ b/doc/schemas/hook/openchannel2.json @@ -0,0 +1,289 @@ +{ + "$schema": "../rpc-schema-draft.json", + "added": "pre-v0.10.1", + "type": "object", + "notification": "openchannel2", + "title": "Hook fired when a peer proposes opening a channel using v2 protocol", + "description": [ + "The **openchannel2** hook is called whenever a remote peer tries to fund a channel using the v2 (dual-funding) protocol, after passing basic sanity checks.", + "", + "The payload mirrors the BOLT #2 `open_channel` message and dual-funding extensions.", + "There may be additional fields present depending on negotiated features.", + "", + "`requested_lease_msat`, `lease_blockheight_start`, and `node_blockheight` are only present if the peer requested a funding lease (`option_will_fund`).", + "", + "The plugin can reject the channel, accept it, or contribute funds via a PSBT when accepting.", + "", + "See `plugins/funder.c` for an example of how to use this hook to contribute funds to a channel open.", + "", + "This is a chained hook: multiple plugins may be invoked.", + "Returning any result other than \"continue\" terminates the chain.", + "Only the first plugin that sets mutation fields (e.g. `close_to`) will have them applied.", + "Invalid `close_to` addresses will cause lightningd to exit.", + "The PSBT must be consistent with the funding transaction and respect feerate constraints." 
+ ], + "request": { + "additionalProperties": false, + "required": [ + "openchannel2" + ], + "properties": { + "openchannel2": { + "type": "object", + "additionalProperties": true, + "required": [ + "id", + "channel_id", + "their_funding_msat", + "dust_limit_msat", + "max_htlc_value_in_flight_msat", + "htlc_minimum_msat", + "funding_feerate_per_kw", + "commitment_feerate_per_kw", + "feerate_our_max", + "feerate_our_min", + "to_self_delay", + "max_accepted_htlcs", + "channel_flags", + "locktime", + "channel_max_msat", + "require_confirmed_inputs", + "channel_type" + ], + "properties": { + "id": { + "type": "pubkey", + "description": [ + "The `node_id` of the peer proposing the channel." + ] + }, + "channel_id": { + "type": "hash", + "description": [ + "Temporary `channel_id` assigned for this channel negotiation." + ] + }, + "their_funding_msat": { + "type": "msat", + "description": [ + "Amount contributed by the remote peer to the channel funding transaction." + ] + }, + "dust_limit_msat": { + "type": "msat", + "description": [ + "Minimum output value below which outputs are considered dust." + ] + }, + "max_htlc_value_in_flight_msat": { + "type": "msat", + "description": [ + "Maximum total value of outstanding HTLCs allowed in the channel at any time." + ] + }, + "htlc_minimum_msat": { + "type": "msat", + "description": [ + "Minimum HTLC value the peer will accept." + ] + }, + "funding_feerate_per_kw": { + "type": "u32", + "description": [ + "Feerate (per kw) used for the funding transaction." + ] + }, + "commitment_feerate_per_kw": { + "type": "u32", + "description": [ + "Feerate (per kw) used for commitment transactions." + ] + }, + "feerate_our_max": { + "type": "u32", + "description": [ + "Maximum feerate we are willing to accept for commitment transactions." + ] + }, + "feerate_our_min": { + "type": "u32", + "description": [ + "Minimum feerate we are willing to accept for commitment transactions." 
+ ] + }, + "to_self_delay": { + "type": "u16", + "description": [ + "The number of blocks before they can take their funds if they unilaterally close." + ] + }, + "max_accepted_htlcs": { + "type": "u16", + "description": [ + "Maximum number of HTLCs the remote is allowed to offer at once." + ] + }, + "channel_flags": { + "type": "u8", + "description": [ + "Channel flags as defined in BOLT #2." + ] + }, + "locktime": { + "type": "u32", + "description": [ + "Locktime to be used in the funding transaction." + ] + }, + "shutdown_scriptpubkey": { + "type": "hex", + "description": [ + "Optional shutdown scriptPubKey provided by the peer for cooperative close." + ] + }, + "channel_max_msat": { + "type": "msat", + "description": [ + "Maximum capacity this channel is allowed to reach." + ] + }, + "requested_lease_msat": { + "type": "msat", + "description": [ + "Amount of liquidity the peer is requesting us to lease to them.", + "Only present if `option_will_fund` is negotiated." + ] + }, + "lease_blockheight_start": { + "type": "u32", + "description": [ + "Blockheight at which the lease period begins.", + "Only present if `requested_lease_msat` is present." + ] + }, + "node_blockheight": { + "type": "u32", + "description": [ + "Current blockheight of the node.", + "Used in conjunction with lease parameters.", + "Only present if `requested_lease_msat` is present." + ] + }, + "require_confirmed_inputs": { + "added": "v23.02", + "type": "boolean", + "description": [ + "Indicates whether the peer requires all funding inputs to be confirmed." + ] + }, + "channel_type": { + "added": "v25.09", + "type": "object", + "additionalProperties": false, + "required": [ + "bits", + "names" + ], + "properties": { + "bits": { + "type": "array", + "description": [ + "List of feature bit numbers that define the negotiated channel type.", + "Each value represents a feature bit as defined in BOLT #2." + ], + "items": { + "type": "u32", + "description": [ + "Feature bit number."
+ ] + } + }, + "names": { + "type": "array", + "description": [ + "Human-readable names corresponding to each feature bit.", + "Names are implementation-defined and may evolve over time." + ], + "items": { + "type": "string", + "description": [ + "Name of the feature bit." + ] + } + } + } + } + } + } + } + }, + "response": { + "additionalProperties": false, + "required": [ + "result" + ], + "properties": { + "result": { + "type": "string", + "enum": [ + "continue", + "reject" + ], + "description": [ + "Indicates whether to accept or reject the channel proposal.", + "Returning \"continue\" allows the channel negotiation to proceed.", + "Returning \"reject\" aborts the channel opening." + ] + }, + "error_message": { + "type": "string", + "description": [ + "Error message sent to the peer when rejecting the channel.", + "Only valid if result is \"reject\"." + ] + }, + "close_to": { + "type": "string", + "description": [ + "Bitcoin address to which funds will be sent on cooperative close.", + "Must be valid for the current chain or lightningd will exit with an error." + ] + }, + "psbt": { + "type": "string", + "description": [ + "Partially Signed Bitcoin Transaction contributing inputs and outputs for the funding transaction.", + "Used when the plugin contributes funds to the channel." + ] + }, + "our_funding_msat": { + "type": "msat", + "description": [ + "Amount we contribute to the channel funding.", + "This amount must NOT be included in any outputs in the provided PSBT.", + "Change outputs must be included separately." 
+ ] + } + }, + "if": { + "properties": { + "result": { + "type": "string", + "enum": [ + "reject" + ] + } + }, + "required": [ + "result" + ] + }, + "then": { + "properties": { + "error_message": { + "type": "string" + } + } + } + } +} diff --git a/doc/schemas/hook/openchannel2_changed.json b/doc/schemas/hook/openchannel2_changed.json new file mode 100644 index 000000000000..0d19729ca046 --- /dev/null +++ b/doc/schemas/hook/openchannel2_changed.json @@ -0,0 +1,100 @@ +{ + "$schema": "../rpc-schema-draft.json", + "added": "pre-v0.10.1", + "type": "object", + "notification": "openchannel2_changed", + "title": "Hook for handling updates to the dual-funding PSBT", + "description": [ + "The **openchannel2_changed** hook is called when the peer sends an updated PSBT during dual-funding channel negotiation.", + "", + "This allows plugins to inspect and modify the PSBT before it is sent back to the peer.", + "", + "The negotiation continues until neither side makes further changes to the PSBT, at which point commitment transactions are exchanged.", + "", + "See `plugins/funder.c` for an example of how to use this hook to continue a v2 channel open." + ], + "request": { + "required": [ + "openchannel2_changed" + ], + "additionalProperties": false, + "properties": { + "openchannel2_changed": { + "type": "object", + "additionalProperties": false, + "required": [ + "channel_id", + "psbt", + "require_confirmed_inputs" + ], + "properties": { + "channel_id": { + "type": "hash", + "description": [ + "The temporary channel_id identifying the channel being negotiated." + ] + }, + "psbt": { + "type": "string", + "description": [ + "The current Partially Signed Bitcoin Transaction (PSBT) representing the funding transaction.", + "This PSBT includes contributions from both peers and may be modified." 
+ ] + }, + "require_confirmed_inputs": { + "added": "v23.02", + "type": "boolean", + "description": [ + "Indicates whether the remote peer requires all inputs in the PSBT to be confirmed.", + "If true, the plugin must avoid adding unconfirmed inputs." + ] + } + } + } + } + }, + "response": { + "required": [ + "result", + "psbt" + ], + "additionalProperties": false, + "properties": { + "result": { + "type": "string", + "enum": [ + "continue" + ], + "description": [ + "Must be set to `continue` to proceed with the channel opening negotiation." + ] + }, + "psbt": { + "type": "string", + "description": [ + "The updated PSBT to send back to the peer.", + "If no modifications are made, this should be identical to the input PSBT." + ] + } + } + }, + "examples": [ + { + "request": { + "id": "example:openchannel2_changed#1", + "method": "openchannel2_changed", + "params": { + "openchannel2_changed": { + "channel_id": "252d1b0a1e57895e841...", + "psbt": "cHNidP8BADMCAAAAAQ+yBipSVZr...", + "require_confirmed_inputs": true + } + } + }, + "response": { + "result": "continue", + "psbt": "cHNidP8BADMCAAAAAQ+yBipSVZr..." 
+ } + } + ] +} diff --git a/doc/schemas/hook/openchannel2_sign.json b/doc/schemas/hook/openchannel2_sign.json new file mode 100644 index 000000000000..3917e8b38335 --- /dev/null +++ b/doc/schemas/hook/openchannel2_sign.json @@ -0,0 +1,103 @@ +{ + "$schema": "../rpc-schema-draft.json", + "added": "pre-v0.10.1", + "type": "object", + "notification": "openchannel2_sign", + "title": "Hook for signing the dual-funding PSBT", + "description": [ + "The **openchannel2_sign** hook is called after commitment transactions have been received during dual-funding channel establishment.", + "", + "The plugin is expected to sign any inputs it owns in the provided PSBT and return the updated PSBT.", + "", + "If no inputs need to be signed, the original PSBT should be returned unchanged.", + "", + "Once both sides have provided signatures, the funding transaction will be broadcast.", + "", + "See `plugins/funder.c` for an example of how to use this hook to sign a funding transaction." + ], + "request": { + "required": [ + "openchannel2_sign" + ], + "additionalProperties": false, + "properties": { + "openchannel2_sign": { + "type": "object", + "additionalProperties": false, + "required": [ + "channel_id", + "psbt" + ], + "properties": { + "channel_id": { + "type": "hash", + "description": [ + "The temporary `channel_id` identifying the channel being negotiated." + ] + }, + "psbt": { + "type": "string", + "description": [ + "The Partially Signed Bitcoin Transaction (PSBT) representing the funding transaction.", + "The plugin should add signatures for any inputs it controls." + ] + } + } + } + } + }, + "response": { + "required": [ + "result", + "psbt" + ], + "additionalProperties": false, + "properties": { + "result": { + "type": "string", + "enum": [ + "continue" + ], + "description": [ + "Must be set to `continue` to proceed with channel opening." 
+ ] + }, + "psbt": { + "type": "string", + "description": [ + "The PSBT including any added signatures.", + "If no inputs were signed, this should be identical to the input PSBT." + ] + } + } + }, + "example_notifications": [ + { + "method": "openchannel2_sign", + "params": { + "openchannel2_sign": { + "channel_id": "252d1b0a1e57895e841...", + "psbt": "cHNidP8BADMCAAAAAQ+yBipSVZr..." + } + } + } + ], + "examples": [ + { + "request": { + "id": "example:openchannel2_sign#1", + "method": "openchannel2_sign", + "params": { + "openchannel2_sign": { + "channel_id": "252d1b0a1e57895e841...", + "psbt": "cHNidP8BADMCAAAAAQ+yBipSVZr..." + } + } + }, + "response": { + "result": "continue", + "psbt": "cHNidP8BADMCAAAAAQ+yBipSVZr..." + } + } + ] +} diff --git a/doc/schemas/hook/peer_connected.json b/doc/schemas/hook/peer_connected.json new file mode 100644 index 000000000000..78fd60c74611 --- /dev/null +++ b/doc/schemas/hook/peer_connected.json @@ -0,0 +1,96 @@ +{ + "$schema": "../rpc-schema-draft.json", + "added": "pre-v0.10.1", + "type": "object", + "notification": "peer_connected", + "title": "Hook fired when a peer connects and completes handshake", + "description": [ + "The **peer_connected** hook is called whenever a peer has connected and successfully completed the cryptographic handshake.", + "", + "This is a chained hook: the first plugin returning \"disconnect\" stops further processing.", + "Plugins can call `listpeers` to retrieve additional information about the peer." + ], + "request": { + "additionalProperties": false, + "required": [ + "peer" + ], + "properties": { + "peer": { + "type": "object", + "additionalProperties": false, + "required": [ + "id", + "direction", + "addr", + "features" + ], + "properties": { + "id": { + "type": "pubkey", + "description": [ + "The node_id of the connected peer." + ] + }, + "direction": { + "type": "string", + "enum": [ + "in", + "out" + ], + "description": [ + "Connection direction: `in` for incoming, `out` for outgoing." 
+ ] + }, + "addr": { + "type": "string", + "description": [ + "The `addr` field shows the address that we are connected to ourselves, not the gossiped list of known addresses.", + "In particular this means that the port for incoming connections is an ephemeral port, that may not be available for reconnections." + ] + }, + "remote_addr": { + "type": "string", + "description": [ + "Our own address as reported by the remote peer. Helps with detecting our own IPv4 changes behind NAT." + ] + }, + "features": { + "type": "hex", + "description": [ + "Feature bits advertised by the peer, encoded as hex." + ] + } + } + } + } + }, + "response": { + "additionalProperties": false, + "required": [ + "result" + ], + "properties": { + "result": { + "type": "string", + "enum": [ + "continue", + "disconnect" + ], + "description": [ + "Whether to allow the connection to proceed or disconnect the peer." + ] + }, + "error_message": { + "type": "string", + "description": [ + "Optional error message sent to the peer before disconnection.", + "Only used if result is \"disconnect\"." 
+ ] + } + } + }, + "see_also": [ + "lightning-listpeers(7)" + ] +} diff --git a/doc/schemas/hook/rbf_channel.json b/doc/schemas/hook/rbf_channel.json new file mode 100644 index 000000000000..6aed12464dd2 --- /dev/null +++ b/doc/schemas/hook/rbf_channel.json @@ -0,0 +1,157 @@ +{ + "$schema": "../rpc-schema-draft.json", + "added": "pre-v0.10.1", + "type": "object", + "notification": "rbf_channel", + "title": "Hook for handling RBF channel funding requests", + "description": [ + "The **rbf_channel** hook is called when a peer proposes replacing the funding transaction of an existing channel using Replace-By-Fee (RBF).", + "", + "The plugin can choose to reject or continue the negotiation.", + "", + "If continuing, the plugin may contribute additional inputs and outputs by returning a PSBT and specifying an `our_funding_msat` amount.", + "", + "The `our_funding_msat` value must not be included in any output in the PSBT. Change outputs should be included and calculated using the provided `funding_feerate_per_kw`." + ], + "request": { + "required": [ + "rbf_channel" + ], + "additionalProperties": false, + "properties": { + "rbf_channel": { + "type": "object", + "additionalProperties": false, + "required": [ + "id", + "channel_id", + "their_last_funding_msat", + "their_funding_msat", + "our_last_funding_msat", + "funding_feerate_per_kw", + "feerate_our_max", + "feerate_our_min", + "channel_max_msat", + "locktime", + "require_confirmed_inputs" + ], + "properties": { + "id": { + "type": "pubkey", + "description": [ + "The `node_id` of the peer proposing the RBF." + ] + }, + "channel_id": { + "type": "hash", + "description": [ + "The `channel_id` of the channel being modified." + ] + }, + "their_last_funding_msat": { + "type": "msat", + "description": [ + "The peer's previous contribution to the funding transaction." + ] + }, + "their_funding_msat": { + "type": "msat", + "description": [ + "The peer's proposed new funding contribution." 
+ ] + }, + "our_last_funding_msat": { + "type": "msat", + "description": [ + "Our previous contribution to the funding transaction." + ] + }, + "funding_feerate_per_kw": { + "type": "u32", + "description": [ + "The feerate to use for the updated funding transaction, in satoshis per kw." + ] + }, + "feerate_our_max": { + "type": "u32", + "description": [ + "The maximum feerate we are willing to accept for the funding transaction." + ] + }, + "feerate_our_min": { + "type": "u32", + "description": [ + "The minimum feerate we are willing to accept for the funding transaction." + ] + }, + "channel_max_msat": { + "type": "msat", + "description": [ + "The maximum total channel capacity allowed for this channel." + ] + }, + "locktime": { + "type": "u32", + "description": [ + "The locktime to use for the funding transaction." + ] + }, + "requested_lease_msat": { + "type": "msat", + "description": [ + "If present, the amount of liquidity the peer is requesting us to lease.", + "This field is optional and only included if the peer requested a lease." + ] + }, + "require_confirmed_inputs": { + "added": "v23.02", + "type": "boolean", + "description": [ + "Indicates whether the remote peer requires all inputs in the PSBT to be confirmed.", + "If true, the plugin must avoid adding unconfirmed inputs." + ] + } + } + } + } + }, + "response": { + "required": [ + "result" + ], + "additionalProperties": false, + "properties": { + "result": { + "type": "string", + "enum": [ + "continue", + "reject" + ], + "description": [ + "Whether to accept or reject the RBF proposal." + ] + }, + "psbt": { + "type": "string", + "description": [ + "A PSBT containing additional inputs and outputs to contribute to the funding transaction.", + "Only valid if `result` is `continue`." + ] + }, + "our_funding_msat": { + "type": "msat", + "description": [ + "The amount we are contributing to the new funding transaction.", + "Must not be included in any output in the PSBT." 
+ ] + }, + "error_message": { + "type": "string", + "description": [ + "An error message explaining the rejection.", + "Only used if `result` is `reject` and will be sent to the peer." + ] + } + } + } +} diff --git a/doc/schemas/hook/recover.json b/doc/schemas/hook/recover.json new file mode 100644 index 000000000000..f9f246c7bb0f --- /dev/null +++ b/doc/schemas/hook/recover.json @@ -0,0 +1,47 @@ +{ + "$schema": "../rpc-schema-draft.json", + "added": "v23.08", + "type": "object", + "notification": "recover", + "title": "Hook fired when node starts in recovery mode", + "description": [ + "The **recover** hook is called whenever the node is started using the --recovery flag.", + "It provides the codex32 secret used to derive the HSM secret.", + "Plugins can use this to reconnect to peers who keep your peer storage backups with them and recover state or funds.", + "", + "This hook is informational and does not allow altering execution flow.", + "Plugins are expected to perform recovery-related side effects such as reconnecting to peers." + ], + "request": { + "additionalProperties": false, + "required": [ + "codex32" + ], + "properties": { + "codex32": { + "type": "string", + "description": [ + "The codex32-encoded secret provided via --recover.", + "Used to reconstruct the node's HSM secret." + ] + } + } + }, + "response": { + "additionalProperties": false, + "required": [ + "result" + ], + "properties": { + "result": { + "type": "string", + "enum": [ + "continue" + ], + "description": [ + "Returning \"continue\" resumes normal execution." 
+ ] + } + } + } +} diff --git a/doc/schemas/hook/rpc_command.json b/doc/schemas/hook/rpc_command.json new file mode 100644 index 000000000000..4bbf8ec3dffc --- /dev/null +++ b/doc/schemas/hook/rpc_command.json @@ -0,0 +1,226 @@ +{ + "$schema": "../rpc-schema-draft.json", + "added": "pre-v0.10.1", + "type": "object", + "notification": "rpc_command", + "title": "Hook for intercepting and modifying RPC commands", + "description": [ + "The **rpc_command** hook allows a plugin to take over any RPC command.", + "", + "You can optionally specify a `filters` array, containing the command names you want to intercept: without this, all commands will be sent to this hook. (added in v25.12)", + "", + "The plugin receives the full JSON-RPC request and may choose to continue, replace the request, or return a custom result or error.", + "", + "This is a chained hook: only the first plugin that modifies the request or response will take effect. Other plugins will then be ignored and a warning will be logged." + ], + "request": { + "required": [ + "rpc_command" + ], + "additionalProperties": false, + "properties": { + "rpc_command": { + "type": "object", + "description": [ + "The original JSON-RPC request object." + ], + "additionalProperties": true, + "required": [ + "id", + "method", + "params" + ], + "properties": { + "id": { + "oneOf": [ + { + "type": "string" + }, + { + "type": "number" + }, + { + "type": "null" + } + ], + "description": [ + "The JSON-RPC request id." + ] + }, + "method": { + "type": "string", + "description": [ + "The RPC method name." + ] + }, + "params": { + "oneOf": [ + { + "type": "object", + "additionalProperties": true + }, + { + "type": "array", + "items": {} + } + ], + "description": [ + "The parameters passed to the RPC method." 
+ ] + } + } + } + } + }, + "response": { + "additionalProperties": false, + "properties": { + "result": { + "type": "string", + "enum": [ + "continue" + ], + "description": [ + "Indicates that lightningd should continue processing the RPC command normally." + ] + }, + "replace": { + "type": "object", + "description": [ + "Replaces the original JSON-RPC request with a new one." + ], + "additionalProperties": true, + "required": [ + "jsonrpc", + "id", + "method", + "params" + ], + "properties": { + "jsonrpc": { + "type": "string", + "enum": [ + "2.0" + ], + "description": [ + "The JSON-RPC version." + ] + }, + "id": { + "oneOf": [ + { + "type": "string" + }, + { + "type": "number" + }, + { + "type": "null" + } + ], + "description": [ + "The JSON-RPC request id." + ] + }, + "method": { + "type": "string", + "description": [ + "The RPC method name." + ] + }, + "params": { + "oneOf": [ + { + "type": "object", + "additionalProperties": true + }, + { + "type": "array", + "items": {} + } + ], + "description": [ + "The parameters passed to the RPC method." + ] + } + } + }, + "return": { + "type": "object", + "description": [ + "Returns a custom JSON-RPC response to the caller." + ], + "additionalProperties": false, + "properties": { + "result": { + "type": "object", + "description": [ + "Custom result object to return to the caller." + ] + }, + "error": { + "type": "object", + "description": [ + "Custom error object to return to the caller." + ], + "additionalProperties": true, + "properties": { + "code": { + "type": "integer", + "description": [ + "JSON-RPC error code." + ] + }, + "message": { + "type": "string", + "description": [ + "Human-readable error message." 
+ ] + } + }, + "required": [ + "code", + "message" + ] + } + } + } + }, + "oneOf": [ + { + "required": [ + "result" + ] + }, + { + "required": [ + "replace" + ] + }, + { + "required": [ + "return" + ], + "properties": { + "return": { + "required": [ + "result" + ] + } + } + }, + { + "required": [ + "return" + ], + "properties": { + "return": { + "required": [ + "error" + ] + } + } + } + ] + } +} diff --git a/plugins/src/lib.rs b/plugins/src/lib.rs index e9a3a10d7888..b08da4b510cb 100644 --- a/plugins/src/lib.rs +++ b/plugins/src/lib.rs @@ -2,6 +2,7 @@ use crate::codec::{JsonCodec, JsonRpcCodec}; pub use anyhow::anyhow; use anyhow::{Context, Result}; use futures::sink::SinkExt; +use serde::de::DeserializeOwned; use serde::Serialize; use tokio::io::{AsyncReadExt, AsyncWriteExt}; extern crate log; @@ -195,7 +196,9 @@ where self } - /// Add a subscription to a given `hookname` + /// Add a hook subscription for `hookname` with a raw [`serde_json::Value`] request and response. + /// Prefer [`Builder::hook_typed`] for type-safe hooks, or [`Builder::hook_from_builder`] if you + /// need to configure `before`, `after`, or `filters`. pub fn hook(mut self, hookname: &str, callback: C) -> Self where C: Send + Sync + 'static, @@ -215,11 +218,60 @@ where self } + /// Add a hook subscription using a [`HookBuilder`], which allows configuring `before`, `after`, + /// and `filters` in addition to the callback. Use [`HookBuilder::new`] for raw + /// [`serde_json::Value`] hooks or [`HookBuilder::new_typed`] for type-safe hooks. pub fn hook_from_builder(mut self, hook: HookBuilder) -> Builder { self.hooks.insert(hook.name.clone(), hook.build()); self } + /// Add a hook subscription for `hookname` with typed request and response. The request is + /// deserialized from JSON into `Req` and the response is serialized from `Resp` back to JSON + /// automatically. If deserialization of the request fails, the hook returns an error to CLN. 
+ /// Use [`Builder::hook_from_builder`] with [`HookBuilder::new_typed`] if you additionally need + /// to configure `before`, `after`, or `filters`. + pub fn hook_typed(mut self, hookname: &str, callback: C) -> Self + where + C: Send + Sync + 'static, + C: Fn(Plugin, Req) -> F + 'static, + F: Future> + Send + 'static, + Req: DeserializeOwned + Send + 'static, + Resp: Serialize + Send + 'static, + { + let hookname = hookname.to_string(); + self.hooks.insert( + hookname.clone(), + Hook { + name: hookname.clone(), + callback: Box::new(move |p, r| { + let typed_req = serde_json::from_value(r).unwrap_or_else(|e| { + let error = format!( + "cln-plugin: hook '{hookname}' received a request that doesn't match \ + the expected schema. Error: {e}" + ); + println!( + "{}", + serde_json::json!({"jsonrpc": "2.0", + "method": "log", + "params": {"level":"warn", "message":error}}) + ); + std::process::exit(1); + }); + let fut = callback(p, typed_req); + Box::pin(async move { + let typed_resp = fut.await?; + serde_json::to_value(typed_resp).map_err(Error::from) + }) + }), + before: Vec::new(), + after: Vec::new(), + filters: None, + }, + ); + self + } + /// Register a custom RPC method for the RPC passthrough from the /// main daemon pub fn rpcmethod(mut self, name: &str, description: &str, callback: C) -> Builder @@ -498,6 +550,43 @@ where } } + pub fn new_typed(name: &str, callback: C) -> Self + where + C: Send + Sync + 'static, + C: Fn(Plugin, Req) -> F + 'static, + F: Future> + Send + 'static, + Req: DeserializeOwned + Send + 'static, + Resp: Serialize + Send + 'static, + { + let hookname = name.to_string(); + Self { + name: hookname.clone(), + callback: Box::new(move |p, r| { + let typed_req = serde_json::from_value(r).unwrap_or_else(|e| { + let error = format!( + "cln-plugin: hook '{hookname}' received a request that doesn't match \ + the expected schema. 
Error: {e}" + ); + println!( + "{}", + serde_json::json!({"jsonrpc": "2.0", + "method": "log", + "params": {"level":"warn", "message":error}}) + ); + std::process::exit(1); + }); + let fut = callback(p, typed_req); + Box::pin(async move { + let typed_resp = fut.await?; + serde_json::to_value(typed_resp).map_err(Error::from) + }) + }), + before: Vec::new(), + after: Vec::new(), + filters: None, + } + } + pub fn before(mut self, before: Vec) -> Self { self.before = before; self