2016-12-05 14:59:36 +03:00
|
|
|
package channeldb
|
|
|
|
|
|
|
|
import (
|
|
|
|
"bytes"
|
|
|
|
"fmt"
|
2020-01-17 09:31:07 +03:00
|
|
|
"math"
|
2016-12-21 12:19:01 +03:00
|
|
|
"reflect"
|
|
|
|
"testing"
|
|
|
|
"time"
|
2016-12-31 03:32:20 +03:00
|
|
|
|
2019-05-08 16:00:19 +03:00
|
|
|
"github.com/btcsuite/btcd/btcec"
|
2020-06-10 13:34:27 +03:00
|
|
|
"github.com/btcsuite/btcwallet/walletdb"
|
2016-12-31 03:32:20 +03:00
|
|
|
"github.com/davecgh/go-spew/spew"
|
2020-01-17 09:31:07 +03:00
|
|
|
"github.com/lightningnetwork/lnd/channeldb/kvdb"
|
2019-05-23 21:05:27 +03:00
|
|
|
"github.com/lightningnetwork/lnd/lntypes"
|
2019-11-05 01:11:28 +03:00
|
|
|
"github.com/lightningnetwork/lnd/record"
|
2019-05-08 16:00:19 +03:00
|
|
|
"github.com/lightningnetwork/lnd/routing/route"
|
2020-06-10 13:34:27 +03:00
|
|
|
"github.com/stretchr/testify/require"
|
2019-05-08 16:00:19 +03:00
|
|
|
)
|
|
|
|
|
|
|
|
// Test fixtures shared by the payment serialization tests below.
var (
	// priv/pub form a static keypair; pub is used as the vertex for every
	// hop and as the route's source.
	priv, _ = btcec.NewPrivateKey(btcec.S256())
	pub     = priv.PubKey()

	// testHop1 is a TLV-payload hop carrying custom records and an MPP
	// record, exercising the non-legacy serialization path.
	testHop1 = &route.Hop{
		PubKeyBytes:      route.NewVertex(pub),
		ChannelID:        12345,
		OutgoingTimeLock: 111,
		AmtToForward:     555,
		CustomRecords: record.CustomSet{
			65536: []byte{},
			80001: []byte{},
		},
		MPP: record.NewMPP(32, [32]byte{0x42}),
	}

	// testHop2 is a legacy-payload hop with no custom records.
	testHop2 = &route.Hop{
		PubKeyBytes:      route.NewVertex(pub),
		ChannelID:        12345,
		OutgoingTimeLock: 111,
		AmtToForward:     555,
		LegacyPayload:    true,
	}

	// testRoute combines both hop flavors so a single round trip covers
	// legacy and TLV serialization.
	testRoute = route.Route{
		TotalTimeLock: 123,
		TotalAmount:   1234567,
		SourcePubKey:  route.NewVertex(pub),
		Hops: []*route.Hop{
			testHop2,
			testHop1,
		},
	}
)
|
|
|
|
|
2020-02-07 12:31:27 +03:00
|
|
|
func makeFakeInfo() (*PaymentCreationInfo, *HTLCAttemptInfo) {
|
2019-05-23 21:05:27 +03:00
|
|
|
var preimg lntypes.Preimage
|
|
|
|
copy(preimg[:], rev[:])
|
|
|
|
|
|
|
|
c := &PaymentCreationInfo{
|
|
|
|
PaymentHash: preimg.Hash(),
|
|
|
|
Value: 1000,
|
|
|
|
// Use single second precision to avoid false positive test
|
|
|
|
// failures due to the monotonic time component.
|
2020-02-19 11:53:13 +03:00
|
|
|
CreationTime: time.Unix(time.Now().Unix(), 0),
|
2019-05-23 21:05:27 +03:00
|
|
|
PaymentRequest: []byte(""),
|
|
|
|
}
|
|
|
|
|
2020-02-07 12:31:27 +03:00
|
|
|
a := &HTLCAttemptInfo{
|
2020-02-20 20:08:01 +03:00
|
|
|
AttemptID: 44,
|
|
|
|
SessionKey: priv,
|
|
|
|
Route: testRoute,
|
|
|
|
AttemptTime: time.Unix(100, 0),
|
2019-05-23 21:05:27 +03:00
|
|
|
}
|
|
|
|
return c, a
|
|
|
|
}
|
|
|
|
|
|
|
|
// TestSentPaymentSerialization asserts that PaymentCreationInfo and
// HTLCAttemptInfo survive a round trip through their serialize/deserialize
// functions unchanged.
func TestSentPaymentSerialization(t *testing.T) {
	t.Parallel()

	c, s := makeFakeInfo()

	// Round-trip the creation info first and compare with DeepEqual.
	var b bytes.Buffer
	if err := serializePaymentCreationInfo(&b, c); err != nil {
		t.Fatalf("unable to serialize creation info: %v", err)
	}

	newCreationInfo, err := deserializePaymentCreationInfo(&b)
	if err != nil {
		t.Fatalf("unable to deserialize creation info: %v", err)
	}

	if !reflect.DeepEqual(c, newCreationInfo) {
		t.Fatalf("Payments do not match after "+
			"serialization/deserialization %v vs %v",
			spew.Sdump(c), spew.Sdump(newCreationInfo),
		)
	}

	// Reuse the same buffer to round-trip the attempt info.
	b.Reset()
	if err := serializeHTLCAttemptInfo(&b, s); err != nil {
		t.Fatalf("unable to serialize info: %v", err)
	}

	newWireInfo, err := deserializeHTLCAttemptInfo(&b)
	if err != nil {
		t.Fatalf("unable to deserialize info: %v", err)
	}

	// Copy the original attempt ID over before comparing; presumably the
	// ID is not part of the serialized attempt payload — TODO confirm
	// against serializeHTLCAttemptInfo.
	newWireInfo.AttemptID = s.AttemptID

	// First we verify all the records match up properly, as they aren't
	// able to be properly compared using reflect.DeepEqual.
	err = assertRouteEqual(&s.Route, &newWireInfo.Route)
	if err != nil {
		t.Fatalf("Routes do not match after "+
			"serialization/deserialization: %v", err)
	}

	// Clear routes to allow DeepEqual to compare the remaining fields.
	newWireInfo.Route = route.Route{}
	s.Route = route.Route{}

	if !reflect.DeepEqual(s, newWireInfo) {
		// Nil out the curves so the spew dumps stay readable.
		s.SessionKey.Curve = nil
		newWireInfo.SessionKey.Curve = nil
		t.Fatalf("Payments do not match after "+
			"serialization/deserialization %v vs %v",
			spew.Sdump(s), spew.Sdump(newWireInfo),
		)
	}
}
|
|
|
|
|
|
|
|
// assertRouteEquals compares to routes for equality and returns an error if
|
|
|
|
// they are not equal.
|
|
|
|
func assertRouteEqual(a, b *route.Route) error {
|
2019-12-11 12:52:27 +03:00
|
|
|
if !reflect.DeepEqual(a, b) {
|
2020-02-07 12:31:27 +03:00
|
|
|
return fmt.Errorf("HTLCAttemptInfos don't match: %v vs %v",
|
2019-09-10 16:34:02 +03:00
|
|
|
spew.Sdump(a), spew.Sdump(b))
|
|
|
|
}
|
|
|
|
|
|
|
|
return nil
|
2016-12-05 14:59:36 +03:00
|
|
|
}
|
|
|
|
|
2019-05-08 16:00:19 +03:00
|
|
|
func TestRouteSerialization(t *testing.T) {
|
|
|
|
t.Parallel()
|
|
|
|
|
|
|
|
var b bytes.Buffer
|
2019-06-14 16:01:48 +03:00
|
|
|
if err := SerializeRoute(&b, testRoute); err != nil {
|
2019-05-08 16:00:19 +03:00
|
|
|
t.Fatal(err)
|
|
|
|
}
|
|
|
|
|
|
|
|
r := bytes.NewReader(b.Bytes())
|
2019-06-14 16:01:48 +03:00
|
|
|
route2, err := DeserializeRoute(r)
|
2019-05-08 16:00:19 +03:00
|
|
|
if err != nil {
|
|
|
|
t.Fatal(err)
|
|
|
|
}
|
|
|
|
|
2019-07-31 07:44:50 +03:00
|
|
|
// First we verify all the records match up porperly, as they aren't
|
|
|
|
// able to be properly compared using reflect.DeepEqual.
|
2019-09-10 16:34:02 +03:00
|
|
|
err = assertRouteEqual(&testRoute, &route2)
|
2019-07-31 07:44:50 +03:00
|
|
|
if err != nil {
|
2019-05-08 16:00:19 +03:00
|
|
|
t.Fatalf("routes not equal: \n%v vs \n%v",
|
|
|
|
spew.Sdump(testRoute), spew.Sdump(route2))
|
|
|
|
}
|
|
|
|
}
|
2020-01-17 09:31:07 +03:00
|
|
|
|
|
|
|
// deletePayment removes a payment with paymentHash from the payments database.
|
2020-06-10 13:34:28 +03:00
|
|
|
func deletePayment(t *testing.T, db *DB, paymentHash lntypes.Hash, seqNr uint64) {
|
2020-01-17 09:31:07 +03:00
|
|
|
t.Helper()
|
|
|
|
|
|
|
|
err := kvdb.Update(db, func(tx kvdb.RwTx) error {
|
|
|
|
payments := tx.ReadWriteBucket(paymentsRootBucket)
|
|
|
|
|
2020-06-10 13:34:28 +03:00
|
|
|
// Delete the payment bucket.
|
2020-01-17 09:31:07 +03:00
|
|
|
err := payments.DeleteNestedBucket(paymentHash[:])
|
|
|
|
if err != nil {
|
|
|
|
return err
|
|
|
|
}
|
|
|
|
|
2020-06-10 13:34:28 +03:00
|
|
|
key := make([]byte, 8)
|
|
|
|
byteOrder.PutUint64(key, seqNr)
|
|
|
|
|
|
|
|
// Delete the index that references this payment.
|
|
|
|
indexes := tx.ReadWriteBucket(paymentsIndexBucket)
|
|
|
|
return indexes.Delete(key)
|
2020-10-26 16:06:32 +03:00
|
|
|
}, func() {})
|
2020-01-17 09:31:07 +03:00
|
|
|
|
|
|
|
if err != nil {
|
|
|
|
t.Fatalf("could not delete "+
|
|
|
|
"payment: %v", err)
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
// TestQueryPayments tests retrieval of payments with forwards and reversed
|
|
|
|
// queries.
|
|
|
|
func TestQueryPayments(t *testing.T) {
|
|
|
|
// Define table driven test for QueryPayments.
|
|
|
|
// Test payments have sequence indices [1, 3, 4, 5, 6, 7].
|
2020-06-10 13:34:27 +03:00
|
|
|
// Note that the payment with index 7 has the same payment hash as 6,
|
|
|
|
// and is stored in a nested bucket within payment 6 rather than being
|
|
|
|
// its own entry in the payments bucket. We do this to test retrieval
|
|
|
|
// of legacy payments.
|
2020-01-17 09:31:07 +03:00
|
|
|
tests := []struct {
|
|
|
|
name string
|
|
|
|
query PaymentsQuery
|
|
|
|
firstIndex uint64
|
|
|
|
lastIndex uint64
|
|
|
|
|
|
|
|
// expectedSeqNrs contains the set of sequence numbers we expect
|
|
|
|
// our query to return.
|
|
|
|
expectedSeqNrs []uint64
|
|
|
|
}{
|
|
|
|
{
|
|
|
|
name: "IndexOffset at the end of the payments range",
|
|
|
|
query: PaymentsQuery{
|
|
|
|
IndexOffset: 7,
|
|
|
|
MaxPayments: 7,
|
|
|
|
Reversed: false,
|
|
|
|
IncludeIncomplete: true,
|
|
|
|
},
|
|
|
|
firstIndex: 0,
|
|
|
|
lastIndex: 0,
|
|
|
|
expectedSeqNrs: nil,
|
|
|
|
},
|
|
|
|
{
|
|
|
|
name: "query in forwards order, start at beginning",
|
|
|
|
query: PaymentsQuery{
|
|
|
|
IndexOffset: 0,
|
|
|
|
MaxPayments: 2,
|
|
|
|
Reversed: false,
|
|
|
|
IncludeIncomplete: true,
|
|
|
|
},
|
|
|
|
firstIndex: 1,
|
|
|
|
lastIndex: 3,
|
|
|
|
expectedSeqNrs: []uint64{1, 3},
|
|
|
|
},
|
|
|
|
{
|
|
|
|
name: "query in forwards order, start at end, overflow",
|
|
|
|
query: PaymentsQuery{
|
|
|
|
IndexOffset: 6,
|
|
|
|
MaxPayments: 2,
|
|
|
|
Reversed: false,
|
|
|
|
IncludeIncomplete: true,
|
|
|
|
},
|
|
|
|
firstIndex: 7,
|
|
|
|
lastIndex: 7,
|
|
|
|
expectedSeqNrs: []uint64{7},
|
|
|
|
},
|
|
|
|
{
|
|
|
|
name: "start at offset index outside of payments",
|
|
|
|
query: PaymentsQuery{
|
|
|
|
IndexOffset: 20,
|
|
|
|
MaxPayments: 2,
|
|
|
|
Reversed: false,
|
|
|
|
IncludeIncomplete: true,
|
|
|
|
},
|
|
|
|
firstIndex: 0,
|
|
|
|
lastIndex: 0,
|
|
|
|
expectedSeqNrs: nil,
|
|
|
|
},
|
|
|
|
{
|
|
|
|
name: "overflow in forwards order",
|
|
|
|
query: PaymentsQuery{
|
|
|
|
IndexOffset: 4,
|
|
|
|
MaxPayments: math.MaxUint64,
|
|
|
|
Reversed: false,
|
|
|
|
IncludeIncomplete: true,
|
|
|
|
},
|
|
|
|
firstIndex: 5,
|
|
|
|
lastIndex: 7,
|
|
|
|
expectedSeqNrs: []uint64{5, 6, 7},
|
|
|
|
},
|
|
|
|
{
|
|
|
|
name: "start at offset index outside of payments, " +
|
|
|
|
"reversed order",
|
|
|
|
query: PaymentsQuery{
|
|
|
|
IndexOffset: 9,
|
|
|
|
MaxPayments: 2,
|
|
|
|
Reversed: true,
|
|
|
|
IncludeIncomplete: true,
|
|
|
|
},
|
|
|
|
firstIndex: 6,
|
|
|
|
lastIndex: 7,
|
|
|
|
expectedSeqNrs: []uint64{6, 7},
|
|
|
|
},
|
|
|
|
{
|
|
|
|
name: "query in reverse order, start at end",
|
|
|
|
query: PaymentsQuery{
|
|
|
|
IndexOffset: 0,
|
|
|
|
MaxPayments: 2,
|
|
|
|
Reversed: true,
|
|
|
|
IncludeIncomplete: true,
|
|
|
|
},
|
|
|
|
firstIndex: 6,
|
|
|
|
lastIndex: 7,
|
|
|
|
expectedSeqNrs: []uint64{6, 7},
|
|
|
|
},
|
|
|
|
{
|
|
|
|
name: "query in reverse order, starting in middle",
|
|
|
|
query: PaymentsQuery{
|
|
|
|
IndexOffset: 4,
|
|
|
|
MaxPayments: 2,
|
|
|
|
Reversed: true,
|
|
|
|
IncludeIncomplete: true,
|
|
|
|
},
|
|
|
|
firstIndex: 1,
|
|
|
|
lastIndex: 3,
|
|
|
|
expectedSeqNrs: []uint64{1, 3},
|
|
|
|
},
|
|
|
|
{
|
|
|
|
name: "query in reverse order, starting in middle, " +
|
|
|
|
"with underflow",
|
|
|
|
query: PaymentsQuery{
|
|
|
|
IndexOffset: 4,
|
|
|
|
MaxPayments: 5,
|
|
|
|
Reversed: true,
|
|
|
|
IncludeIncomplete: true,
|
|
|
|
},
|
|
|
|
firstIndex: 1,
|
|
|
|
lastIndex: 3,
|
|
|
|
expectedSeqNrs: []uint64{1, 3},
|
|
|
|
},
|
|
|
|
{
|
|
|
|
name: "all payments in reverse, order maintained",
|
|
|
|
query: PaymentsQuery{
|
|
|
|
IndexOffset: 0,
|
|
|
|
MaxPayments: 7,
|
|
|
|
Reversed: true,
|
|
|
|
IncludeIncomplete: true,
|
|
|
|
},
|
|
|
|
firstIndex: 1,
|
|
|
|
lastIndex: 7,
|
|
|
|
expectedSeqNrs: []uint64{1, 3, 4, 5, 6, 7},
|
|
|
|
},
|
|
|
|
{
|
|
|
|
name: "exclude incomplete payments",
|
|
|
|
query: PaymentsQuery{
|
|
|
|
IndexOffset: 0,
|
|
|
|
MaxPayments: 7,
|
|
|
|
Reversed: false,
|
|
|
|
IncludeIncomplete: false,
|
|
|
|
},
|
2021-01-22 16:04:20 +03:00
|
|
|
firstIndex: 7,
|
|
|
|
lastIndex: 7,
|
|
|
|
expectedSeqNrs: []uint64{7},
|
2020-01-17 09:31:07 +03:00
|
|
|
},
|
|
|
|
{
|
|
|
|
name: "query payments at index gap",
|
|
|
|
query: PaymentsQuery{
|
|
|
|
IndexOffset: 1,
|
|
|
|
MaxPayments: 7,
|
|
|
|
Reversed: false,
|
|
|
|
IncludeIncomplete: true,
|
|
|
|
},
|
|
|
|
firstIndex: 3,
|
|
|
|
lastIndex: 7,
|
|
|
|
expectedSeqNrs: []uint64{3, 4, 5, 6, 7},
|
|
|
|
},
|
2020-06-10 13:34:28 +03:00
|
|
|
{
|
|
|
|
name: "query payments reverse before index gap",
|
|
|
|
query: PaymentsQuery{
|
|
|
|
IndexOffset: 3,
|
|
|
|
MaxPayments: 7,
|
|
|
|
Reversed: true,
|
|
|
|
IncludeIncomplete: true,
|
|
|
|
},
|
|
|
|
firstIndex: 1,
|
|
|
|
lastIndex: 1,
|
|
|
|
expectedSeqNrs: []uint64{1},
|
|
|
|
},
|
|
|
|
{
|
|
|
|
name: "query payments reverse on index gap",
|
|
|
|
query: PaymentsQuery{
|
|
|
|
IndexOffset: 2,
|
|
|
|
MaxPayments: 7,
|
|
|
|
Reversed: true,
|
|
|
|
IncludeIncomplete: true,
|
|
|
|
},
|
|
|
|
firstIndex: 1,
|
|
|
|
lastIndex: 1,
|
|
|
|
expectedSeqNrs: []uint64{1},
|
|
|
|
},
|
|
|
|
{
|
|
|
|
name: "query payments forward on index gap",
|
|
|
|
query: PaymentsQuery{
|
|
|
|
IndexOffset: 2,
|
|
|
|
MaxPayments: 2,
|
|
|
|
Reversed: false,
|
|
|
|
IncludeIncomplete: true,
|
|
|
|
},
|
|
|
|
firstIndex: 3,
|
|
|
|
lastIndex: 4,
|
|
|
|
expectedSeqNrs: []uint64{3, 4},
|
|
|
|
},
|
2020-01-17 09:31:07 +03:00
|
|
|
}
|
|
|
|
|
|
|
|
for _, tt := range tests {
|
|
|
|
tt := tt
|
|
|
|
t.Run(tt.name, func(t *testing.T) {
|
|
|
|
t.Parallel()
|
|
|
|
|
2020-06-24 13:50:11 +03:00
|
|
|
db, cleanup, err := MakeTestDB()
|
2020-01-17 09:31:07 +03:00
|
|
|
if err != nil {
|
|
|
|
t.Fatalf("unable to init db: %v", err)
|
|
|
|
}
|
2020-06-10 13:34:28 +03:00
|
|
|
defer cleanup()
|
|
|
|
|
|
|
|
// Make a preliminary query to make sure it's ok to
|
|
|
|
// query when we have no payments.
|
|
|
|
resp, err := db.QueryPayments(tt.query)
|
|
|
|
require.NoError(t, err)
|
|
|
|
require.Len(t, resp.Payments, 0)
|
2020-01-17 09:31:07 +03:00
|
|
|
|
|
|
|
// Populate the database with a set of test payments.
|
2020-06-10 13:34:27 +03:00
|
|
|
// We create 6 original payments, deleting the payment
|
|
|
|
// at index 2 so that we cover the case where sequence
|
|
|
|
// numbers are missing. We also add a duplicate payment
|
|
|
|
// to the last payment added to test the legacy case
|
|
|
|
// where we have duplicates in the nested duplicates
|
|
|
|
// bucket.
|
|
|
|
nonDuplicatePayments := 6
|
2020-01-17 09:31:07 +03:00
|
|
|
pControl := NewPaymentControl(db)
|
|
|
|
|
2020-06-10 13:34:27 +03:00
|
|
|
for i := 0; i < nonDuplicatePayments; i++ {
|
2020-01-17 09:31:07 +03:00
|
|
|
// Generate a test payment.
|
2021-01-22 16:04:20 +03:00
|
|
|
info, _, preimg, err := genInfo()
|
2020-01-17 09:31:07 +03:00
|
|
|
if err != nil {
|
|
|
|
t.Fatalf("unable to create test "+
|
|
|
|
"payment: %v", err)
|
|
|
|
}
|
|
|
|
|
|
|
|
// Create a new payment entry in the database.
|
|
|
|
err = pControl.InitPayment(info.PaymentHash, info)
|
|
|
|
if err != nil {
|
|
|
|
t.Fatalf("unable to initialize "+
|
|
|
|
"payment in database: %v", err)
|
|
|
|
}
|
|
|
|
|
|
|
|
// Immediately delete the payment with index 2.
|
|
|
|
if i == 1 {
|
2020-06-10 13:34:28 +03:00
|
|
|
pmt, err := pControl.FetchPayment(
|
|
|
|
info.PaymentHash,
|
|
|
|
)
|
|
|
|
require.NoError(t, err)
|
|
|
|
|
|
|
|
deletePayment(t, db, info.PaymentHash,
|
|
|
|
pmt.SequenceNum)
|
2020-01-17 09:31:07 +03:00
|
|
|
}
|
2020-06-10 13:34:27 +03:00
|
|
|
|
|
|
|
// If we are on the last payment entry, add a
|
|
|
|
// duplicate payment with sequence number equal
|
2021-01-22 16:04:20 +03:00
|
|
|
// to the parent payment + 1. Note that
|
|
|
|
// duplicate payments will always be succeeded.
|
2020-06-10 13:34:27 +03:00
|
|
|
if i == (nonDuplicatePayments - 1) {
|
|
|
|
pmt, err := pControl.FetchPayment(
|
|
|
|
info.PaymentHash,
|
|
|
|
)
|
|
|
|
require.NoError(t, err)
|
|
|
|
|
|
|
|
appendDuplicatePayment(
|
|
|
|
t, pControl.db,
|
|
|
|
info.PaymentHash,
|
|
|
|
pmt.SequenceNum+1,
|
2021-01-22 16:04:20 +03:00
|
|
|
preimg,
|
2020-06-10 13:34:27 +03:00
|
|
|
)
|
|
|
|
}
|
2020-01-17 09:31:07 +03:00
|
|
|
}
|
|
|
|
|
|
|
|
// Fetch all payments in the database.
|
|
|
|
allPayments, err := db.FetchPayments()
|
|
|
|
if err != nil {
|
|
|
|
t.Fatalf("payments could not be fetched from "+
|
|
|
|
"database: %v", err)
|
|
|
|
}
|
|
|
|
|
|
|
|
if len(allPayments) != 6 {
|
|
|
|
t.Fatalf("Number of payments received does not "+
|
|
|
|
"match expected one. Got %v, want %v.",
|
|
|
|
len(allPayments), 6)
|
|
|
|
}
|
|
|
|
|
|
|
|
querySlice, err := db.QueryPayments(tt.query)
|
|
|
|
if err != nil {
|
|
|
|
t.Fatalf("unexpected error: %v", err)
|
|
|
|
}
|
|
|
|
if tt.firstIndex != querySlice.FirstIndexOffset ||
|
|
|
|
tt.lastIndex != querySlice.LastIndexOffset {
|
|
|
|
t.Errorf("First or last index does not match "+
|
|
|
|
"expected index. Want (%d, %d), got (%d, %d).",
|
|
|
|
tt.firstIndex, tt.lastIndex,
|
|
|
|
querySlice.FirstIndexOffset,
|
|
|
|
querySlice.LastIndexOffset)
|
|
|
|
}
|
|
|
|
|
|
|
|
if len(querySlice.Payments) != len(tt.expectedSeqNrs) {
|
|
|
|
t.Errorf("expected: %v payments, got: %v",
|
2021-01-22 16:04:20 +03:00
|
|
|
len(tt.expectedSeqNrs), len(querySlice.Payments))
|
2020-01-17 09:31:07 +03:00
|
|
|
}
|
|
|
|
|
|
|
|
for i, seqNr := range tt.expectedSeqNrs {
|
|
|
|
q := querySlice.Payments[i]
|
|
|
|
if seqNr != q.SequenceNum {
|
|
|
|
t.Errorf("sequence numbers do not match, "+
|
|
|
|
"got %v, want %v", q.SequenceNum, seqNr)
|
|
|
|
}
|
|
|
|
}
|
|
|
|
})
|
|
|
|
}
|
|
|
|
}
|
2020-06-10 13:34:27 +03:00
|
|
|
|
2020-06-10 13:34:28 +03:00
|
|
|
// TestFetchPaymentWithSequenceNumber tests lookup of payments with their
// sequence number. It sets up one payment with no duplicates, and another with
// two duplicates in its duplicates bucket then uses these payments to test the
// case where a specific duplicate is not found and the duplicates bucket is not
// present when we expect it to be.
func TestFetchPaymentWithSequenceNumber(t *testing.T) {
	db, cleanup, err := MakeTestDB()
	require.NoError(t, err)

	defer cleanup()

	pControl := NewPaymentControl(db)

	// Generate a test payment which does not have duplicates.
	noDuplicates, _, _, err := genInfo()
	require.NoError(t, err)

	// Create a new payment entry in the database.
	err = pControl.InitPayment(noDuplicates.PaymentHash, noDuplicates)
	require.NoError(t, err)

	// Fetch the payment so we can get its sequence nr.
	noDuplicatesPayment, err := pControl.FetchPayment(
		noDuplicates.PaymentHash,
	)
	require.NoError(t, err)

	// Generate a test payment which we will add duplicates to.
	hasDuplicates, _, preimg, err := genInfo()
	require.NoError(t, err)

	// Create a new payment entry in the database.
	err = pControl.InitPayment(hasDuplicates.PaymentHash, hasDuplicates)
	require.NoError(t, err)

	// Fetch the payment so we can get its sequence nr.
	hasDuplicatesPayment, err := pControl.FetchPayment(
		hasDuplicates.PaymentHash,
	)
	require.NoError(t, err)

	// We declare the sequence numbers used here so that we can reference
	// them in tests.
	var (
		duplicateOneSeqNr = hasDuplicatesPayment.SequenceNum + 1
		duplicateTwoSeqNr = hasDuplicatesPayment.SequenceNum + 2
	)

	// Add two duplicates to our second payment.
	appendDuplicatePayment(
		t, db, hasDuplicates.PaymentHash, duplicateOneSeqNr, preimg,
	)
	appendDuplicatePayment(
		t, db, hasDuplicates.PaymentHash, duplicateTwoSeqNr, preimg,
	)

	tests := []struct {
		name           string
		paymentHash    lntypes.Hash
		sequenceNumber uint64
		expectedErr    error
	}{
		{
			name:           "lookup payment without duplicates",
			paymentHash:    noDuplicates.PaymentHash,
			sequenceNumber: noDuplicatesPayment.SequenceNum,
			expectedErr:    nil,
		},
		{
			name:           "lookup payment with duplicates",
			paymentHash:    hasDuplicates.PaymentHash,
			sequenceNumber: hasDuplicatesPayment.SequenceNum,
			expectedErr:    nil,
		},
		{
			name:           "lookup first duplicate",
			paymentHash:    hasDuplicates.PaymentHash,
			sequenceNumber: duplicateOneSeqNr,
			expectedErr:    nil,
		},
		{
			name:           "lookup second duplicate",
			paymentHash:    hasDuplicates.PaymentHash,
			sequenceNumber: duplicateTwoSeqNr,
			expectedErr:    nil,
		},
		{
			name:           "lookup non-existent duplicate",
			paymentHash:    hasDuplicates.PaymentHash,
			sequenceNumber: 999999,
			expectedErr:    ErrDuplicateNotFound,
		},
		{
			name:           "lookup duplicate, no duplicates bucket",
			paymentHash:    noDuplicates.PaymentHash,
			sequenceNumber: duplicateTwoSeqNr,
			expectedErr:    ErrNoDuplicateBucket,
		},
	}

	for _, test := range tests {
		test := test

		t.Run(test.name, func(t *testing.T) {
			// Run the lookup inside a write transaction and
			// compare the error it yields with our expectation.
			err := kvdb.Update(db,
				func(tx walletdb.ReadWriteTx) error {

					var seqNrBytes [8]byte
					byteOrder.PutUint64(
						seqNrBytes[:], test.sequenceNumber,
					)

					_, err := fetchPaymentWithSequenceNumber(
						tx, test.paymentHash, seqNrBytes[:],
					)
					return err
				}, func() {})
			require.Equal(t, test.expectedErr, err)
		})
	}
}
|
|
|
|
|
2020-06-10 13:34:27 +03:00
|
|
|
// appendDuplicatePayment adds a duplicate payment to an existing payment. Note
// that this function requires a unique sequence number.
//
// This code is *only* intended to replicate legacy duplicate payments in lnd,
// our current schema does not allow duplicates.
func appendDuplicatePayment(t *testing.T, db *DB, paymentHash lntypes.Hash,
	seqNr uint64, preImg lntypes.Preimage) {

	err := kvdb.Update(db, func(tx walletdb.ReadWriteTx) error {
		bucket, err := fetchPaymentBucketUpdate(
			tx, paymentHash,
		)
		if err != nil {
			return err
		}

		// Create the duplicates bucket if it is not
		// present.
		dup, err := bucket.CreateBucketIfNotExists(
			duplicatePaymentsBucket,
		)
		if err != nil {
			return err
		}

		var sequenceKey [8]byte
		byteOrder.PutUint64(sequenceKey[:], seqNr)

		// Write a single duplicate payment entry under the
		// caller-provided sequence number.
		putDuplicatePayment(t, dup, sequenceKey[:], paymentHash, preImg)

		// Finally, once we have created our entry we add an index for
		// it.
		err = createPaymentIndexEntry(tx, sequenceKey[:], paymentHash)
		require.NoError(t, err)

		return nil
	}, func() {})
	if err != nil {
		t.Fatalf("could not create payment: %v", err)
	}
}
|
|
|
|
|
|
|
|
// putDuplicatePayment creates a duplicate payment in the duplicates bucket
// provided with the minimal information required for successful reading.
func putDuplicatePayment(t *testing.T, duplicateBucket kvdb.RwBucket,
	sequenceKey []byte, paymentHash lntypes.Hash,
	preImg lntypes.Preimage) {

	paymentBucket, err := duplicateBucket.CreateBucketIfNotExists(
		sequenceKey,
	)
	require.NoError(t, err)

	err = paymentBucket.Put(duplicatePaymentSequenceKey, sequenceKey)
	require.NoError(t, err)

	// Generate fake information for the duplicate payment.
	info, _, _, err := genInfo()
	require.NoError(t, err)

	// Write the payment info to disk under the creation info key. This code
	// is copied rather than using serializePaymentCreationInfo to ensure
	// we always write in the legacy format used by duplicate payments.
	var b bytes.Buffer
	var scratch [8]byte
	_, err = b.Write(paymentHash[:])
	require.NoError(t, err)

	byteOrder.PutUint64(scratch[:], uint64(info.Value))
	_, err = b.Write(scratch[:])
	require.NoError(t, err)

	err = serializeTime(&b, info.CreationTime)
	require.NoError(t, err)

	// Legacy format: a zero-length payment request (4-byte length prefix
	// of 0 with no payload following — TODO confirm against the legacy
	// deserialization code).
	byteOrder.PutUint32(scratch[:4], 0)
	_, err = b.Write(scratch[:4])
	require.NoError(t, err)

	// Store the serialized PaymentCreationInfo.
	err = paymentBucket.Put(duplicatePaymentCreationInfoKey, b.Bytes())
	require.NoError(t, err)

	// Duplicate payments are only stored for successes, so add the
	// preimage.
	err = paymentBucket.Put(duplicatePaymentSettleInfoKey, preImg[:])
	require.NoError(t, err)
}
|