github.com/decred/dcrlnd@v0.7.6/lntest/itest/lnd_forward_interceptor_test.go (about)

     1  package itest
     2  
     3  import (
     4  	"context"
     5  	"encoding/hex"
     6  	"fmt"
     7  	"sync"
     8  	"time"
     9  
    10  	"github.com/decred/dcrd/dcrutil/v4"
    11  	"github.com/decred/dcrd/wire"
    12  	"github.com/decred/dcrlnd/chainreg"
    13  	"github.com/decred/dcrlnd/lnrpc"
    14  	"github.com/decred/dcrlnd/lnrpc/routerrpc"
    15  	"github.com/decred/dcrlnd/lntest"
    16  	"github.com/decred/dcrlnd/lntest/wait"
    17  	"github.com/decred/dcrlnd/routing/route"
    18  	"github.com/stretchr/testify/require"
    19  	"google.golang.org/grpc/codes"
    20  	"google.golang.org/grpc/status"
    21  )
    22  
var (
	// customTestKey and customTestValue are attached as a custom record
	// to the first hop of intercepted payments so the interceptor can
	// verify that custom records survive the forwarding path.
	customTestKey   uint64 = 394829
	customTestValue        = []byte{1, 3, 5}
)
    27  
// interceptorTestCase describes a single payment routed through the
// interceptor, the action the interceptor takes on it, and whether the
// HTLC should be held (left unresolved) by the interceptor.
type interceptorTestCase struct {
	// amountMsat is the invoice amount in milli-atoms.
	amountMsat int64

	// payAddr is the payment address decoded from the invoice.
	payAddr []byte

	// invoice is the invoice created on carol for this test case.
	invoice *lnrpc.Invoice

	// shouldHold indicates the interceptor should not resolve the HTLC,
	// keeping the payment in flight.
	shouldHold bool

	// interceptorAction is the resolution applied to non-held HTLCs.
	interceptorAction routerrpc.ResolveHoldForwardAction
}
    35  
    36  // testForwardInterceptorDedupHtlc tests that upon reconnection, duplicate
    37  // HTLCs aren't re-notified using the HTLC interceptor API.
    38  func testForwardInterceptorDedupHtlc(net *lntest.NetworkHarness, t *harnessTest) {
    39  	// Initialize the test context with 3 connected nodes.
    40  	alice := net.NewNode(t.t, "alice", nil)
    41  	defer shutdownAndAssert(net, t, alice)
    42  
    43  	bob := net.NewNode(t.t, "bob", nil)
    44  	defer shutdownAndAssert(net, t, alice)
    45  
    46  	carol := net.NewNode(t.t, "carol", nil)
    47  	defer shutdownAndAssert(net, t, alice)
    48  
    49  	tc := newInterceptorTestContext(t, net, alice, bob, carol)
    50  
    51  	const (
    52  		chanAmt = dcrutil.Amount(300000)
    53  	)
    54  
    55  	// Open and wait for channels.
    56  	tc.openChannel(tc.alice, tc.bob, chanAmt)
    57  	tc.openChannel(tc.bob, tc.carol, chanAmt)
    58  	defer tc.closeChannels()
    59  	tc.waitForChannels()
    60  
    61  	ctxb := context.Background()
    62  	ctxt, cancelInterceptor := context.WithCancel(ctxb)
    63  	interceptor, err := tc.bob.RouterClient.HtlcInterceptor(ctxt)
    64  	require.NoError(tc.t.t, err, "failed to create HtlcInterceptor")
    65  
    66  	addResponse, err := tc.carol.AddInvoice(ctxb, &lnrpc.Invoice{
    67  		ValueMAtoms: 1000,
    68  	})
    69  	require.NoError(tc.t.t, err, "unable to add invoice")
    70  
    71  	invoice, err := tc.carol.LookupInvoice(ctxb, &lnrpc.PaymentHash{
    72  		RHashStr: hex.EncodeToString(addResponse.RHash),
    73  	})
    74  	require.NoError(tc.t.t, err, "unable to find invoice")
    75  
    76  	// We start the htlc interceptor with a simple implementation that
    77  	// saves all intercepted packets. These packets are held to simulate a
    78  	// pending payment.
    79  	interceptedPacketstMap := &sync.Map{}
    80  	var wg sync.WaitGroup
    81  	wg.Add(1)
    82  	go func() {
    83  		defer wg.Done()
    84  		for {
    85  			packet, err := interceptor.Recv()
    86  			if err != nil {
    87  				// If it is just the error result of the
    88  				// context cancellation the we exit silently.
    89  				status, ok := status.FromError(err)
    90  				if ok && status.Code() == codes.Canceled {
    91  					return
    92  				}
    93  
    94  				// Otherwise it an unexpected error, we fail
    95  				// the test.
    96  				require.NoError(
    97  					tc.t.t, err,
    98  					"unexpected error in interceptor.Recv()",
    99  				)
   100  				return
   101  			}
   102  			interceptedPacketstMap.Store(
   103  				packet.IncomingCircuitKey.HtlcId, packet,
   104  			)
   105  		}
   106  	}()
   107  
   108  	// We initiate a payment from Alice.
   109  	wg.Add(1)
   110  	go func() {
   111  		defer wg.Done()
   112  		_, _ = tc.sendAliceToCarolPayment(
   113  			ctxb, 1000,
   114  			invoice.RHash, invoice.PaymentAddr,
   115  		)
   116  	}()
   117  
   118  	// Here we should wait for the channel to contain a pending htlc, and
   119  	// also be shown as being active.
   120  	err = wait.Predicate(func() bool {
   121  		channels, err := tc.bob.ListChannels(ctxt, &lnrpc.ListChannelsRequest{
   122  			ActiveOnly: true,
   123  			Peer:       tc.alice.PubKey[:],
   124  		})
   125  		if err != nil {
   126  			return false
   127  		}
   128  		if len(channels.Channels) == 0 {
   129  			return false
   130  		}
   131  
   132  		aliceChan := channels.Channels[0]
   133  		if len(aliceChan.PendingHtlcs) == 0 {
   134  			return false
   135  		}
   136  		return aliceChan.Active
   137  	}, defaultTimeout)
   138  	require.NoError(
   139  		tc.t.t, err, "alice <> bob channel pending htlc never arrived",
   140  	)
   141  
   142  	// At this point we want to make bob's link send all pending htlcs to
   143  	// the switch again. We force this behavior by disconnecting and
   144  	// connecting to the peer.
   145  	if err := tc.net.DisconnectNodes(tc.bob, tc.alice); err != nil {
   146  		tc.t.Fatalf("failed to disconnect alice and bob")
   147  	}
   148  	tc.net.EnsureConnected(tc.t.t, tc.bob, tc.alice)
   149  
   150  	// Here we wait for the channel to be active again.
   151  	err = wait.Predicate(func() bool {
   152  		req := &lnrpc.ListChannelsRequest{
   153  			ActiveOnly: true,
   154  			Peer:       tc.alice.PubKey[:],
   155  		}
   156  
   157  		channels, err := tc.bob.ListChannels(ctxt, req)
   158  		return err == nil && len(channels.Channels) > 0
   159  	}, defaultTimeout)
   160  	require.NoError(
   161  		tc.t.t, err, "alice <> bob channel didn't re-activate",
   162  	)
   163  
   164  	// Now that the channel is active we make sure the test passes as
   165  	// expected.
   166  	payments, err := tc.alice.ListPayments(ctxb, &lnrpc.ListPaymentsRequest{
   167  		IncludeIncomplete: true,
   168  	})
   169  	require.NoError(tc.t.t, err, "failed to fetch payment")
   170  
   171  	// We expect one in flight payment since we held the htlcs.
   172  	require.Equal(tc.t.t, len(payments.Payments), 1)
   173  	require.Equal(tc.t.t, payments.Payments[0].Status, lnrpc.Payment_IN_FLIGHT)
   174  
   175  	// We now fail all htlcs to cancel the payment.
   176  	packetsCount := 0
   177  	interceptedPacketstMap.Range(func(_, packet interface{}) bool {
   178  		p := packet.(*routerrpc.ForwardHtlcInterceptRequest)
   179  		_ = interceptor.Send(&routerrpc.ForwardHtlcInterceptResponse{
   180  			IncomingCircuitKey: p.IncomingCircuitKey,
   181  			Action:             routerrpc.ResolveHoldForwardAction_FAIL,
   182  		})
   183  		packetsCount++
   184  		return true
   185  	})
   186  
   187  	// At this point if we have more than one held htlcs then we should
   188  	// fail.  This means we hold the same htlc twice which is a risk we
   189  	// want to eliminate. If we don't have the same htlc twice in theory we
   190  	// can cancel one and settle the other by mistake.
   191  	require.Equal(tc.t.t, packetsCount, 1)
   192  
   193  	cancelInterceptor()
   194  	wg.Wait()
   195  }
   196  
   197  // testForwardInterceptorBasic tests the forward interceptor RPC layer.
   198  // The test creates a cluster of 3 connected nodes: Alice -> Bob -> Carol
   199  // Alice sends 4 different payments to Carol while the interceptor handles
   200  // differently the htlcs.
   201  // The test ensures that:
   202  //  1. Intercepted failed htlcs result in no payment (invoice is not settled).
   203  //  2. Intercepted resumed htlcs result in a payment (invoice is settled).
   204  //  3. Intercepted held htlcs result in no payment (invoice is not settled).
   205  //  4. When Interceptor disconnects it resumes all held htlcs, which result in
   206  //     valid payment (invoice is settled).
   207  func testForwardInterceptorBasic(net *lntest.NetworkHarness, t *harnessTest) {
   208  	// Initialize the test context with 3 connected nodes.
   209  	alice := net.NewNode(t.t, "alice", nil)
   210  	defer shutdownAndAssert(net, t, alice)
   211  
   212  	bob := net.NewNode(t.t, "bob", nil)
   213  	defer shutdownAndAssert(net, t, alice)
   214  
   215  	carol := net.NewNode(t.t, "carol", nil)
   216  	defer shutdownAndAssert(net, t, alice)
   217  
   218  	testContext := newInterceptorTestContext(t, net, alice, bob, carol)
   219  
   220  	const (
   221  		chanAmt = dcrutil.Amount(300000)
   222  	)
   223  
   224  	// Open and wait for channels.
   225  	testContext.openChannel(testContext.alice, testContext.bob, chanAmt)
   226  	testContext.openChannel(testContext.bob, testContext.carol, chanAmt)
   227  	defer testContext.closeChannels()
   228  	testContext.waitForChannels()
   229  
   230  	// Connect the interceptor.
   231  	ctxb := context.Background()
   232  	ctxt, cancelInterceptor := context.WithTimeout(ctxb, defaultTimeout)
   233  	interceptor, err := testContext.bob.RouterClient.HtlcInterceptor(ctxt)
   234  	require.NoError(t.t, err, "failed to create HtlcInterceptor")
   235  
   236  	// Prepare the test cases.
   237  	testCases := testContext.prepareTestCases()
   238  
   239  	// A channel for the interceptor go routine to send the requested packets.
   240  	interceptedChan := make(chan *routerrpc.ForwardHtlcInterceptRequest,
   241  		len(testCases))
   242  
   243  	// Run the interceptor loop in its own go routine.
   244  	var wg sync.WaitGroup
   245  	wg.Add(1)
   246  	go func() {
   247  		defer wg.Done()
   248  		for {
   249  			request, err := interceptor.Recv()
   250  			if err != nil {
   251  				// If it is  just the error result of the context cancellation
   252  				// the we exit silently.
   253  				status, ok := status.FromError(err)
   254  				if ok && status.Code() == codes.Canceled {
   255  					return
   256  				}
   257  				// Otherwise it an unexpected error, we fail the test.
   258  				require.NoError(t.t, err, "unexpected error in interceptor.Recv()")
   259  				return
   260  			}
   261  			interceptedChan <- request
   262  		}
   263  	}()
   264  
   265  	// For each test case make sure we initiate a payment from Alice to Carol
   266  	// routed through Bob. For each payment we also test its final status
   267  	// according to the interceptorAction specified in the test case.
   268  	wg.Add(1)
   269  	go func() {
   270  		defer wg.Done()
   271  		for _, tc := range testCases {
   272  			attempt, err := testContext.sendAliceToCarolPayment(
   273  				context.Background(), tc.invoice.ValueMAtoms,
   274  				tc.invoice.RHash, tc.payAddr,
   275  			)
   276  
   277  			if t.t.Failed() {
   278  				return
   279  			}
   280  			if err != nil {
   281  				require.NoError(t.t, err, "failed to send payment")
   282  			}
   283  
   284  			switch tc.interceptorAction {
   285  			// For 'fail' interceptor action we make sure the payment failed.
   286  			case routerrpc.ResolveHoldForwardAction_FAIL:
   287  				require.Equal(t.t, lnrpc.HTLCAttempt_FAILED,
   288  					attempt.Status, "expected payment to fail")
   289  
   290  				// Assert that we get a temporary channel
   291  				// failure which has a channel update.
   292  				require.NotNil(t.t, attempt.Failure)
   293  				require.NotNil(t.t, attempt.Failure.ChannelUpdate)
   294  
   295  				require.Equal(t.t,
   296  					lnrpc.Failure_TEMPORARY_CHANNEL_FAILURE,
   297  					attempt.Failure.Code)
   298  
   299  			// For settle and resume we make sure the payment is successful.
   300  			case routerrpc.ResolveHoldForwardAction_SETTLE:
   301  				fallthrough
   302  
   303  			case routerrpc.ResolveHoldForwardAction_RESUME:
   304  				require.Equal(t.t, lnrpc.HTLCAttempt_SUCCEEDED,
   305  					attempt.Status, "expected payment to succeed")
   306  			}
   307  		}
   308  	}()
   309  
   310  	// We make sure here the interceptor has processed all packets before we
   311  	// check the payment statuses.
   312  	for i := 0; i < len(testCases); i++ {
   313  		select {
   314  		case request := <-interceptedChan:
   315  			// Assert sanity of informational packet data.
   316  			require.NotZero(t.t, request.OutgoingRequestedChanId)
   317  			require.NotZero(t.t, request.IncomingExpiry)
   318  			require.NotZero(t.t, request.IncomingAmountMAtoms)
   319  
   320  			require.Less(
   321  				t.t,
   322  				request.OutgoingExpiry, request.IncomingExpiry,
   323  			)
   324  			require.Less(
   325  				t.t,
   326  				request.OutgoingAmountMAtoms,
   327  				request.IncomingAmountMAtoms,
   328  			)
   329  
   330  			value, ok := request.CustomRecords[customTestKey]
   331  			require.True(t.t, ok, "expected custom record")
   332  			require.Equal(t.t, customTestValue, value)
   333  
   334  			testCase := testCases[i]
   335  
   336  			// For held packets we ignore, keeping them in hold status.
   337  			if testCase.shouldHold {
   338  				continue
   339  			}
   340  
   341  			// For all other packets we resolve according to the test case.
   342  			_ = interceptor.Send(&routerrpc.ForwardHtlcInterceptResponse{
   343  				IncomingCircuitKey: request.IncomingCircuitKey,
   344  				Action:             testCase.interceptorAction,
   345  				Preimage:           testCase.invoice.RPreimage,
   346  			})
   347  		case <-time.After(defaultTimeout):
   348  			t.Fatalf("response from interceptor was not received %v", i)
   349  		}
   350  	}
   351  
   352  	// At this point we are left with the held packets, we want to make sure
   353  	// each one of them has a corresponding 'in-flight' payment at
   354  	// Alice's node.
   355  	payments, err := testContext.alice.ListPayments(context.Background(),
   356  		&lnrpc.ListPaymentsRequest{IncludeIncomplete: true})
   357  	require.NoError(t.t, err, "failed to fetch payment")
   358  
   359  	for _, testCase := range testCases {
   360  		if testCase.shouldHold {
   361  			hashStr := hex.EncodeToString(testCase.invoice.RHash)
   362  			var foundPayment *lnrpc.Payment
   363  			expectedAmt := testCase.invoice.ValueMAtoms
   364  			for _, p := range payments.Payments {
   365  				if p.PaymentHash == hashStr {
   366  					foundPayment = p
   367  					break
   368  				}
   369  			}
   370  			require.NotNil(t.t, foundPayment, fmt.Sprintf("expected "+
   371  				"to find pending payment for held htlc %v",
   372  				hashStr))
   373  			require.Equal(t.t, lnrpc.Payment_IN_FLIGHT,
   374  				foundPayment.Status, "expected payment to be "+
   375  					"in flight")
   376  			require.Equal(t.t, expectedAmt, foundPayment.ValueMAtoms,
   377  				"incorrect in flight amount")
   378  		}
   379  	}
   380  
   381  	// Disconnect interceptor should cause resume held packets.
   382  	// After that we wait for all go routines to finish, including the one
   383  	// that tests the payment final status for the held payment.
   384  	cancelInterceptor()
   385  	wg.Wait()
   386  
   387  	// Hopefully will be fixed by upstream PR 6825.
   388  	time.Sleep(time.Millisecond * 1000)
   389  
   390  	// Verify that we don't get notified about already completed HTLCs
   391  	// We do that by restarting alice, the sender the HTLCs. Under
   392  	// https://github.com/decred/dcrlnd/issues/5115
   393  	// this should cause all HTLCs settled or failed by the interceptor to renotify.
   394  	restartAlice, err := net.SuspendNode(alice)
   395  	require.NoError(t.t, err, "failed to suspend alice")
   396  
   397  	ctxt, cancelInterceptor = context.WithTimeout(ctxb, defaultTimeout)
   398  	defer cancelInterceptor()
   399  	interceptor, err = testContext.bob.RouterClient.HtlcInterceptor(ctxt)
   400  	require.NoError(t.t, err, "failed to create HtlcInterceptor")
   401  
   402  	err = restartAlice()
   403  	require.NoError(t.t, err, "failed to restart alice")
   404  
   405  	go func() {
   406  		request, err := interceptor.Recv()
   407  		if err != nil {
   408  			// If it is  just the error result of the context cancellation
   409  			// the we exit silently.
   410  			status, ok := status.FromError(err)
   411  			if ok && status.Code() == codes.Canceled {
   412  				return
   413  			}
   414  			// Otherwise it an unexpected error, we fail the test.
   415  			require.NoError(
   416  				t.t, err, "unexpected error in interceptor.Recv()",
   417  			)
   418  			return
   419  		}
   420  
   421  		require.Nil(t.t, request, "no more intercepts should arrive")
   422  	}()
   423  
   424  	err = wait.Predicate(func() bool {
   425  		channels, err := bob.ListChannels(ctxt, &lnrpc.ListChannelsRequest{
   426  			ActiveOnly: true, Peer: alice.PubKey[:],
   427  		})
   428  		return err == nil && len(channels.Channels) > 0
   429  	}, defaultTimeout)
   430  	require.NoError(t.t, err, "alice <> bob channel didnt re-activate")
   431  
   432  }
   433  
   434  // interceptorTestContext is a helper struct to hold the test context and
   435  // provide the needed functionality.
// interceptorTestContext is a helper struct to hold the test context and
// provide the needed functionality.
type interceptorTestContext struct {
	// t is the harness test driving assertions.
	t *harnessTest

	// net is the network harness the nodes run in.
	net *lntest.NetworkHarness

	// Keep a list of all our active channels.
	networkChans []*lnrpc.ChannelPoint

	// closeChannelFuncs holds, in open order, one closure per opened
	// channel; closeChannels invokes them all.
	closeChannelFuncs []func()

	// alice, bob and carol form the Alice -> Bob -> Carol route used by
	// the interceptor tests.
	alice, bob, carol *lntest.HarnessNode

	// nodes lists all of the above nodes for bulk operations.
	nodes []*lntest.HarnessNode
}
   447  
   448  func newInterceptorTestContext(t *harnessTest,
   449  	net *lntest.NetworkHarness,
   450  	alice, bob, carol *lntest.HarnessNode) *interceptorTestContext {
   451  
   452  	// Connect nodes
   453  	nodes := []*lntest.HarnessNode{alice, bob, carol}
   454  	for i := 0; i < len(nodes); i++ {
   455  		for j := i + 1; j < len(nodes); j++ {
   456  			net.EnsureConnected(t.t, nodes[i], nodes[j])
   457  		}
   458  	}
   459  
   460  	ctx := interceptorTestContext{
   461  		t:     t,
   462  		net:   net,
   463  		alice: alice,
   464  		bob:   bob,
   465  		carol: carol,
   466  		nodes: nodes,
   467  	}
   468  
   469  	return &ctx
   470  }
   471  
   472  // prepareTestCases prepares 4 tests:
   473  // 1. failed htlc.
   474  // 2. resumed htlc.
   475  // 3. settling htlc externally.
   476  // 4. held htlc that is resumed later.
   477  func (c *interceptorTestContext) prepareTestCases() []*interceptorTestCase {
   478  	cases := []*interceptorTestCase{
   479  		{amountMsat: 1000, shouldHold: false,
   480  			interceptorAction: routerrpc.ResolveHoldForwardAction_FAIL},
   481  		{amountMsat: 1000, shouldHold: false,
   482  			interceptorAction: routerrpc.ResolveHoldForwardAction_RESUME},
   483  		{amountMsat: 1000, shouldHold: false,
   484  			interceptorAction: routerrpc.ResolveHoldForwardAction_SETTLE},
   485  		{amountMsat: 1000, shouldHold: true,
   486  			interceptorAction: routerrpc.ResolveHoldForwardAction_RESUME},
   487  	}
   488  
   489  	for _, t := range cases {
   490  		addResponse, err := c.carol.AddInvoice(context.Background(), &lnrpc.Invoice{
   491  			ValueMAtoms: t.amountMsat,
   492  		})
   493  		require.NoError(c.t.t, err, "unable to add invoice")
   494  
   495  		invoice, err := c.carol.LookupInvoice(context.Background(), &lnrpc.PaymentHash{
   496  			RHashStr: hex.EncodeToString(addResponse.RHash),
   497  		})
   498  		require.NoError(c.t.t, err, "unable to find invoice")
   499  
   500  		// We'll need to also decode the returned invoice so we can
   501  		// grab the payment address which is now required for ALL
   502  		// payments.
   503  		payReq, err := c.carol.DecodePayReq(context.Background(), &lnrpc.PayReqString{
   504  			PayReq: invoice.PaymentRequest,
   505  		})
   506  		require.NoError(c.t.t, err, "unable to decode invoice")
   507  
   508  		t.invoice = invoice
   509  		t.payAddr = payReq.PaymentAddr
   510  	}
   511  	return cases
   512  }
   513  
   514  func (c *interceptorTestContext) openChannel(from, to *lntest.HarnessNode,
   515  	chanSize dcrutil.Amount) {
   516  
   517  	c.net.SendCoins(c.t.t, dcrutil.AtomsPerCoin, from)
   518  
   519  	chanPoint := openChannelAndAssert(
   520  		c.t, c.net, from, to,
   521  		lntest.OpenChannelParams{
   522  			Amt: chanSize,
   523  		},
   524  	)
   525  
   526  	c.closeChannelFuncs = append(c.closeChannelFuncs, func() {
   527  		closeChannelAndAssert(c.t, c.net, from, chanPoint, false)
   528  	})
   529  
   530  	c.networkChans = append(c.networkChans, chanPoint)
   531  }
   532  
   533  func (c *interceptorTestContext) closeChannels() {
   534  	for _, f := range c.closeChannelFuncs {
   535  		f()
   536  	}
   537  }
   538  
   539  func (c *interceptorTestContext) waitForChannels() {
   540  	// Wait for all nodes to have seen all channels.
   541  	for _, chanPoint := range c.networkChans {
   542  		for _, node := range c.nodes {
   543  			txid, err := lnrpc.GetChanPointFundingTxid(chanPoint)
   544  			require.NoError(c.t.t, err, "unable to get txid")
   545  
   546  			point := wire.OutPoint{
   547  				Hash:  *txid,
   548  				Index: chanPoint.OutputIndex,
   549  			}
   550  
   551  			err = node.WaitForNetworkChannelOpen(chanPoint)
   552  			require.NoError(c.t.t, err, fmt.Sprintf("(%d): timeout "+
   553  				"waiting for channel(%s) open", node.NodeID,
   554  				point))
   555  		}
   556  	}
   557  }
   558  
   559  // sendAliceToCarolPayment sends a payment from alice to carol and make an
   560  // attempt to pay. The lnrpc.HTLCAttempt is returned.
   561  func (c *interceptorTestContext) sendAliceToCarolPayment(ctx context.Context,
   562  	amtMAtoms int64,
   563  	paymentHash, paymentAddr []byte) (*lnrpc.HTLCAttempt, error) {
   564  
   565  	// Build a route from alice to carol.
   566  	route, err := c.buildRoute(
   567  		ctx, amtMAtoms, []*lntest.HarnessNode{c.bob, c.carol},
   568  		paymentAddr,
   569  	)
   570  	if err != nil {
   571  		return nil, err
   572  	}
   573  	sendReq := &routerrpc.SendToRouteRequest{
   574  		PaymentHash: paymentHash,
   575  		Route:       route,
   576  	}
   577  
   578  	// Send a custom record to the forwarding node.
   579  	route.Hops[0].CustomRecords = map[uint64][]byte{
   580  		customTestKey: customTestValue,
   581  	}
   582  
   583  	// Send the payment.
   584  	return c.alice.RouterClient.SendToRouteV2(ctx, sendReq)
   585  }
   586  
   587  // buildRoute is a helper function to build a route with given hops.
   588  func (c *interceptorTestContext) buildRoute(ctx context.Context, amtMAtoms int64,
   589  	hops []*lntest.HarnessNode, payAddr []byte) (*lnrpc.Route, error) {
   590  
   591  	rpcHops := make([][]byte, 0, len(hops))
   592  	for _, hop := range hops {
   593  		k := hop.PubKeyStr
   594  		pubkey, err := route.NewVertexFromStr(k)
   595  		if err != nil {
   596  			return nil, fmt.Errorf("error parsing %v: %v",
   597  				k, err)
   598  		}
   599  		rpcHops = append(rpcHops, pubkey[:])
   600  	}
   601  
   602  	req := &routerrpc.BuildRouteRequest{
   603  		AmtMAtoms:      amtMAtoms,
   604  		FinalCltvDelta: chainreg.DefaultDecredTimeLockDelta,
   605  		HopPubkeys:     rpcHops,
   606  		PaymentAddr:    payAddr,
   607  	}
   608  
   609  	routeResp, err := c.alice.RouterClient.BuildRoute(ctx, req)
   610  	if err != nil {
   611  		return nil, err
   612  	}
   613  
   614  	return routeResp.Route, nil
   615  }