github.com/sunriselayer/sunrise-da@v0.13.1-sr3/das/checkpoint.go (about)

     1  package das
     2  
     3  import (
     4  	"fmt"
     5  )
     6  
// checkpoint persists sampling progress so the DASer can resume where it
// left off after a restart. It is produced from SamplingStats by
// newCheckpoint and serialized to JSON.
type checkpoint struct {
	// SampleFrom is the next height to sample from (catchup head + 1).
	SampleFrom uint64 `json:"sample_from"`
	// NetworkHead is the highest network height known at checkpoint time.
	NetworkHead uint64 `json:"network_head"`
	// Failed heights will be retried. The value appears to be a retry
	// counter per height — confirm against the retry logic elsewhere.
	Failed map[uint64]int `json:"failed,omitempty"`
	// Workers will resume on restart from previous state
	Workers []workerCheckpoint `json:"workers,omitempty"`
}
    15  
// workerCheckpoint will be used to resume worker on restart
type workerCheckpoint struct {
	// From is the height the resumed worker starts at.
	From uint64 `json:"from"`
	// To is the last height (inclusive, per the worker semantics) the job covers.
	To uint64 `json:"to"`
	// JobType identifies the kind of job; only catchup jobs are persisted
	// (see newCheckpoint).
	JobType jobType `json:"job_type"`
}
    22  
    23  func newCheckpoint(stats SamplingStats) checkpoint {
    24  	workers := make([]workerCheckpoint, 0, len(stats.Workers))
    25  	for _, w := range stats.Workers {
    26  		// no need to resume recent jobs after restart. On the other hand, retry jobs will resume from
    27  		// failed heights map. it leaves only catchup jobs to be stored and resumed
    28  		if w.JobType == catchupJob {
    29  			workers = append(workers, workerCheckpoint{
    30  				From:    w.Curr,
    31  				To:      w.To,
    32  				JobType: w.JobType,
    33  			})
    34  		}
    35  	}
    36  	return checkpoint{
    37  		SampleFrom:  stats.CatchupHead + 1,
    38  		NetworkHead: stats.NetworkHead,
    39  		Failed:      stats.Failed,
    40  		Workers:     workers,
    41  	}
    42  }
    43  
    44  func (c checkpoint) String() string {
    45  	str := fmt.Sprintf("SampleFrom: %v, NetworkHead: %v", c.SampleFrom, c.NetworkHead)
    46  
    47  	if len(c.Workers) > 0 {
    48  		str += fmt.Sprintf(", Workers: %v", len(c.Workers))
    49  	}
    50  
    51  	if len(c.Failed) > 0 {
    52  		str += fmt.Sprintf("\nFailed: %v", c.Failed)
    53  	}
    54  
    55  	return str
    56  }