github.com/aergoio/aergo@v1.3.1/tools/mpdumpdiag/main.go

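// mpdumpdiag is a small diagnostic tool for Aergo mempool dump files. The
// "print" command decodes a dump and prints it as a JSON transaction array;
// the "gen" command builds a dump from such a JSON array.
//
// Example usage (file names are illustrative):
//
//	mpdumpdiag print ./mempool.dump
//	mpdumpdiag gen ./txs.json ./mempool.dump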
package main

import (
	"bufio"
	"encoding/binary"
	"encoding/json"
	"io"
	"io/ioutil"
	"os"

	"github.com/aergoio/aergo/cmd/aergocli/util"
	"github.com/aergoio/aergo/types"
	"github.com/gogo/protobuf/proto"
	"github.com/spf13/cobra"
)

var (
	rootCmd = &cobra.Command{
		Use: "mpdumpdiag",
	}
	printCmd = &cobra.Command{
		Use:  "print <path to mempool dump>",
		Args: cobra.MinimumNArgs(1),
		Run:  runPrintCmd,
	}
	genCmd = &cobra.Command{
		Use:  "gen <json formatted tx array file> <dump path to be generated>",
		Args: cobra.MinimumNArgs(2),
		Run:  runGenCmd,
	}
)

func init() {
	rootCmd.SetOutput(os.Stdout)
	rootCmd.AddCommand(printCmd)
	rootCmd.AddCommand(genCmd)
}

func main() {
	if err := rootCmd.Execute(); err != nil {
		os.Exit(1)
	}
}

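// runPrintCmd loads the mempool dump given as the first argument, where each
// record is a 4-byte little-endian length prefix followed by a protobuf-encoded
// transaction, and prints the decoded transactions as an indented JSON array.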
func runPrintCmd(cmd *cobra.Command, args []string) {
	filename := args[0]

	file, err := os.Open(filename)
	if err != nil {
		cmd.Printf("error: failed to open file %s: %s\n", filename, err.Error())
		return
	}
	defer file.Close() // nolint: errcheck
	reader := bufio.NewReader(file)

	var count int
	var out []*util.InOutTx
	for {
		buf := types.Tx{}
		byteInt := make([]byte, 4)
		_, err := io.ReadFull(reader, byteInt)
		if err != nil {
			if err != io.EOF {
				cmd.Println("error: failed to read record length", err.Error())
			}
			break
		}

		reclen := binary.LittleEndian.Uint32(byteInt)
		buffer := make([]byte, int(reclen))
		_, err = io.ReadFull(reader, buffer)
		if err != nil {
			if err != io.EOF {
				cmd.Println("error: failed to read record body", err.Error())
			}
			break
		}

		err = proto.Unmarshal(buffer, &buf)
		if err != nil {
			cmd.Println("error: failed to unmarshal tx, skipping", err.Error())
			continue
		}
		count++
		//mp.put(types.NewTransaction(&buf)) // nolint: errcheck

		out = append(out, util.ConvTx(types.NewTransaction(&buf).GetTx()))
	}
	b, e := json.MarshalIndent(out, "", " ")
	if e == nil {
		cmd.Printf("%s\n", b)
	} else {
		cmd.Println("error: failed to convert to json", e.Error())
	}
	//fmt.Println("total ", count, "txs")
}

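// runGenCmd reads a JSON-formatted transaction array from the first argument,
// encodes each transaction with the same length-prefixed framing expected by
// the print command, and writes the result to the dump path given as the
// second argument.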
func runGenCmd(cmd *cobra.Command, args []string) {
	file, err := os.Create(args[1])
	if err != nil {
		cmd.Println("error: failed to create target file", err.Error())
		return
	}
	defer file.Close() // nolint: errcheck

	writer := bufio.NewWriter(file)
	defer writer.Flush() //nolint: errcheck

	b, err := ioutil.ReadFile(args[0])
	if err != nil {
		cmd.Println("error: failed to read source file", err.Error())
		return
	}
	txlist, err := util.ParseBase58Tx(b)
	if err != nil {
		cmd.Println("error: failed to parse json tx array", err.Error())
		return
	}
	for _, v := range txlist {
		var totalData []byte
		data, err := proto.Marshal(v)
		if err != nil {
			cmd.Println("error: marshal failed", err.Error())
			continue
		}

		// Prepend a 4-byte little-endian length so the record can be read back
		// by the print command.
		byteInt := make([]byte, 4)
		binary.LittleEndian.PutUint32(byteInt, uint32(len(data)))
		totalData = append(totalData, byteInt...)
		totalData = append(totalData, data...)
		length := len(totalData)
		// Keep writing until the whole record has been written (handles short writes).
		for {
			size, err := writer.Write(totalData)
			if err != nil {
				cmd.Println("error: writing encoded tx failed", err.Error())
				break
			}
			if length != size {
				totalData = totalData[size:]
				length -= size
			} else {
				break
			}
		}
	}
}