github.com/nutsdb/nutsdb@v1.0.4/batch_test.go

package nutsdb

import (
	"fmt"
	"io/ioutil"
	"log"
	"os"
	"testing"

	"github.com/stretchr/testify/require"
	"github.com/xujiajun/utils/time2"
)

var bucket string

const (
	fileDir  = "/tmp/nutsdb/"
	fileDir1 = "/tmp/nutsdb/nutsdb_batch_write1"
	fileDir2 = "/tmp/nutsdb/nutsdb_batch_write2"
	fileDir3 = "/tmp/nutsdb/nutsdb_batch_write3"
	fileDir4 = "/tmp/nutsdb/nutsdb_batch_write4"
	fileDir5 = "/tmp/nutsdb/nutsdb_batch_write5"
	fileDir6 = "/tmp/nutsdb/nutsdb_batch_write6"
	fileDir7 = "/tmp/nutsdb/nutsdb_batch_write7"
	fileDir8 = "/tmp/nutsdb/nutsdb_batch_write8"
	fileDir9 = "/tmp/nutsdb/nutsdb_batch_write9"
	N        = 100
)

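// init removes any leftovers under fileDir from previous runs so every test
// starts against an empty directory tree.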
func init() {
	bucket = "bucketForBatchWrite"
	files, err := ioutil.ReadDir(fileDir)
	if err != nil {
		return
	}
	for _, f := range files {
		name := f.Name()
		if name != "" {
			filePath := fmt.Sprintf("%s/%s", fileDir, name)
			err := os.RemoveAll(filePath)
			if err != nil {
				log.Fatal(err)
			}
		}
	}
}

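// TestBatchWrite runs the batch-write suite against several DB
// configurations: it batch-writes and verifies N keys, deletes them again,
// flushes empty batches, and checks that a finished batch rejects Flush.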
func TestBatchWrite(t *testing.T) {
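	// testFlushAfterFinish asserts that Flush fails with ErrCommitAfterFinish
	// once the batch has already been flushed or cancelled.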
	testFlushAfterFinish := func(t *testing.T, db *DB) {
		wb, err := db.NewWriteBatch()
		require.NoError(t, err)
		require.NoError(t, wb.Flush())
		require.ErrorIs(t, wb.Flush(), ErrCommitAfterFinish)
		wb, err = db.NewWriteBatch()
		require.NoError(t, err)
		require.NoError(t, wb.Cancel())
		require.ErrorIs(t, wb.Flush(), ErrCommitAfterFinish)
	}

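	// testEmptyWrite flushes batches that contain no pending entries; each
	// flush must succeed as a no-op.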
	testEmptyWrite := func(t *testing.T, db *DB) {
		wb, err := db.NewWriteBatch()
		require.NoError(t, err)
		require.NoError(t, wb.Flush())
		wb, err = db.NewWriteBatch()
		require.NoError(t, err)
		require.NoError(t, wb.Flush())
		wb, err = db.NewWriteBatch()
		require.NoError(t, err)
		require.NoError(t, wb.Flush())
	}

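	// testWrite batch-writes N key/value pairs, verifies them in a read-only
	// transaction, batch-deletes them, and verifies they are gone.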
	testWrite := func(t *testing.T, db *DB) {
		txCreateBucket(t, db, DataStructureBTree, bucket, nil)
		key := func(i int) []byte {
			return []byte(fmt.Sprintf("%10d", i))
		}
		val := func(i int) []byte {
			return []byte(fmt.Sprintf("%128d", i))
		}
		wb, err := db.NewWriteBatch()
		require.NoError(t, err)
		time2.Start()
		for i := 0; i < N; i++ {
			require.NoError(t, wb.Put(bucket, key(i), val(i), 0))
		}
		require.NoError(t, wb.Flush())
		// fmt.Printf("Time taken via batch write %v keys: %v\n", N, time2.End())

		time2.Start()
		if err := db.View(
			func(tx *Tx) error {
				for i := 0; i < N; i++ {
					key := key(i)
					value, err := tx.Get(bucket, key)
					if err != nil {
						return err
					}
					require.Equal(t, val(i), value)
				}
				return nil
			}); err != nil {
			t.Error(err)
		}

		// fmt.Printf("Time taken via db.View %v keys: %v\n", N, time2.End())
		// err = wb.Reset()
		wb, err = db.NewWriteBatch()
		require.NoError(t, err)
		time2.Start()
		for i := 0; i < N; i++ {
			require.NoError(t, wb.Delete(bucket, key(i)))
		}
		require.NoError(t, wb.Flush())
		// fmt.Printf("Time taken via batch delete %v keys: %v\n", N, time2.End())

		if err := db.View(
			func(tx *Tx) error {
				for i := 0; i < N; i++ {
					key := key(i)
					_, err := tx.Get(bucket, key)
					require.Error(t, err)
				}
				return nil
			}); err != nil {
			t.Error(err)
		}
	}

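	// Cover both entry index modes, including variants with small
	// MaxBatchCount and MaxBatchSize limits.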
	dbs := make([]*DB, 6)
	dbs[0], _ = Open(
		DefaultOptions,
		WithDir(fileDir1),
		WithEntryIdxMode(HintKeyValAndRAMIdxMode),
	)
	dbs[1], _ = Open(
		DefaultOptions,
		WithDir(fileDir2),
		WithEntryIdxMode(HintKeyAndRAMIdxMode),
	)
	dbs[2], _ = Open(
		DefaultOptions,
		WithDir(fileDir4),
		WithEntryIdxMode(HintKeyValAndRAMIdxMode),
		WithMaxBatchCount(35),
	)
	dbs[3], _ = Open(
		DefaultOptions,
		WithDir(fileDir5),
		WithEntryIdxMode(HintKeyAndRAMIdxMode),
		WithMaxBatchCount(35),
	)
	dbs[4], _ = Open(
		DefaultOptions,
		WithDir(fileDir7),
		WithEntryIdxMode(HintKeyValAndRAMIdxMode),
		WithMaxBatchSize(20), // with 1000 the unit test fails; 1000000 passes
	)
	dbs[5], _ = Open(
		DefaultOptions,
		WithDir(fileDir8),
		WithEntryIdxMode(HintKeyAndRAMIdxMode),
		WithMaxBatchSize(20),
	)

	for _, db := range dbs {
		require.NotNil(t, db)
		testWrite(t, db)
		testEmptyWrite(t, db)
		testFlushAfterFinish(t, db)
	}
}

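// TestWriteBatch_SetMaxPendingTxns checks that SetMaxPendingTxns resizes the
// batch's throttle channels to the requested capacity.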
func TestWriteBatch_SetMaxPendingTxns(t *testing.T) {
	max := 10
	db, err := Open(
		DefaultOptions,
		WithDir(fileDir9),
	)
	require.NoError(t, err)
	wb, err := db.NewWriteBatch()
	require.NoError(t, err)
	wb.SetMaxPendingTxns(max)
	if wb.throttle == nil {
		t.Error("Expected throttle to be initialized, but it was nil")
	}
	if cap(wb.throttle.ch) != max {
		t.Errorf("Expected channel capacity to be %d, but got %d", max, cap(wb.throttle.ch))
	}
	if cap(wb.throttle.errCh) != max {
		t.Errorf("Expected error channel capacity to be %d, but got %d", max, cap(wb.throttle.errCh))
	}
}