github.com/insionng/yougam@v0.0.0-20170714101924-2bc18d833463/libraries/karlseguin/ccache/configuration.go

package ccache

type Configuration struct {
	maxSize        int64
	buckets        int
	itemsToPrune   int
	deleteBuffer   int
	promoteBuffer  int
	getsPerPromote int32
	tracking       bool
}

// Creates a configuration object with sensible defaults
// Use this as the start of the fluent configuration:
// e.g.: ccache.New(ccache.Configure().MaxSize(10000))
func Configure() *Configuration {
	return &Configuration{
		buckets:        16,
		itemsToPrune:   500,
		deleteBuffer:   1024,
		getsPerPromote: 3,
		promoteBuffer:  1024,
		maxSize:        5000,
		tracking:       false,
	}
}
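
// An illustrative sketch (not part of the original file) of the fluent style
// described above. It assumes the package's New function accepts the resulting
// *Configuration and the usual Set/Get methods, as in the upstream
// karlseguin/ccache API; "user:1" and user are placeholders:
//
//	cache := ccache.New(ccache.Configure().MaxSize(10000).ItemsToPrune(100))
//	cache.Set("user:1", user, 10*time.Minute)
//	if item := cache.Get("user:1"); item != nil {
//		_ = item.Value()
//	}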

// The max size for the cache
// [5000]
func (c *Configuration) MaxSize(max int64) *Configuration {
	c.maxSize = max
	return c
}

// Keys are hashed and assigned to a bucket (key hash % bucket count) to provide
// greater concurrency (every set requires a write lock on the bucket). Must be
// a power of 2 (1, 2, 4, 8, 16, ...)
// [16]
func (c *Configuration) Buckets(count uint32) *Configuration {
	if count == 0 || count&(count-1) != 0 {
		count = 16
	}
	c.buckets = int(count)
	return c
}
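
// Illustrative note (not part of the original file): requiring a power of two
// lets a bucket be chosen with a cheap mask instead of a modulo. The line below
// is a sketch of that idea with placeholder names (keyHash, bucketCount), not
// the package's actual lookup code:
//
//	bucketIndex := keyHash & uint32(bucketCount-1) // same as keyHash % bucketCount when bucketCount is a power of 2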

// The number of items to prune when memory is low
// [500]
func (c *Configuration) ItemsToPrune(count uint32) *Configuration {
	c.itemsToPrune = int(count)
	return c
}

// The size of the queue for items which should be promoted. If the queue fills
// up, promotions are skipped
// [1024]
func (c *Configuration) PromoteBuffer(size uint32) *Configuration {
	c.promoteBuffer = int(size)
	return c
}

// The size of the queue for items which should be deleted. If the queue fills
// up, calls to Delete() will block
// [1024]
func (c *Configuration) DeleteBuffer(size uint32) *Configuration {
	c.deleteBuffer = int(size)
	return c
}

// Given a large cache with a high read / write ratio, it's usually unnecessary
// to promote an item on every Get. GetsPerPromote specifies the number of Gets
// a key must have before being promoted
// [3]
func (c *Configuration) GetsPerPromote(count int32) *Configuration {
	c.getsPerPromote = count
	return c
}
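
// Illustrative sketch (not part of the original file): a read-heavy cache might
// raise this threshold so items are queued for promotion less often; the value
// 10 is an arbitrary example:
//
//	cache := ccache.New(ccache.Configure().GetsPerPromote(10))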

// Typically, a cache is agnostic about how cached values are used. This is fine
// for typical cache usage, where you fetch an item from the cache, do something
// (write it out) and nothing else.
//
// However, if callers are going to keep a reference to a cached item for a long
// time, things get messy. Specifically, the cache can evict the item while
// references still exist. Technically, this isn't an issue. However, if you reload
// the item back into the cache, you end up with 2 objects representing the same
// data. This is a waste of space and could lead to weird behavior (the kind of
// problem an identity map is meant to solve).
//
// By turning tracking on and using the cache's TrackingGet, the cache
// won't evict items which you haven't called Release() on. It's a simple reference
// counter.
func (c *Configuration) Track() *Configuration {
	c.tracking = true
	return c
}
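
// An illustrative sketch (not part of the original file) of the tracking flow
// described above. It assumes the TrackingGet/Release API exposed by the
// upstream karlseguin/ccache package; use(...) is a placeholder for the
// caller's own code:
//
//	cache := ccache.New(ccache.Configure().Track())
//	item := cache.TrackingGet("user:1")
//	if item != ccache.NilTracked {
//		use(item.Value()) // the item won't be evicted while it is tracked
//		item.Release()    // hand it back so the cache may evict it again
//	}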