github.com/cloudwego/frugal@v0.1.15/internal/utils/pcache.go

/*
 * Copyright 2022 ByteDance Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package utils

import (
    `sync`
    `sync/atomic`
    `unsafe`

    `github.com/cloudwego/frugal/internal/rt`
)

/** Program Map **/

const (
    LoadFactor   = 0.5
    InitCapacity = 4096    // must be a power of 2
)

type ProgramMap struct {
    n uint64           // number of occupied slots
    m uint32           // bucket mask (capacity - 1)
    b []ProgramEntry   // buckets
}

type ProgramEntry struct {
    vt *rt.GoType      // key: the Go type
    fn interface{}     // value: the cached program
}

func newProgramMap() *ProgramMap {
    return &ProgramMap {
        n: 0,
        m: InitCapacity - 1,
        b: make([]ProgramEntry, InitCapacity),
    }
}

func (self *ProgramMap) get(vt *rt.GoType) interface{} {
    i := self.m + 1
    p := vt.Hash & self.m

    /* linear probing */
    for ; i > 0; i-- {
        if b := self.b[p]; b.vt == vt {
            return b.fn
        } else if b.vt == nil {
            break
        } else {
            p = (p + 1) & self.m
        }
    }

    /* not found */
    return nil
}

func (self *ProgramMap) add(vt *rt.GoType, fn interface{}) *ProgramMap {
    var f float64
    var p *ProgramMap

    /* check the load factor; copy as-is if still within bounds, otherwise grow and rehash */
    if f = float64(atomic.LoadUint64(&self.n) + 1) / float64(self.m + 1); f <= LoadFactor {
        p = self.copy()
    } else {
        p = self.rehash()
    }

    /* insert the value */
    p.insert(vt, fn)
    return p
}

func (self *ProgramMap) copy() *ProgramMap {
    p := new(ProgramMap)
    p.n = self.n
    p.m = self.m
    p.b = make([]ProgramEntry, len(self.b))
    copy(p.b, self.b)
    return p
}

func (self *ProgramMap) rehash() *ProgramMap {
    c := (self.m + 1) << 1
    r := &ProgramMap{m: c - 1, b: make([]ProgramEntry, int(c))}

    /* rehash every entry */
    for i := uint32(0); i <= self.m; i++ {
        if b := self.b[i]; b.vt != nil {
            r.insert(b.vt, b.fn)
        }
    }

    /* rebuild successful */
    return r
}

func (self *ProgramMap) insert(vt *rt.GoType, fn interface{}) {
    h := vt.Hash
    p := h & self.m

    /* linear probing */
    for i := uint32(0); i <= self.m; i++ {
        if b := &self.b[p]; b.vt != nil {
            p += 1
            p &= self.m
        } else {
            b.vt = vt
            b.fn = fn
            atomic.AddUint64(&self.n, 1)
            return
        }
    }

    /* should never happen */
    panic("no available slots")
}

/** RCU Program Cache **/

type ProgramCache struct {
    m sync.Mutex
    p unsafe.Pointer
}

func CreateProgramCache() *ProgramCache {
    return &ProgramCache {
        m: sync.Mutex{},
        p: unsafe.Pointer(newProgramMap()),
    }
}

func (self *ProgramCache) Get(vt *rt.GoType) interface{} {
    return (*ProgramMap)(atomic.LoadPointer(&self.p)).get(vt)
}

func (self *ProgramCache) Compute(vt *rt.GoType, compute func(*rt.GoType) (interface{}, error)) (interface{}, error) {
    var err error
    var val interface{}

    /* use defer to prevent inlining of this function */
    self.m.Lock()
    defer self.m.Unlock()
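    /*
     * Get() is lock-free, so another goroutine may have computed and
     * published the value between the caller's miss and the Lock() above;
     * the re-check below avoids rebuilding it.
     */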
    /* double check with write lock held */
    if val = self.Get(vt); val != nil {
        return val, nil
    }

    /* compute the value */
    if val, err = compute(vt); err != nil {
        return nil, err
    }

    /* update the RCU cache */
    atomic.StorePointer(&self.p, unsafe.Pointer((*ProgramMap)(atomic.LoadPointer(&self.p)).add(vt, val)))
    return val, nil
}
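/*
 * Usage sketch (illustrative, not part of the original file): the intended
 * pattern is a lock-free fast path through Get(), falling back to Compute(),
 * which serializes builders behind the mutex and republishes a fresh map via
 * the atomic pointer swap above, so readers never block (RCU). The helper
 * buildDecoder below is a hypothetical stand-in for a caller-supplied
 * compiler function with signature func(*rt.GoType) (interface{}, error).
 *
 *     var cache = CreateProgramCache()
 *
 *     func decoderFor(vt *rt.GoType) (interface{}, error) {
 *         if fn := cache.Get(vt); fn != nil {
 *             return fn, nil
 *         }
 *         return cache.Compute(vt, buildDecoder)
 *     }
 */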