github.com/nspcc-dev/neo-go@v0.105.2-0.20240517133400-6be757af3eba/pkg/vm/stackitem/serialization.go

package stackitem

import (
	"errors"
	"fmt"
	"math/big"

	"github.com/nspcc-dev/neo-go/pkg/encoding/bigint"
	"github.com/nspcc-dev/neo-go/pkg/io"
	"github.com/nspcc-dev/neo-go/pkg/util"
)

// MaxDeserialized is the maximum number of items one deserialized item can
// contain (including itself).
const MaxDeserialized = 2048

// MaxSerialized is the maximum number of items one serialized item can contain
// (including itself).
const MaxSerialized = MaxDeserialized

// typicalNumOfItems is the number of items covering most serialization needs.
// It's a hint used for map creation, so it does not limit anything, it's just
// a micro-optimization to avoid excessive reallocations. Most of the serialized
// items are structs, so there is at least one of them.
const typicalNumOfItems = 4

// ErrRecursive is returned upon an attempt to serialize a recursive stack item
// (like an array including an item with a reference to the same array).
var ErrRecursive = errors.New("recursive item")

// ErrUnserializable is returned upon an attempt to serialize an item that can't
// be serialized (like an Interop item or a Pointer).
var ErrUnserializable = errors.New("unserializable")

// SerializationContext is a reusable stack item serialization context.
type SerializationContext struct {
	uv           [9]byte
	data         []byte
	allowInvalid bool
	limit        int
	seen         map[Item]sliceNoPointer
}

// deserContext is an internal deserialization context.
type deserContext struct {
	*io.BinReader
	allowInvalid bool
	limit        int
}

// Serialize encodes the given Item into a byte slice.
func Serialize(item Item) ([]byte, error) {
	return SerializeLimited(item, MaxSerialized)
}

// SerializeLimited encodes the given Item into a byte slice using a custom
// limit to restrict the maximum number of serialized elements.
func SerializeLimited(item Item, limit int) ([]byte, error) {
	sc := SerializationContext{
		allowInvalid: false,
		limit:        MaxSerialized,
		seen:         make(map[Item]sliceNoPointer, typicalNumOfItems),
	}
	if limit > 0 {
		sc.limit = limit
	}
	err := sc.serialize(item)
	if err != nil {
		return nil, err
	}
	return sc.data, nil
}

// EncodeBinary encodes the given Item into the given BinWriter. It's
// similar to io.Serializable's EncodeBinary but works with the Item
// interface.
func EncodeBinary(item Item, w *io.BinWriter) {
	data, err := Serialize(item)
	if err != nil {
		w.Err = err
		return
	}
	w.WriteBytes(data)
}
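// A minimal round-trip sketch from a caller's perspective (illustrative only;
// it assumes the package is imported under its usual stackitem name and that
// math/big is available):
//
//	item := stackitem.NewStruct([]stackitem.Item{
//		stackitem.NewBigInteger(big.NewInt(42)),
//		stackitem.NewByteArray([]byte("name")),
//	})
//	data, err := stackitem.Serialize(item) // fails for Interop/Pointer or recursive items.
//	if err != nil {
//		return err
//	}
//	restored, err := stackitem.Deserialize(data) // yields an equivalent Struct.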
// EncodeBinaryProtected encodes the given Item into the given BinWriter. It's
// similar to EncodeBinary but allows encoding interop items (only the type,
// the value is lost) and doesn't return any errors in w. Instead, if an error
// (like a recursive array) is encountered, it just writes the special InvalidT
// element type to w.
func EncodeBinaryProtected(item Item, w *io.BinWriter) {
	sc := SerializationContext{
		allowInvalid: true,
		limit:        MaxSerialized,
		seen:         make(map[Item]sliceNoPointer, typicalNumOfItems),
	}
	err := sc.serialize(item)
	if err != nil {
		w.WriteBytes([]byte{byte(InvalidT)})
		return
	}
	w.WriteBytes(sc.data)
}

func (w *SerializationContext) writeArray(item Item, arr []Item, start int) error {
	w.seen[item] = sliceNoPointer{}
	limit := w.limit
	w.appendVarUint(uint64(len(arr)))
	for i := range arr {
		if err := w.serialize(arr[i]); err != nil {
			return err
		}
	}
	w.seen[item] = sliceNoPointer{start, len(w.data), limit - w.limit + 1} // number of items including the array itself.
	return nil
}

// NewSerializationContext returns a reusable stack item serialization context.
func NewSerializationContext() *SerializationContext {
	return &SerializationContext{
		limit: MaxSerialized,
		seen:  make(map[Item]sliceNoPointer, typicalNumOfItems),
	}
}

// Serialize returns a flat slice of bytes with the given item. The process can
// be protected from bad elements if the appropriate flag is given (otherwise an
// error is returned on encountering any of them). The buffer returned is only
// valid until the next call to Serialize. The number of serialized items is
// restricted with MaxSerialized.
func (w *SerializationContext) Serialize(item Item, protected bool) ([]byte, error) {
	w.allowInvalid = protected
	w.limit = MaxSerialized
	if w.data != nil {
		w.data = w.data[:0]
	}
	for k := range w.seen {
		delete(w.seen, k)
	}
	err := w.serialize(item)
	if err != nil && protected {
		if w.data == nil {
			w.data = make([]byte, 0, 1)
		}
		w.data = append(w.data[:0], byte(InvalidT))
		err = nil
	}
	return w.data, err
}
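// A reuse sketch for the context above (illustrative; items and out are
// hypothetical caller-side variables): one context serializes many items
// without reallocating, but each returned buffer is overwritten by the next
// Serialize call, so it has to be copied if it must outlive the iteration:
//
//	sc := stackitem.NewSerializationContext()
//	for _, it := range items {
//		data, err := sc.Serialize(it, false)
//		if err != nil {
//			return err
//		}
//		out = append(out, append([]byte(nil), data...))
//	}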
func (w *SerializationContext) serialize(item Item) error {
	if v, ok := w.seen[item]; ok {
		if v.start == v.end {
			return ErrRecursive
		}
		if len(w.data)+v.end-v.start > MaxSize {
			return ErrTooBig
		}
		w.limit -= v.itemsCount
		if w.limit < 0 {
			return errTooBigElements
		}
		w.data = append(w.data, w.data[v.start:v.end]...)
		return nil
	}
	w.limit--
	if w.limit < 0 {
		return errTooBigElements
	}
	start := len(w.data)
	switch t := item.(type) {
	case *ByteArray:
		w.data = append(w.data, byte(ByteArrayT))
		w.appendVarUint(uint64(len(*t)))
		w.data = append(w.data, *t...)
	case *Buffer:
		w.data = append(w.data, byte(BufferT))
		w.appendVarUint(uint64(len(*t)))
		w.data = append(w.data, *t...)
	case Bool:
		w.data = append(w.data, byte(BooleanT))
		if t {
			w.data = append(w.data, 1)
		} else {
			w.data = append(w.data, 0)
		}
	case *BigInteger:
		w.data = append(w.data, byte(IntegerT))
		ln := len(w.data)
		w.data = append(w.data, 0)
		data := bigint.ToPreallocatedBytes((*big.Int)(t), w.data[len(w.data):])
		w.data[ln] = byte(len(data))
		w.data = append(w.data, data...)
	case *Interop:
		if w.allowInvalid {
			w.data = append(w.data, byte(InteropT))
		} else {
			return fmt.Errorf("%w: Interop", ErrUnserializable)
		}
	case *Pointer:
		if w.allowInvalid {
			w.data = append(w.data, byte(PointerT))
			w.appendVarUint(uint64(t.pos))
		} else {
			return fmt.Errorf("%w: Pointer", ErrUnserializable)
		}
	case *Array:
		w.data = append(w.data, byte(ArrayT))
		if err := w.writeArray(item, t.value, start); err != nil {
			return err
		}
	case *Struct:
		w.data = append(w.data, byte(StructT))
		if err := w.writeArray(item, t.value, start); err != nil {
			return err
		}
	case *Map:
		w.seen[item] = sliceNoPointer{}
		limit := w.limit

		elems := t.value
		w.data = append(w.data, byte(MapT))
		w.appendVarUint(uint64(len(elems)))
		for i := range elems {
			if err := w.serialize(elems[i].Key); err != nil {
				return err
			}
			if err := w.serialize(elems[i].Value); err != nil {
				return err
			}
		}
		w.seen[item] = sliceNoPointer{start, len(w.data), limit - w.limit + 1} // number of items including the Map itself.
	case Null:
		w.data = append(w.data, byte(AnyT))
	case nil:
		if w.allowInvalid {
			w.data = append(w.data, byte(InvalidT))
		} else {
			return fmt.Errorf("%w: nil", ErrUnserializable)
		}
	}

	if len(w.data) > MaxSize {
		return errTooBigSize
	}
	return nil
}

func (w *SerializationContext) appendVarUint(val uint64) {
	n := io.PutVarUint(w.uv[:], val)
	w.data = append(w.data, w.uv[:n]...)
}

// Deserialize decodes the Item from the given byte slice.
func Deserialize(data []byte) (Item, error) {
	r := io.NewBinReaderFromBuf(data)
	item := DecodeBinary(r)
	if r.Err != nil {
		return nil, r.Err
	}
	return item, nil
}

// DeserializeLimited returns an Item deserialized from the given byte slice.
// limit restricts the maximum number of items the deserialized item can contain
// (including itself). The default limit of MaxDeserialized is used if a
// non-positive limit is specified.
func DeserializeLimited(data []byte, limit int) (Item, error) {
	r := io.NewBinReaderFromBuf(data)
	dc := deserContext{
		BinReader:    r,
		allowInvalid: false,
		limit:        MaxDeserialized,
	}
	if limit > 0 {
		dc.limit = limit
	}
	item := dc.decodeBinary()
	if r.Err != nil {
		return nil, r.Err
	}
	return item, nil
}

// DecodeBinary decodes the previously serialized Item from the given
// reader. It's similar to io.Serializable's DecodeBinary() but implemented
// as a function because Item itself is an interface. Caveat: always check
// the reader's error value before using the returned Item.
func DecodeBinary(r *io.BinReader) Item {
	dc := deserContext{
		BinReader:    r,
		allowInvalid: false,
		limit:        MaxDeserialized,
	}
	return dc.decodeBinary()
}
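// A reader-based decoding sketch (illustrative; io here is the
// github.com/nspcc-dev/neo-go/pkg/io package used throughout this file and
// data is a hypothetical previously serialized byte slice). Per the caveat
// above, the reader's error is checked before the result is used:
//
//	r := io.NewBinReaderFromBuf(data)
//	item := stackitem.DecodeBinary(r)
//	if r.Err != nil {
//		return r.Err
//	}
//	// item is safe to use from here on.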
// DecodeBinaryProtected is similar to DecodeBinary but allows Interop and
// Invalid values to be present (making it symmetric to EncodeBinaryProtected).
func DecodeBinaryProtected(r *io.BinReader) Item {
	dc := deserContext{
		BinReader:    r,
		allowInvalid: true,
		limit:        MaxDeserialized,
	}
	return dc.decodeBinary()
}

func (r *deserContext) decodeBinary() Item {
	var t = Type(r.ReadB())
	if r.Err != nil {
		return nil
	}

	r.limit--
	if r.limit < 0 {
		r.Err = errTooBigElements
		return nil
	}
	switch t {
	case ByteArrayT, BufferT:
		data := r.ReadVarBytes(MaxSize)
		if t == ByteArrayT {
			return NewByteArray(data)
		}
		return NewBuffer(data)
	case BooleanT:
		var b = r.ReadBool()
		return NewBool(b)
	case IntegerT:
		data := r.ReadVarBytes(bigint.MaxBytesLen)
		num := bigint.FromBytes(data)
		return NewBigInteger(num)
	case ArrayT, StructT:
		size := int(r.ReadVarUint())
		if size > r.limit {
			r.Err = errTooBigElements
			return nil
		}
		arr := make([]Item, size)
		for i := 0; i < size; i++ {
			arr[i] = r.decodeBinary()
		}

		if t == ArrayT {
			return NewArray(arr)
		}
		return NewStruct(arr)
	case MapT:
		size := int(r.ReadVarUint())
		if size > r.limit/2 {
			r.Err = errTooBigElements
			return nil
		}
		m := NewMap()
		for i := 0; i < size; i++ {
			key := r.decodeBinary()
			value := r.decodeBinary()
			if r.Err != nil {
				break
			}
			m.Add(key, value)
		}
		return m
	case AnyT:
		return Null{}
	case InteropT:
		if r.allowInvalid {
			return NewInterop(nil)
		}
		fallthrough
	case PointerT:
		if r.allowInvalid {
			pos := int(r.ReadVarUint())
			return NewPointerWithHash(pos, nil, util.Uint160{})
		}
		fallthrough
	default:
		if t == InvalidT && r.allowInvalid {
			return nil
		}
		r.Err = fmt.Errorf("%w: %v", ErrInvalidType, t)
		return nil
	}
}

// SerializeConvertible serializes Convertible into a slice of bytes.
func SerializeConvertible(conv Convertible) ([]byte, error) {
	item, err := conv.ToStackItem()
	if err != nil {
		return nil, err
	}
	return Serialize(item)
}

// DeserializeConvertible deserializes Convertible from a slice of bytes.
func DeserializeConvertible(data []byte, conv Convertible) error {
	item, err := Deserialize(data)
	if err != nil {
		return err
	}
	return conv.FromStackItem(item)
}
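// A Convertible round-trip sketch (illustrative; MyType is a hypothetical type
// implementing the Convertible methods ToStackItem and FromStackItem):
//
//	var src MyType
//	data, err := stackitem.SerializeConvertible(&src)
//	if err != nil {
//		return err
//	}
//	var dst MyType
//	err = stackitem.DeserializeConvertible(data, &dst) // dst now mirrors src.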