github.com/wfusion/gofusion@v1.1.14/common/utils/clone/clone.go

// Copyright 2019 Huan Du. All rights reserved.
// Licensed under the MIT license that can be found in the LICENSE file.

// Package clone provides functions to deep clone any Go data.
// It also provides a wrapper to protect a pointer from any unexpected mutation.
package clone

import (
	"fmt"
	"reflect"
	"unsafe"
)

var heapCloneState = &cloneState{
	allocator: defaultAllocator,
}
var cloner = MakeCloner(defaultAllocator)

func clone(allocator *Allocator, v interface{}) interface{} {
	if v == nil {
		return nil
	}

	val := reflect.ValueOf(v)
	cloned := allocator.clone(val, false)
	return cloned.Interface()
}

func cloneSlowly(allocator *Allocator, v interface{}) interface{} {
	if v == nil {
		return nil
	}

	val := reflect.ValueOf(v)
	cloned := allocator.cloneSlowly(val, false)
	return cloned.Interface()
}

type cloneState struct {
	allocator *Allocator
	visited   visitMap
	invalid   invalidPointers

	// The value that should not be cloned by its custom func.
	// It's useful to avoid an infinite loop when a custom func calls allocator.Clone().
	skipCustomFuncValue reflect.Value
}

type visit struct {
	p     uintptr
	extra int
	t     reflect.Type
}

type visitMap map[visit]reflect.Value
type invalidPointers map[visit]reflect.Value

func (state *cloneState) clone(v reflect.Value) reflect.Value {
	if state.allocator.isScalar(v.Kind()) {
		return copyScalarValue(v)
	}

	switch v.Kind() {
	case reflect.Array:
		return state.cloneArray(v)
	case reflect.Chan:
		return state.allocator.MakeChan(v.Type(), v.Cap())
	case reflect.Interface:
		return state.cloneInterface(v)
	case reflect.Map:
		return state.cloneMap(v)
	case reflect.Ptr:
		return state.clonePtr(v)
	case reflect.Slice:
		return state.cloneSlice(v)
	case reflect.Struct:
		return state.cloneStruct(v)
	case reflect.String:
		return state.cloneString(v)
	default:
		panic(fmt.Errorf("go-clone: <bug> unsupported type `%v`", v.Type()))
	}
}

func (state *cloneState) cloneArray(v reflect.Value) reflect.Value {
	dst := state.allocator.New(v.Type())
	state.copyArray(v, dst)
	return dst.Elem()
}

func (state *cloneState) copyArray(src, nv reflect.Value) {
	p := unsafe.Pointer(nv.Pointer()) // nv must be a Ptr.
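	// Arrays of scalar elements are copied wholesale below via shadowCopy,
	// which is effectively a raw memory copy; non-scalar elements are
	// cloned one at a time so that nested references are deep-copied too.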
	dst := nv.Elem()
	num := src.Len()

	if state.allocator.isScalar(src.Type().Elem().Kind()) {
		shadowCopy(src, p)
		return
	}

	for i := 0; i < num; i++ {
		dst.Index(i).Set(state.clone(src.Index(i)))
	}
}

func (state *cloneState) cloneInterface(v reflect.Value) reflect.Value {
	if v.IsNil() {
		return reflect.Zero(v.Type())
	}

	t := v.Type()
	elem := v.Elem()
	return state.clone(elem).Convert(elem.Type()).Convert(t)
}

func (state *cloneState) cloneMap(v reflect.Value) reflect.Value {
	if v.IsNil() {
		return reflect.Zero(v.Type())
	}

	t := v.Type()

	if state.visited != nil {
		vst := visit{
			p: v.Pointer(),
			t: t,
		}

		if val, ok := state.visited[vst]; ok {
			return val
		}
	}

	nv := state.allocator.MakeMap(t, v.Len())

	if state.visited != nil {
		vst := visit{
			p: v.Pointer(),
			t: t,
		}
		state.visited[vst] = nv
	}

	for iter := mapIter(v); iter.Next(); {
		key := state.clone(iter.Key())
		value := state.clone(iter.Value())
		nv.SetMapIndex(key, value)
	}

	return nv
}

func (state *cloneState) clonePtr(v reflect.Value) reflect.Value {
	if v.IsNil() {
		return reflect.Zero(v.Type())
	}

	t := v.Type()

	if state.allocator.isOpaquePointer(t) {
		if v.CanInterface() {
			return v
		}

		ptr := state.allocator.New(t)
		p := unsafe.Pointer(ptr.Pointer())
		shadowCopy(v, p)
		return ptr.Elem()
	}

	if state.visited != nil {
		vst := visit{
			p: v.Pointer(),
			t: t,
		}

		if val, ok := state.visited[vst]; ok {
			return val
		}
	}

	src := v.Elem()
	elemType := src.Type()
	elemKind := src.Kind()
	nv := state.allocator.New(elemType)

	if state.visited != nil {
		vst := visit{
			p: v.Pointer(),
			t: t,
		}
		state.visited[vst] = nv
	}

	switch elemKind {
	case reflect.Struct:
		state.copyStruct(src, nv)
	case reflect.Array:
		state.copyArray(src, nv)
	default:
		nv.Elem().Set(state.clone(src))
	}

	// If this pointer is the address of a struct field and it's a cycle pointer,
	// it may have been updated while cloning the field; reload it from visited.
	if state.visited != nil {
		vst := visit{
			p: v.Pointer(),
			t: t,
		}
		nv = state.visited[vst]
	}

	return nv
}

func (state *cloneState) cloneSlice(v reflect.Value) reflect.Value {
	if v.IsNil() {
		return reflect.Zero(v.Type())
	}

	t := v.Type()
	num := v.Len()

	if state.visited != nil {
		vst := visit{
			p:     v.Pointer(),
			extra: num,
			t:     t,
		}

		if val, ok := state.visited[vst]; ok {
			return val
		}
	}

	c := v.Cap()
	nv := state.allocator.MakeSlice(t, num, c)

	if state.visited != nil {
		vst := visit{
			p:     v.Pointer(),
			extra: num,
			t:     t,
		}
		state.visited[vst] = nv
	}

	// For a slice of scalar elements, copy the underlying values directly.
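	// The (*[maxByteSize]byte) conversions below reinterpret both backing
	// arrays as huge byte arrays so the raw bytes can be moved with copy;
	// the l and cc bounds keep the copy within the slice's len and cap.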
	if state.allocator.isScalar(t.Elem().Kind()) {
		src := unsafe.Pointer(v.Pointer())
		dst := unsafe.Pointer(nv.Pointer())
		sz := int(t.Elem().Size())
		l := num * sz
		cc := c * sz
		copy((*[maxByteSize]byte)(dst)[:l:cc], (*[maxByteSize]byte)(src)[:l:cc])
	} else {
		for i := 0; i < num; i++ {
			nv.Index(i).Set(state.clone(v.Index(i)))
		}
	}

	return nv
}

func (state *cloneState) cloneStruct(v reflect.Value) reflect.Value {
	t := v.Type()
	nv := state.allocator.New(t)
	state.copyStruct(v, nv)
	return nv.Elem()
}

var typeOfByteSlice = reflect.TypeOf([]byte(nil))

func (state *cloneState) cloneString(v reflect.Value) reflect.Value {
	t := v.Type()
	l := v.Len()
	data := state.allocator.MakeSlice(typeOfByteSlice, l, l)

	// If v is an unexported struct field, it cannot be read through
	// Interface(); rewrap its string content so reflect.Copy works on it.
	if !v.CanInterface() {
		v = reflect.ValueOf(v.String())
	}

	reflect.Copy(data, v)

	nv := state.allocator.New(t)
	slice := data.Interface().([]byte)
	*(*stringHeader)(unsafe.Pointer(nv.Pointer())) = *(*stringHeader)(unsafe.Pointer(&slice))

	return nv.Elem()
}

func (state *cloneState) copyStruct(src, nv reflect.Value) {
	t := src.Type()
	st := state.allocator.loadStructType(t)
	ptr := unsafe.Pointer(nv.Pointer())

	if st.Init(state.allocator, src, nv, state.skipCustomFuncValue == src) {
		return
	}

	for _, pf := range st.PointerFields {
		i := int(pf.Index)
		p := unsafe.Pointer(uintptr(ptr) + pf.Offset)
		field := src.Field(i)

		// This field can be referenced by a pointer or interface inside itself.
		// Put the pointer to this field into visited to avoid any error.
		//
		// See https://github.com/huandu/go-clone/issues/3.
		if state.visited != nil && field.CanAddr() {
			ft := field.Type()
			fp := field.Addr().Pointer()
			vst := visit{
				p: fp,
				t: reflect.PtrTo(ft),
			}
			nv := reflect.NewAt(ft, p)

			// The address of this field was visited before, so fp must be a cycle pointer.
			// As this field is not fully cloned yet, the val stored in visited[vst] must be wrong.
			// It must be replaced by nv, which will become the right value (it's incomplete right now).
			//
			// Unfortunately, if the val was already used by previous clone routines,
			// there is no easy way to fix the wrong values: all pointers must be traversed and fixed.
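			// Such stale values are recorded in state.invalid below, so
			// that the fix pass (see fix further down) can rewrite them
			// after the clone completes.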
			if val, ok := state.visited[vst]; ok {
				state.invalid[visit{
					p: val.Pointer(),
					t: vst.t,
				}] = nv
			}

			state.visited[vst] = nv
		}

		v := state.clone(field)
		shadowCopy(v, p)
	}
}

var typeOfString = reflect.TypeOf("")

func shadowCopy(src reflect.Value, p unsafe.Pointer) {
	switch src.Kind() {
	case reflect.Bool:
		*(*bool)(p) = src.Bool()
	case reflect.Int:
		*(*int)(p) = int(src.Int())
	case reflect.Int8:
		*(*int8)(p) = int8(src.Int())
	case reflect.Int16:
		*(*int16)(p) = int16(src.Int())
	case reflect.Int32:
		*(*int32)(p) = int32(src.Int())
	case reflect.Int64:
		*(*int64)(p) = src.Int()
	case reflect.Uint:
		*(*uint)(p) = uint(src.Uint())
	case reflect.Uint8:
		*(*uint8)(p) = uint8(src.Uint())
	case reflect.Uint16:
		*(*uint16)(p) = uint16(src.Uint())
	case reflect.Uint32:
		*(*uint32)(p) = uint32(src.Uint())
	case reflect.Uint64:
		*(*uint64)(p) = src.Uint()
	case reflect.Uintptr:
		*(*uintptr)(p) = uintptr(src.Uint())
	case reflect.Float32:
		*(*float32)(p) = float32(src.Float())
	case reflect.Float64:
		*(*float64)(p) = src.Float()
	case reflect.Complex64:
		*(*complex64)(p) = complex64(src.Complex())
	case reflect.Complex128:
		*(*complex128)(p) = src.Complex()

	case reflect.Array:
		t := src.Type()

		if src.CanAddr() {
			srcPtr := unsafe.Pointer(src.UnsafeAddr())
			sz := t.Size()
			copy((*[maxByteSize]byte)(p)[:sz:sz], (*[maxByteSize]byte)(srcPtr)[:sz:sz])
			return
		}

		val := reflect.NewAt(t, p).Elem()

		if src.CanInterface() {
			val.Set(src)
			return
		}

		sz := t.Elem().Size()
		num := src.Len()

		for i := 0; i < num; i++ {
			elemPtr := unsafe.Pointer(uintptr(p) + uintptr(i)*sz)
			shadowCopy(src.Index(i), elemPtr)
		}
	case reflect.Chan:
		*((*uintptr)(p)) = src.Pointer()
	case reflect.Func:
		t := src.Type()
		src = copyScalarValue(src)
		val := reflect.NewAt(t, p).Elem()
		val.Set(src)
	case reflect.Interface:
		*((*interfaceData)(p)) = parseReflectValue(src)
	case reflect.Map:
		*((*uintptr)(p)) = src.Pointer()
	case reflect.Ptr:
		*((*uintptr)(p)) = src.Pointer()
	case reflect.Slice:
		*(*sliceHeader)(p) = sliceHeader{
			Data: src.Pointer(),
			Len:  src.Len(),
			Cap:  src.Cap(),
		}
	case reflect.String:
		s := src.String()
		val := reflect.NewAt(typeOfString, p).Elem()
		val.SetString(s)
	case reflect.Struct:
		t := src.Type()
		val := reflect.NewAt(t, p).Elem()

		if src.CanInterface() {
			val.Set(src)
			return
		}

		num := t.NumField()

		for i := 0; i < num; i++ {
			field := t.Field(i)
			fieldPtr := unsafe.Pointer(uintptr(p) + field.Offset)
			shadowCopy(src.Field(i), fieldPtr)
		}
	case reflect.UnsafePointer:
		// There is no way to deep copy an unsafe.Pointer value; copy the address as-is.
		*((*uintptr)(p)) = src.Pointer()

	default:
		panic(fmt.Errorf("go-clone: <bug> impossible type `%v` when cloning private field", src.Type()))
	}
}

// fix traverses v to update all pointer values listed in state.invalid.
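// The fix pass only matters on the slow path (cloneSlowly), which tracks
// visited pointers and therefore tolerates cyclic data. A minimal sketch,
// assuming the exported Slowly wrapper that this package provides on top
// of cloneSlowly, following the upstream huandu/go-clone API:
//
//	type node struct {
//		Next *node
//	}
//
//	n := &node{}
//	n.Next = n // a cycle
//	c := Slowly(n).(*node)
//	// c.Next now points at c itself rather than back into n.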
func (state *cloneState) fix(v reflect.Value) {
	if state == nil || len(state.invalid) == 0 {
		return
	}

	fix := &fixState{
		allocator: state.allocator,
		fixed:     fixMap{},
		invalid:   state.invalid,
	}
	fix.fix(v)
}

type fixState struct {
	allocator *Allocator
	fixed     fixMap
	invalid   invalidPointers
}

type fixMap map[visit]struct{}

func (fix *fixState) new(t reflect.Type) reflect.Value {
	return fix.allocator.New(t)
}

func (fix *fixState) fix(v reflect.Value) (copied reflect.Value, changed int) {
	if fix.allocator.isScalar(v.Kind()) {
		return
	}

	switch v.Kind() {
	case reflect.Array:
		return fix.fixArray(v)
	case reflect.Chan:
		// Do nothing.
		return
	case reflect.Interface:
		return fix.fixInterface(v)
	case reflect.Map:
		return fix.fixMap(v)
	case reflect.Ptr:
		return fix.fixPtr(v)
	case reflect.Slice:
		return fix.fixSlice(v)
	case reflect.Struct:
		return fix.fixStruct(v)
	case reflect.String:
		// Do nothing.
		return
	default:
		panic(fmt.Errorf("go-clone: <bug> unsupported type `%v`", v.Type()))
	}
}

func (fix *fixState) fixArray(v reflect.Value) (copied reflect.Value, changed int) {
	t := v.Type()
	et := t.Elem()
	kind := et.Kind()

	if fix.allocator.isScalar(kind) {
		return
	}

	l := v.Len()

	for i := 0; i < l; i++ {
		elem := v.Index(i)

		if kind == reflect.Ptr {
			vst := visit{
				p: elem.Pointer(),
				t: et,
			}

			if nv, ok := fix.invalid[vst]; ok {
				// If elem cannot be set, v must be copied to make it settable.
				// Don't do it unless there is no other choice.
				if !elem.CanSet() {
					copied = fix.new(t).Elem()
					shadowCopy(v, unsafe.Pointer(copied.Addr().Pointer()))
					_, changed = fix.fixArray(copied)
					return
				}

				elem.Set(nv)
				changed++
				continue
			}
		}

		fixed, c := fix.fix(elem)
		changed += c

		if fixed.IsValid() {
			// If elem cannot be set, v must be copied to make it settable.
			// Don't do it unless there is no other choice.
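			// The fresh array allocated below is addressable, so the
			// recursive fixArray call can fix its elements in place.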
			if !elem.CanSet() {
				copied = fix.new(t).Elem()
				shadowCopy(v, unsafe.Pointer(copied.Addr().Pointer()))
				_, changed = fix.fixArray(copied)
				return
			}

			elem.Set(fixed)
		}
	}

	return
}

func (fix *fixState) fixInterface(v reflect.Value) (copied reflect.Value, changed int) {
	if v.IsNil() {
		return
	}

	elem := v.Elem()
	t := elem.Type()
	kind := elem.Kind()

	if kind == reflect.Ptr {
		vst := visit{
			p: elem.Pointer(),
			t: t,
		}

		if nv, ok := fix.invalid[vst]; ok {
			copied = nv.Convert(v.Type())
			changed++
			return
		}
	}

	copied, changed = fix.fix(elem)

	if copied.IsValid() {
		copied = copied.Convert(v.Type())
	}

	return
}

func (fix *fixState) fixMap(v reflect.Value) (copied reflect.Value, changed int) {
	if v.IsNil() {
		return
	}

	t := v.Type()
	vst := visit{
		p: v.Pointer(),
		t: t,
	}

	if _, ok := fix.fixed[vst]; ok {
		return
	}

	fix.fixed[vst] = struct{}{}

	kt := t.Key()
	et := t.Elem()
	keyKind := kt.Kind()
	elemKind := et.Kind()

	if isScalar := fix.allocator.isScalar; isScalar(keyKind) && isScalar(elemKind) {
		return
	}

	invalidKeys := map[reflect.Value][2]reflect.Value{}

	for iter := mapIter(v); iter.Next(); {
		key := iter.Key()
		elem := iter.Value()
		var fixed reflect.Value
		c := 0

		if elemKind == reflect.Ptr {
			vst := visit{
				p: elem.Pointer(),
				t: et,
			}

			if nv, ok := fix.invalid[vst]; ok {
				fixed = nv
				c++
			} else {
				fixed, c = fix.fixPtr(elem)
			}
		} else {
			fixed, c = fix.fix(elem)
		}

		changed += c
		c = 0

		if fixed.IsValid() {
			v = forceSetMapIndex(v, key, fixed)
			elem = fixed
			fixed = reflect.Value{}
		}

		if keyKind == reflect.Ptr {
			vst := visit{
				p: key.Pointer(),
				t: kt,
			}

			if nv, ok := fix.invalid[vst]; ok {
				fixed = nv
				c++
			} else {
				fixed, c = fix.fixPtr(key)
			}
		} else {
			fixed, c = fix.fix(key)
		}

		changed += c

		// A key cannot be changed while ranging over the map.
		// Do it after the loop.
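		// Replacing a key means deleting the old entry and inserting a
		// new one, which would confuse the live iterator; collect such
		// keys in invalidKeys and rewrite them once iteration is done.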
		if fixed.IsValid() {
			invalidKeys[key] = [2]reflect.Value{fixed, elem}
		}
	}

	for key, kv := range invalidKeys {
		v = forceSetMapIndex(v, key, reflect.Value{})
		v = forceSetMapIndex(v, kv[0], kv[1])
	}

	return
}

func forceSetMapIndex(v, key, elem reflect.Value) (nv reflect.Value) {
	nv = v

	if !v.CanInterface() {
		nv = forceClearROFlag(v)
	}

	if !key.CanInterface() {
		key = forceClearROFlag(key)
	}

	if elem.IsValid() && !elem.CanInterface() {
		elem = forceClearROFlag(elem)
	}

	nv.SetMapIndex(key, elem)
	return
}

func (fix *fixState) fixPtr(v reflect.Value) (copied reflect.Value, changed int) {
	if v.IsNil() {
		return
	}

	vst := visit{
		p: v.Pointer(),
		t: v.Type(),
	}

	if _, ok := fix.invalid[vst]; ok {
		panic(fmt.Errorf("go-clone: <bug> invalid pointers must have been fixed in other methods"))
	}

	if _, ok := fix.fixed[vst]; ok {
		return
	}

	fix.fixed[vst] = struct{}{}

	elem := v.Elem()
	_, changed = fix.fix(elem)
	return
}

func (fix *fixState) fixSlice(v reflect.Value) (copied reflect.Value, changed int) {
	if v.IsNil() {
		return
	}

	t := v.Type()
	et := t.Elem()
	kind := et.Kind()

	if fix.allocator.isScalar(kind) {
		return
	}

	l := v.Len()
	p := unsafe.Pointer(v.Pointer())
	vst := visit{
		p:     uintptr(p),
		extra: l,
		t:     t,
	}

	if _, ok := fix.fixed[vst]; ok {
		return
	}

	fix.fixed[vst] = struct{}{}

	for i := 0; i < l; i++ {
		elem := v.Index(i)
		var fixed reflect.Value
		c := 0

		if kind == reflect.Ptr {
			vst := visit{
				p: elem.Pointer(),
				t: et,
			}

			if nv, ok := fix.invalid[vst]; ok {
				fixed = nv
			} else {
				fixed, c = fix.fixPtr(elem)
			}
		} else {
			fixed, c = fix.fix(elem)
		}

		changed += c

		if fixed.IsValid() {
			sz := et.Size()
			elemPtr := unsafe.Pointer(uintptr(p) + sz*uintptr(i))
			shadowCopy(fixed, elemPtr)
		}
	}

	return
}

func (fix *fixState) fixStruct(v reflect.Value) (copied reflect.Value, changed int) {
	t := v.Type()
	st := fix.allocator.loadStructType(t)

	if len(st.PointerFields) == 0 {
		return
	}

	for _, pf := range st.PointerFields {
		i := int(pf.Index)
		field := v.Field(i)

		ft := field.Type()

		if ft.Kind() == reflect.Ptr {
			vst := visit{
				p: field.Pointer(),
				t: ft,
			}

			if nv, ok := fix.invalid[vst]; ok {
				// If v is not addressable, a new struct must be allocated.
				// Don't do it unless there is no other choice.
				if !v.CanAddr() {
					copied = fix.new(t).Elem()
					shadowCopy(v, unsafe.Pointer(copied.Addr().Pointer()))
					_, changed = fix.fixStruct(copied)
					return
				}

				ptr := unsafe.Pointer(v.Addr().Pointer())
				p := unsafe.Pointer(uintptr(ptr) + pf.Offset)
				shadowCopy(nv, p)
				continue
			}
		}

		fixed, c := fix.fix(field)
		changed += c

		if fixed.IsValid() {
			// If v is not addressable, a new struct must be allocated.
			// Don't do it unless there is no other choice.
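			// The freshly allocated struct is addressable, so the
			// recursive fixStruct call can patch its pointer fields in
			// place; copied is returned for the caller to use instead of v.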
			if !v.CanAddr() {
				copied = fix.new(t).Elem()
				shadowCopy(v, unsafe.Pointer(copied.Addr().Pointer()))
				_, changed = fix.fixStruct(copied)
				return
			}

			ptr := unsafe.Pointer(v.Addr().Pointer())
			p := unsafe.Pointer(uintptr(ptr) + pf.Offset)
			shadowCopy(fixed, p)
		}
	}

	return
}
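// A usage sketch for the package's entry points, assuming the exported
// Clone/Slowly wrappers and the Cloner returned by MakeCloner (defined
// elsewhere in this package) follow the upstream huandu/go-clone API:
//
//	orig := map[string][]int{"a": {1, 2, 3}}
//	dup := Clone(orig).(map[string][]int)
//	dup["a"][0] = 100 // orig is unaffected
//
//	c := MakeCloner(defaultAllocator)
//	dup2 := c.Clone(orig).(map[string][]int)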