github.com/epfl-dcsl/gotee@v0.0.0-20200909122901-014b35f5e5e9/src/gosecommon/copy.go (about)

     1  package gosecommon
     2  
     3  import (
     4  	"reflect"
     5  	r "runtime"
     6  	"unsafe"
     7  )
     8  
// rslice mirrors the in-memory layout of a Go slice header (data pointer,
// length, capacity) so that deepCopy1 can read and rewrite slice internals
// through raw addresses.
// NOTE(review): assumes the runtime's slice header layout never diverges
// from {ptr, int, int} — verify against this fork's runtime.
type rslice struct {
	array unsafe.Pointer // backing array address
	l     int            // length, counted in elements
	c     int            // capacity, counted in elements
}
    14  
// CA is an allocator callback used by the deep-copy machinery: given a size
// in bytes, it returns the address of a fresh block to copy into.
type CA func(size uintptr) uintptr
    16  
// Copy records one completed allocation made during a deep copy: the
// destination address and the number of bytes it covers.
type Copy struct {
	start uintptr // address of the copied block
	size  uintptr // size of the copied block, in bytes
}
    21  
// Store maps a source address to its already-made Copy, so shared pointers
// keep their aliasing and cyclic structures do not recurse forever.
type Store = map[uintptr]Copy
    23  
    24  func memcpy(dest, source, l uintptr) {
    25  	if dest == 0 || source == 0 {
    26  		panic("nil argument to copy")
    27  	}
    28  	for i := uintptr(0); i < l; i++ {
    29  		d := (*byte)(unsafe.Pointer(dest + i))
    30  		s := (*byte)(unsafe.Pointer(source + i))
    31  		*d = *s
    32  	}
    33  }
    34  
    35  func CanShallowCopy(rtpe *r.DPTpe) bool {
    36  	tpe := reflect.ConvDPTpeToType(rtpe)
    37  	switch tpe.Kind() {
    38  	case reflect.Map:
    39  		panic("Trying to deep copy a map...")
    40  	case reflect.Array:
    41  		return CanShallowCopy(reflect.ConvTypeToDPTpe(tpe.Elem()))
    42  	case reflect.Struct:
    43  		needed := false
    44  		for i := 0; i < tpe.NumField(); i++ {
    45  			if !CanShallowCopy(reflect.ConvTypeToDPTpe(tpe.Field(i).Type)) {
    46  				needed = true
    47  				break
    48  			}
    49  		}
    50  		return !needed
    51  	case reflect.UnsafePointer:
    52  		fallthrough
    53  	case reflect.Ptr:
    54  		return false
    55  	case reflect.Slice:
    56  		return false
    57  	}
    58  	return true
    59  }
    60  
    61  // needsCopy checks the given type against the supported once and returns
    62  // true if the type requires recursive exploring for copy.
    63  func needsCopy(tpe reflect.Type) (bool, reflect.Kind) {
    64  	switch tpe.Kind() {
    65  	case reflect.Map:
    66  		panic("Trying to deep copy a map...")
    67  	case reflect.UnsafePointer:
    68  		fallthrough
    69  	case reflect.Ptr:
    70  		fallthrough
    71  	case reflect.Struct:
    72  		// we force the inspection from deepcopy
    73  		fallthrough
    74  	case reflect.Array:
    75  		fallthrough
    76  	case reflect.Slice:
    77  		return true, tpe.Kind()
    78  	}
    79  	return false, tpe.Kind()
    80  }
    81  
    82  func ignoreCopy(tpe reflect.Type) bool {
    83  	return tpe == reflect.TypeOf(r.EcallReq{}) ||
    84  		tpe == reflect.TypeOf(r.EcallServerReq{}) ||
    85  		tpe == reflect.TypeOf(&r.EcallServerReq{})
    86  }
    87  
    88  func setPtrValue(ptr uintptr, val uintptr) {
    89  	pptr := (*uintptr)(unsafe.Pointer(ptr))
    90  	*pptr = val
    91  }
    92  
    93  func extractValue(ptr uintptr) uintptr {
    94  	val := *(*uintptr)(unsafe.Pointer(ptr))
    95  	return val
    96  }
    97  
    98  func copyIn(size uintptr) uintptr {
    99  	v := make([]uint8, size)
   100  	return uintptr(unsafe.Pointer(&v[0]))
   101  }
   102  
   103  func DeepCopierSend(src unsafe.Pointer, tpe *r.DPTpe) (unsafe.Pointer, r.AllocTracker) {
   104  	store := make(Store)
   105  	gtpe := reflect.ConvTypePtr(tpe)
   106  	var tracker r.AllocTracker
   107  	allocater := func(size uintptr) uintptr {
   108  		res := r.UnsafeAllocator.Malloc(size)
   109  		tracker = append(tracker, r.TrackerEntry{res, size})
   110  		return res
   111  	}
   112  	cpy := unsafe.Pointer(DeepCopy(uintptr(src), gtpe, store, allocater))
   113  	return cpy, tracker
   114  }
   115  
   116  func DeepCopier(src unsafe.Pointer, tpe *r.DPTpe) unsafe.Pointer {
   117  	store := make(Store)
   118  	gtpe := reflect.ConvTypePtr(tpe)
   119  	return unsafe.Pointer(DeepCopy(uintptr(src), gtpe, store, copyIn))
   120  }
   121  
   122  // deepCopy entry point for deepCopy.
   123  // Takes a pointer type as element, returns pointer to the same type.
   124  func DeepCopy(src uintptr, tpe reflect.Type, store Store, alloc CA) uintptr {
   125  	if tpe.Kind() != reflect.Ptr {
   126  		panic("Call to deepCopy does not respect calling convention.")
   127  	}
   128  	if v, ok := store[src]; ok {
   129  		return v.start
   130  	}
   131  	// Initial shallow copy.
   132  	dest := alloc(tpe.Elem().Size()) //make([]uint8, tpe.Elem().Size())
   133  	memcpy(dest, src, tpe.Elem().Size())
   134  	store[src] = Copy{dest, tpe.Elem().Size()}
   135  
   136  	// Go into the type's deep copy
   137  	deepCopy1(dest, src, tpe.Elem(), store, alloc)
   138  	return dest
   139  }
   140  
   141  // deepCopy1 dest and src are pointers to type tpe.
   142  func deepCopy1(dest, src uintptr, tpe reflect.Type, store Store, alloc CA) {
   143  	b, k := needsCopy(tpe)
   144  	if !b {
   145  		// flat type, not interesting.
   146  		return
   147  	}
   148  	switch k {
   149  	case reflect.Ptr:
   150  		// at that point dest and ptr should be ptrs to ptrs
   151  		val := DeepCopy(extractValue(src), tpe, store, alloc)
   152  		setPtrValue(dest, val)
   153  	case reflect.Struct:
   154  		offset := uintptr(0)
   155  		for i := 0; i < tpe.NumField(); i++ {
   156  			f := tpe.Field(i)
   157  			if b, _ := needsCopy(f.Type); b {
   158  				deepCopy1(dest+offset, src+offset, f.Type, store, alloc)
   159  			}
   160  			offset += f.Type.Size()
   161  		}
   162  	case reflect.Array:
   163  		if b, _ := needsCopy(tpe.Elem()); !b {
   164  			return
   165  		}
   166  		offset := uintptr(0)
   167  		for i := 0; i < tpe.Len(); i++ {
   168  			deepCopy1(dest+offset, src+offset, tpe.Elem(), store, alloc)
   169  			offset += tpe.Elem().Size()
   170  		}
   171  	// TODO handle slices.
   172  	case reflect.Slice:
   173  		rs := (*rslice)(unsafe.Pointer(src))
   174  		//now allocate the new array
   175  		ndest := alloc(uintptr(rs.c))
   176  		memcpy(ndest, uintptr(rs.array), uintptr(rs.l))
   177  		cs := (*rslice)(unsafe.Pointer(dest))
   178  		cs.array = unsafe.Pointer(ndest)
   179  		cs.l = rs.l
   180  		cs.c = rs.c
   181  		if b, _ := needsCopy(tpe.Elem()); b {
   182  			deepCopy1(ndest, uintptr(rs.array), reflect.ArrayOf(rs.c, tpe.Elem()), store, alloc)
   183  		}
   184  	case reflect.UnsafePointer:
   185  		panic("Unsafe pointers are not allowed!")
   186  	case reflect.Chan:
   187  		panic("Must implement the channel registration")
   188  	default:
   189  		panic("Unhandled type")
   190  	}
   191  }
   192  
   193  func DeepCopyStackFrame(size int32, argp *uint8, ftpe reflect.Type) []byte {
   194  	if ftpe.Kind() != reflect.Func {
   195  		panic("Wrong call to DeepCopyStackFrame")
   196  	}
   197  	if size == 0 {
   198  		return nil
   199  	}
   200  	store := make(Store)
   201  	nframe := make([]byte, int(size))
   202  	fptr := uintptr(unsafe.Pointer(&nframe[0]))
   203  	srcptr := uintptr(unsafe.Pointer(argp))
   204  	memcpy(fptr, srcptr, uintptr(size))
   205  	for i := 0; i < ftpe.NumIn(); i++ {
   206  		// handle cross-domain channels
   207  		if ftpe.In(i).Kind() == reflect.Chan {
   208  			extendUnsafeChanType(fptr, ftpe.In(i))
   209  			goto endloop
   210  		}
   211  		if ok, _ := needsCopy(ftpe.In(i)); !ok {
   212  			goto endloop
   213  		}
   214  		deepCopy1(fptr, srcptr, ftpe.In(i), store, copyIn)
   215  	endloop:
   216  		fptr += ftpe.In(i).Size()
   217  		srcptr += ftpe.In(i).Size()
   218  	}
   219  	return nframe
   220  }
   221  
   222  func extendUnsafeChanType(cptr uintptr, ctype reflect.Type) {
   223  	rdpte := reflect.ConvTypeToDPTpe(ctype.Elem())
   224  	r.SetChanType(extractValue(cptr), rdpte)
   225  }