github.com/google/syzkaller@v0.0.0-20240517125934-c0f1611a36d6/prog/size.go (about)

     1  // Copyright 2017 syzkaller project authors. All rights reserved.
     2  // Use of this source code is governed by Apache 2 LICENSE that can be found in the LICENSE file.
     3  
     4  package prog
     5  
     6  import (
     7  	"fmt"
     8  )
     9  
const (
	// ParentRef is a special reference to the outer struct, used in len targets.
	ParentRef = "parent"
	// SyscallRef is a special reference directly to syscall arguments, used in len targets.
	SyscallRef = "syscall"
)
    16  
    17  func (target *Target) assignSizes(args []Arg, fields []Field, parents parentStack,
    18  	syscallArgs []Arg, syscallFields []Field, autos map[Arg]bool, overlayField int) {
    19  	for _, arg := range args {
    20  		target.assignArgSize(arg, args, fields, parents, syscallArgs,
    21  			syscallFields, autos, overlayField)
    22  	}
    23  }
    24  
    25  func (target *Target) assignArgSize(arg Arg, args []Arg, fields []Field, parents parentStack,
    26  	syscallArgs []Arg, syscallFields []Field, autos map[Arg]bool, overlayField int) {
    27  	if arg = InnerArg(arg); arg == nil {
    28  		return // Pointer to optional len field, no need to fill in value.
    29  	}
    30  	typ, ok := arg.Type().(*LenType)
    31  	if !ok {
    32  		return
    33  	}
    34  	if autos != nil {
    35  		if !autos[arg] {
    36  			return
    37  		}
    38  		delete(autos, arg)
    39  	}
    40  	a := arg.(*ConstArg)
    41  	if typ.Path[0] == SyscallRef {
    42  		target.assignSize(a, nil, typ.Path[1:], syscallArgs, syscallFields, parents, 0)
    43  	} else {
    44  		target.assignSize(a, a, typ.Path, args, fields, parents, overlayField)
    45  	}
    46  }
    47  
    48  func (target *Target) assignSize(dst *ConstArg, pos Arg, path []string, args []Arg,
    49  	fields []Field, parents parentStack, overlayField int) {
    50  	found := target.findArg(pos, path, args, fields, parents, overlayField)
    51  	if found != nil && !found.isAnyPtr {
    52  		dst.Val = target.computeSize(found.arg, found.offset, dst.Type().(*LenType))
    53  	}
    54  }
    55  
// foundArg is the result of a len-target path lookup (see findArg).
type foundArg struct {
	arg      Arg    // resolved argument; nil for e.g. an absent optional pointer
	offset   uint64 // byte offset of the argument within its struct, or noOffset for non-field matches
	isAnyPtr bool   // path points into a squashed ANY pointer; the size value must be left as is
}
    61  
    62  func (target *Target) findFieldStruct(buf Arg, path []string, parents parentStack) *foundArg {
    63  	switch arg := buf.(type) {
    64  	case *GroupArg:
    65  		typ := arg.Type().(*StructType)
    66  		return target.findArg(buf, path, arg.Inner, typ.Fields, parents, typ.OverlayField)
    67  	case *UnionArg:
    68  		return target.findArg(buf, path, nil, nil, parents, 0)
    69  	default:
    70  		panic(fmt.Sprintf("unexpected arg type %#v", arg))
    71  	}
    72  }
    73  
// findArg resolves one step of a len-target path (elem = path[0]) and recurses
// for the rest. Lookup order: (1) a sibling field in args/fields matched by
// name, (2) the special "parent" reference, (3) an enclosing struct/union on
// the parents stack matched by template name. Panics if the path cannot be
// resolved. pos is used only for the panic message. overlayField is the index
// in args at which overlay fields begin; their offsets restart at 0.
func (target *Target) findArg(pos Arg, path []string, args []Arg, fields []Field,
	parents parentStack, overlayField int) *foundArg {
	elem := path[0]
	path = path[1:]
	var offset uint64
	for i, buf := range args {
		if i == overlayField {
			// Overlay fields start over at offset 0.
			offset = 0
		}
		if buf == nil {
			continue
		}
		if elem != fields[i].Name {
			// Not the field we want: accumulate its size into the offset.
			offset += buf.Size()
			continue
		}
		if typ := buf.Type(); typ == target.any.ptrPtr || typ == target.any.ptr64 {
			// If path points into squashed argument, we don't have the target argument.
			// In such case we simply leave size argument as is. It can't happen during generation,
			// only during mutation and mutation can set size to random values, so it should be fine.
			return &foundArg{buf, offset, true}
		}
		buf = InnerArg(buf)
		if buf == nil {
			// E.g. an optional pointer that is not present.
			return &foundArg{nil, offset, false}
		}
		if len(path) != 0 {
			// Descend into the matched struct/union for the rest of the path.
			return target.findFieldStruct(buf, path, parents)
		}
		return &foundArg{buf, offset, false}
	}
	if elem == ParentRef {
		parents, buf := popStack(parents)
		if len(path) != 0 {
			return target.findFieldStruct(buf, path, parents)
		}
		return &foundArg{buf, noOffset, false}
	}
	// Last resort: search enclosing structs by template name up the stack.
	for parents, buf := popStack(parents); buf != nil; parents, buf = popStack(parents) {
		if elem != buf.Type().TemplateName() {
			continue
		}
		if len(path) != 0 {
			return target.findFieldStruct(buf, path, parents)
		}
		return &foundArg{buf, noOffset, false}
	}
	var fieldNames []string
	for _, field := range fields {
		fieldNames = append(fieldNames, field.Name)
	}
	posName := "nil"
	if pos != nil {
		posName = pos.Type().Name()
	}
	panic(fmt.Sprintf("path references non existent field %q, pos=%q, argsMap: %v, path: %v",
		elem, posName, fieldNames, path))
}
   132  
   133  const noOffset = ^uint64(0)
   134  
   135  func (target *Target) computeSize(arg Arg, offset uint64, lenType *LenType) uint64 {
   136  	if lenType.Offset {
   137  		if offset == noOffset {
   138  			panic("offset of a non-field")
   139  		}
   140  		return offset * 8 / lenType.BitSize
   141  	}
   142  	if arg == nil {
   143  		// For e.g. optional pointers.
   144  		return 0
   145  	}
   146  	bitSize := lenType.BitSize
   147  	if bitSize == 0 {
   148  		bitSize = 8
   149  	}
   150  	switch arg.Type().(type) {
   151  	case *VmaType:
   152  		a := arg.(*PointerArg)
   153  		return a.VmaSize * 8 / bitSize
   154  	case *ArrayType:
   155  		a := arg.(*GroupArg)
   156  		if lenType.BitSize != 0 {
   157  			return a.Size() * 8 / bitSize
   158  		}
   159  		return uint64(len(a.Inner))
   160  	default:
   161  		return arg.Size() * 8 / bitSize
   162  	}
   163  }
   164  
   165  func (target *Target) assignSizesArray(args []Arg, fields []Field, autos map[Arg]bool) {
   166  	target.assignSizes(args, fields, nil, args, fields, autos, 0)
   167  	for _, arg := range args {
   168  		foreachSubArgWithStack(arg, func(arg Arg, ctx *ArgCtx) {
   169  			if typ, ok := arg.Type().(*StructType); ok {
   170  				target.assignSizes(arg.(*GroupArg).Inner, typ.Fields, ctx.parentStack, args, fields, autos, typ.OverlayField)
   171  			}
   172  			if v, ok := arg.(*UnionArg); ok {
   173  				target.assignArgSize(v.Option, nil, nil, ctx.parentStack, args, fields, autos, 0)
   174  			}
   175  		})
   176  	}
   177  }
   178  
// assignSizesCall fills in the values of all len arguments of the call.
func (target *Target) assignSizesCall(c *Call) {
	target.assignSizesArray(c.Args, c.Meta.Args, nil)
}
   182  
// mutateSize mutates the value of the size argument arg. parent and fields are
// arg's sibling arguments and their declarations, used to look up the element
// size of the target the size refers to. Returns false when mutation is
// undesirable (VMA sizes, compressed buffers, varlen array elements),
// true otherwise.
func (r *randGen) mutateSize(arg *ConstArg, parent []Arg, fields []Field) bool {
	typ := arg.Type().(*LenType)
	elemSize := typ.BitSize / 8
	if elemSize == 0 {
		elemSize = 1
		// TODO(dvyukov): implement path support for size mutation.
		if len(typ.Path) == 1 {
			// Find the sibling the size refers to and derive the element size.
			for i, field := range parent {
				if typ.Path[0] != fields[i].Name {
					continue
				}
				if inner := InnerArg(field); inner != nil {
					switch targetType := inner.Type().(type) {
					case *VmaType:
						return false
					case *BufferType:
						// Don't mutate size of compressed images.
						// If we do, then our code will fail/crash on decompression.
						if targetType.Kind == BufferCompressed {
							return false
						}
					case *ArrayType:
						if targetType.Elem.Varlen() {
							return false
						}
						elemSize = targetType.Elem.Size()
					}
				}
				break
			}
		}
	}
	if r.oneOf(100) {
		// Occasionally: a completely random 64-bit value.
		arg.Val = r.rand64()
		return true
	}
	if r.bin() {
		// Small adjustment to trigger missed size checks.
		if arg.Val != 0 && r.bin() {
			arg.Val = r.randRangeInt(0, arg.Val-1, arg.Type().TypeBitSize(), 0)
		} else {
			arg.Val = r.randRangeInt(arg.Val+1, arg.Val+100, arg.Type().TypeBitSize(), 0)
		}
		return true
	}
	// Try to provoke int overflows.
	max := ^uint64(0)
	if r.oneOf(3) {
		max = 1<<32 - 1
		if r.oneOf(2) {
			max = 1<<16 - 1
			if r.oneOf(2) {
				max = 1<<8 - 1
			}
		}
	}
	// Element count that puts the total byte size near the chosen integer
	// width's boundary, nudged by a small biased delta in either direction.
	n := max / elemSize
	delta := uint64(1000 - r.biasedRand(1000, 10))
	if elemSize == 1 || r.oneOf(10) {
		n -= delta
	} else {
		n += delta
	}
	arg.Val = n
	return true
}
   248  }