github.com/google/syzkaller@v0.0.0-20251211124644-a066d2bc4b02/prog/size.go (about)

     1  // Copyright 2017 syzkaller project authors. All rights reserved.
     2  // Use of this source code is governed by Apache 2 LICENSE that can be found in the LICENSE file.
     3  
     4  package prog
     5  
     6  import (
     7  	"fmt"
     8  )
     9  
const (
	// ParentRef is the special reference to the outer struct used in len targets.
	ParentRef = "parent"
	// SyscallRef is the special reference directly to syscall arguments used in len targets.
	SyscallRef = "syscall"
)
    16  
    17  func (target *Target) assignSizes(args []Arg, fields []Field, parents parentStack,
    18  	syscallArgs []Arg, syscallFields []Field, autos map[Arg]bool, overlayField int) {
    19  	for _, arg := range args {
    20  		target.assignArgSize(arg, args, fields, parents, syscallArgs,
    21  			syscallFields, autos, overlayField)
    22  	}
    23  }
    24  
    25  // nolint:revive
    26  func (target *Target) assignArgSize(arg Arg, args []Arg, fields []Field, parents parentStack,
    27  	syscallArgs []Arg, syscallFields []Field, autos map[Arg]bool, overlayField int) {
    28  	if arg = InnerArg(arg); arg == nil {
    29  		return // Pointer to optional len field, no need to fill in value.
    30  	}
    31  	typ, ok := arg.Type().(*LenType)
    32  	if !ok {
    33  		return
    34  	}
    35  	if autos != nil {
    36  		if !autos[arg] {
    37  			return
    38  		}
    39  		delete(autos, arg)
    40  	}
    41  	a := arg.(*ConstArg)
    42  	if typ.Path[0] == SyscallRef {
    43  		target.assignSize(a, nil, typ.Path[1:], syscallArgs, syscallFields, parents, 0)
    44  	} else {
    45  		target.assignSize(a, a, typ.Path, args, fields, parents, overlayField)
    46  	}
    47  }
    48  
    49  func (target *Target) assignSize(dst *ConstArg, pos Arg, path []string, args []Arg,
    50  	fields []Field, parents parentStack, overlayField int) {
    51  	found := target.findArg(pos, path, args, fields, parents, overlayField)
    52  	if found != nil && !found.isAnyPtr {
    53  		dst.Val = target.computeSize(found.arg, found.offset, dst.Type().(*LenType))
    54  	}
    55  }
    56  
// foundArg is the result of a len path lookup (see findArg).
type foundArg struct {
	arg      Arg    // resolved target argument; nil for e.g. nil optional pointers
	offset   uint64 // byte offset of the argument within its parent, or noOffset
	isAnyPtr bool   // the path points into a squashed ANY pointer; leave size as is
}
    62  
    63  func (target *Target) findFieldStruct(buf Arg, path []string, parents parentStack) *foundArg {
    64  	switch arg := buf.(type) {
    65  	case *GroupArg:
    66  		typ := arg.Type().(*StructType)
    67  		return target.findArg(buf, path, arg.Inner, typ.Fields, parents, typ.OverlayField)
    68  	case *UnionArg:
    69  		return target.findArg(buf, path, nil, nil, parents, 0)
    70  	default:
    71  		panic(fmt.Sprintf("unexpected arg type %#v", arg))
    72  	}
    73  }
    74  
// findArg resolves one element of a len path against args/fields, recursing
// into the matching field via findFieldStruct for multi-element paths, or
// walking up the parent stack for ParentRef/template-name references.
// pos is used only for the panic message. overlayField is the index at which
// union-overlay fields restart the byte-offset accounting.
func (target *Target) findArg(pos Arg, path []string, args []Arg, fields []Field,
	parents parentStack, overlayField int) *foundArg {
	elem := path[0]
	path = path[1:]
	var offset uint64
	for i, buf := range args {
		if i == overlayField {
			// Overlay fields start over at offset 0.
			offset = 0
		}
		if buf == nil {
			continue
		}
		if elem != fields[i].Name {
			// Not the target field: accumulate its size into the offset.
			offset += buf.Size()
			continue
		}
		if typ := buf.Type(); typ == target.any.ptrPtr || typ == target.any.ptr64 {
			// If path points into squashed argument, we don't have the target argument.
			// In such case we simply leave size argument as is. It can't happen during generation,
			// only during mutation and mutation can set size to random values, so it should be fine.
			return &foundArg{buf, offset, true}
		}
		buf = InnerArg(buf)
		if buf == nil {
			// Optional pointer that is currently nil.
			return &foundArg{nil, offset, false}
		}
		if len(path) != 0 {
			return target.findFieldStruct(buf, path, parents)
		}
		return &foundArg{buf, offset, false}
	}
	if elem == ParentRef {
		// Explicit reference to the immediately enclosing struct.
		parents, buf := popStack(parents)
		if len(path) != 0 {
			return target.findFieldStruct(buf, path, parents)
		}
		return &foundArg{buf, noOffset, false}
	}
	// Otherwise search up the parent stack for a struct with a matching type name.
	for parents, buf := popStack(parents); buf != nil; parents, buf = popStack(parents) {
		if elem != buf.Type().TemplateName() {
			continue
		}
		if len(path) != 0 {
			return target.findFieldStruct(buf, path, parents)
		}
		return &foundArg{buf, noOffset, false}
	}
	// Nothing matched: the description is inconsistent, report all candidates.
	var fieldNames []string
	for _, field := range fields {
		fieldNames = append(fieldNames, field.Name)
	}
	posName := "nil"
	if pos != nil {
		posName = pos.Type().Name()
	}
	panic(fmt.Sprintf("path references non existent field %q, pos=%q, argsMap: %v, path: %v",
		elem, posName, fieldNames, path))
}
   133  
   134  const noOffset = ^uint64(0)
   135  
   136  func (target *Target) computeSize(arg Arg, offset uint64, lenType *LenType) uint64 {
   137  	if lenType.Offset {
   138  		if offset == noOffset {
   139  			panic("offset of a non-field")
   140  		}
   141  		return offset * 8 / lenType.BitSize
   142  	}
   143  	if arg == nil {
   144  		// For e.g. optional pointers.
   145  		return 0
   146  	}
   147  	bitSize := lenType.BitSize
   148  	if bitSize == 0 {
   149  		bitSize = 8
   150  	}
   151  	switch arg.Type().(type) {
   152  	case *VmaType:
   153  		a := arg.(*PointerArg)
   154  		return a.VmaSize * 8 / bitSize
   155  	case *ArrayType:
   156  		a := arg.(*GroupArg)
   157  		if lenType.BitSize != 0 {
   158  			return a.Size() * 8 / bitSize
   159  		}
   160  		return uint64(len(a.Inner))
   161  	default:
   162  		return arg.Size() * 8 / bitSize
   163  	}
   164  }
   165  
   166  func (target *Target) assignSizesArray(args []Arg, fields []Field, autos map[Arg]bool) {
   167  	target.assignSizes(args, fields, nil, args, fields, autos, 0)
   168  	for _, arg := range args {
   169  		foreachSubArgWithStack(arg, func(arg Arg, ctx *ArgCtx) {
   170  			if typ, ok := arg.Type().(*StructType); ok {
   171  				target.assignSizes(arg.(*GroupArg).Inner, typ.Fields, ctx.parentStack, args, fields, autos, typ.OverlayField)
   172  			}
   173  			if v, ok := arg.(*UnionArg); ok {
   174  				target.assignArgSize(v.Option, nil, nil, ctx.parentStack, args, fields, autos, 0)
   175  			}
   176  		})
   177  	}
   178  }
   179  
// assignSizesCall (re)computes all len/size/offset argument values of call c.
func (target *Target) assignSizesCall(c *Call) {
	target.assignSizesArray(c.Args, c.Meta.Args, nil)
}
   183  
// mutateSize mutates the value of the len argument arg. parent and fields are
// the sibling arguments/field descriptions used to locate the len target (so
// that mutation can be skipped or scaled for special targets).
// Returns true if the argument was changed.
// NOTE(review): the exact sequence of r.* calls determines the produced value;
// do not reorder them.
func (r *randGen) mutateSize(arg *ConstArg, parent []Arg, fields []Field) bool {
	typ := arg.Type().(*LenType)
	elemSize := typ.BitSize / 8
	if elemSize == 0 {
		elemSize = 1
		// TODO(dvyukov): implement path support for size mutation.
		if len(typ.Path) == 1 {
			// Single-element path: try to find the target among siblings to
			// refine elemSize or bail out for unmutable targets.
			for i, field := range parent {
				if typ.Path[0] != fields[i].Name {
					continue
				}
				if inner := InnerArg(field); inner != nil {
					switch targetType := inner.Type().(type) {
					case *VmaType:
						return false
					case *BufferType:
						// Don't mutate size of compressed images.
						// If we do, then our code will fail/crash on decompression.
						if targetType.Kind == BufferCompressed {
							return false
						}
					case *ArrayType:
						if targetType.Elem.Varlen() {
							return false
						}
						elemSize = targetType.Elem.Size()
					}
				}
				break
			}
		}
	}
	if r.oneOf(100) {
		// Rarely use a completely random value.
		arg.Val = r.rand64()
		return true
	}
	if r.bin() {
		// Small adjustment to trigger missed size checks.
		if arg.Val != 0 && r.bin() {
			arg.Val = r.randRangeInt(0, arg.Val-1, arg.Type().TypeBitSize(), 0)
		} else {
			arg.Val = r.randRangeInt(arg.Val+1, arg.Val+100, arg.Type().TypeBitSize(), 0)
		}
		return true
	}
	// Try to provoke int overflows.
	max := ^uint64(0)
	if r.oneOf(3) {
		max = 1<<32 - 1
		if r.oneOf(2) {
			max = 1<<16 - 1
			if r.oneOf(2) {
				max = 1<<8 - 1
			}
		}
	}
	// Pick a value near max/elemSize so that value*elemSize is near the
	// boundary of the chosen integer width.
	n := max / elemSize
	delta := uint64(1000 - r.biasedRand(1000, 10))
	if elemSize == 1 || r.oneOf(10) {
		n -= delta
	} else {
		n += delta
	}
	arg.Val = n
	return true
}