github.com/google/syzkaller@v0.0.0-20240517125934-c0f1611a36d6/prog/minimization.go

// Copyright 2018 syzkaller project authors. All rights reserved.
// Use of this source code is governed by Apache 2 LICENSE that can be found in the LICENSE file.

package prog

import (
	"bytes"
	"fmt"
	"reflect"
)

// Minimize minimizes program p0 into an equivalent program using the
// equivalence predicate pred0. It iteratively generates simpler candidate
// programs and asks pred0 whether each candidate is still equivalent to the
// original. If it is, the simplification attempt is committed and the process
// continues.
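//
// A minimal usage sketch (reproduces is a hypothetical caller-supplied check
// that the candidate still triggers the same behavior; crashCallIndex is the
// index of the call of interest, or -1 if there is none):
//
//	minimized, idx := Minimize(p, crashCallIndex, true, func(p *Prog, callIndex int) bool {
//		return reproduces(p, callIndex)
//	})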
func Minimize(p0 *Prog, callIndex0 int, crash bool, pred0 func(*Prog, int) bool) (*Prog, int) {
	pred := func(p *Prog, callIndex int) bool {
		p.sanitizeFix()
		p.debugValidate()
		return pred0(p, callIndex)
	}
	name0 := ""
	if callIndex0 != -1 {
		if callIndex0 < 0 || callIndex0 >= len(p0.Calls) {
			panic("bad call index")
		}
		name0 = p0.Calls[callIndex0].Meta.Name
	}

	// Try to remove calls one-by-one (except the call of interest, if any).
	p0, callIndex0 = removeCalls(p0, callIndex0, crash, pred)

	// Try to reset all call props to their default values.
	p0 = resetCallProps(p0, callIndex0, pred)

	// Try to minimize individual calls.
	for i := 0; i < len(p0.Calls); i++ {
		if p0.Calls[i].Meta.Attrs.NoMinimize {
			continue
		}
		ctx := &minimizeArgsCtx{
			target:     p0.Target,
			p0:         &p0,
			callIndex0: callIndex0,
			crash:      crash,
			pred:       pred,
			triedPaths: make(map[string]bool),
		}
	again:
		ctx.p = p0.Clone()
		ctx.call = ctx.p.Calls[i]
		for j, field := range ctx.call.Meta.Args {
			if ctx.do(ctx.call.Args[j], field.Name, "") {
				goto again
			}
		}
		p0 = minimizeCallProps(p0, i, callIndex0, pred)
	}

	if callIndex0 != -1 {
		if callIndex0 < 0 || callIndex0 >= len(p0.Calls) || name0 != p0.Calls[callIndex0].Meta.Name {
			panic(fmt.Sprintf("bad call index after minimization: ncalls=%v index=%v call=%v/%v",
				len(p0.Calls), callIndex0, name0, p0.Calls[callIndex0].Meta.Name))
		}
	}
	return p0, callIndex0
}

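// removeCalls attempts to shrink the program: if there is a call of interest,
// it first tries to drop all calls after it at once, and then tries to remove
// each remaining call one-by-one (never the call of interest itself). It
// returns the reduced program and the updated index of the call of interest.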
func removeCalls(p0 *Prog, callIndex0 int, crash bool, pred func(*Prog, int) bool) (*Prog, int) {
	if callIndex0 >= 0 && callIndex0+2 < len(p0.Calls) {
		// It's frequently the case that all calls after the call of interest are unnecessary.
		// Try to drop them all at once.
		p := p0.Clone()
		for i := len(p0.Calls) - 1; i > callIndex0; i-- {
			p.RemoveCall(i)
		}
		if pred(p, callIndex0) {
			p0 = p
		}
	}
	for i := len(p0.Calls) - 1; i >= 0; i-- {
		if i == callIndex0 {
			continue
		}
		callIndex := callIndex0
		if i < callIndex {
			callIndex--
		}
		p := p0.Clone()
		p.RemoveCall(i)
		if !pred(p, callIndex) {
			continue
		}
		p0 = p
		callIndex0 = callIndex
	}
	return p0, callIndex0
}

func resetCallProps(p0 *Prog, callIndex0 int, pred func(*Prog, int) bool) *Prog {
	// Try to reset all call props to their default values.
	// This should be reasonable for many progs.
	p := p0.Clone()
	anyDifferent := false
	for idx := range p.Calls {
		if !reflect.DeepEqual(p.Calls[idx].Props, CallProps{}) {
			p.Calls[idx].Props = CallProps{}
			anyDifferent = true
		}
	}
	if anyDifferent && pred(p, callIndex0) {
		return p
	}
	return p0
}

func minimizeCallProps(p0 *Prog, callIndex, callIndex0 int, pred func(*Prog, int) bool) *Prog {
	props := p0.Calls[callIndex].Props

	// Try to drop fault injection.
	if props.FailNth > 0 {
		p := p0.Clone()
		p.Calls[callIndex].Props.FailNth = 0
		if pred(p, callIndex0) {
			p0 = p
		}
	}

	// Try to drop async.
	if props.Async {
		p := p0.Clone()
		p.Calls[callIndex].Props.Async = false
		if pred(p, callIndex0) {
			p0 = p
		}
	}

	// Try to drop rerun.
	if props.Rerun > 0 {
		p := p0.Clone()
		p.Calls[callIndex].Props.Rerun = 0
		if pred(p, callIndex0) {
			p0 = p
		}
	}

	return p0
}

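// minimizeArgsCtx holds the state of one argument-minimization pass over a
// single call. p0 points at the caller's committed program pointer, p is the
// scratch clone currently being mutated, call is the call being minimized
// inside p, and triedPaths records argument paths whose simplification has
// already been attempted so they are not retried after a restart.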
type minimizeArgsCtx struct {
	target     *Target
	p0         **Prog
	p          *Prog
	call       *Call
	callIndex0 int
	crash      bool
	pred       func(*Prog, int) bool
	triedPaths map[string]bool
}

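// do attempts a single simplification of arg, identified by its field name and
// path within the call. It returns true if the program tree was mutated
// (whether or not the result was committed to *ctx.p0), in which case the
// caller must restart iteration on a fresh clone.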
func (ctx *minimizeArgsCtx) do(arg Arg, field, path string) bool {
	path += fmt.Sprintf("-%v", field)
	if ctx.triedPaths[path] {
		return false
	}
	p0 := *ctx.p0
	if arg.Type().minimize(ctx, arg, path) {
		return true
	}
	if *ctx.p0 == ctx.p {
		// If minimize committed a new program, it must return true.
		// Otherwise *ctx.p0 and ctx.p will point to the same program
		// and any temp mutations to ctx.p will unintentionally affect ctx.p0.
		panic("shared program committed")
	}
	if *ctx.p0 != p0 {
		// New program was committed, but we did not start iteration anew.
		// This means we are iterating over a stale tree and any changes won't be visible.
		panic("iterating over stale program")
	}
	ctx.triedPaths[path] = true
	return false
}

func (typ *TypeCommon) minimize(ctx *minimizeArgsCtx, arg Arg, path string) bool {
	return false
}

func (typ *StructType) minimize(ctx *minimizeArgsCtx, arg Arg, path string) bool {
	a := arg.(*GroupArg)
	for i, innerArg := range a.Inner {
		if ctx.do(innerArg, typ.Fields[i].Name, path) {
			return true
		}
	}
	return false
}

func (typ *UnionType) minimize(ctx *minimizeArgsCtx, arg Arg, path string) bool {
	a := arg.(*UnionArg)
	return ctx.do(a.Option, typ.Fields[a.Index].Name, path)
}

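// For pointers, first try to drop the pointee altogether and replace the
// pointer with the target's first special pointer value; once that path has
// been tried, descend into the pointee instead.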
func (typ *PtrType) minimize(ctx *minimizeArgsCtx, arg Arg, path string) bool {
	a := arg.(*PointerArg)
	if a.Res == nil {
		return false
	}
	if path1 := path + ">"; !ctx.triedPaths[path1] {
		removeArg(a.Res)
		replaceArg(a, MakeSpecialPointerArg(a.Type(), a.Dir(), 0))
		ctx.target.assignSizesCall(ctx.call)
		if ctx.pred(ctx.p, ctx.callIndex0) {
			*ctx.p0 = ctx.p
		}
		ctx.triedPaths[path1] = true
		return true
	}
	return ctx.do(a.Res, "", path)
}

func (typ *ArrayType) minimize(ctx *minimizeArgsCtx, arg Arg, path string) bool {
	a := arg.(*GroupArg)
	for i := len(a.Inner) - 1; i >= 0; i-- {
		elem := a.Inner[i]
		elemPath := fmt.Sprintf("%v-%v", path, i)
		// Try to remove individual elements one-by-one.
		if !ctx.crash && !ctx.triedPaths[elemPath] &&
			(typ.Kind == ArrayRandLen ||
				typ.Kind == ArrayRangeLen && uint64(len(a.Inner)) > typ.RangeBegin) {
			ctx.triedPaths[elemPath] = true
			copy(a.Inner[i:], a.Inner[i+1:])
			a.Inner = a.Inner[:len(a.Inner)-1]
			removeArg(elem)
			ctx.target.assignSizesCall(ctx.call)
			if ctx.pred(ctx.p, ctx.callIndex0) {
				*ctx.p0 = ctx.p
			}
			return true
		}
		if ctx.do(elem, "", elemPath) {
			return true
		}
	}
	return false
}

func (typ *IntType) minimize(ctx *minimizeArgsCtx, arg Arg, path string) bool {
	return minimizeInt(ctx, arg, path)
}

func (typ *FlagsType) minimize(ctx *minimizeArgsCtx, arg Arg, path string) bool {
	return minimizeInt(ctx, arg, path)
}

func (typ *ProcType) minimize(ctx *minimizeArgsCtx, arg Arg, path string) bool {
	if !typ.Optional() {
		// The default value for ProcType is 0 (the same for all PIDs).
		// Usually 0 either does not make sense at all or makes different PIDs collide
		// (since we use ProcType to separate value ranges for different PIDs).
		// So don't change ProcType to 0 unless the type is explicitly marked as opt
		// (in that case we will also generate 0 anyway).
		return false
	}
	return minimizeInt(ctx, arg, path)
}

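// minimizeInt tries to replace an integer/flags/proc argument with its type's
// default value. It returns true if the program tree changed and iteration
// must restart: either the simplification was accepted by pred, or resetting
// the value forced conditional fields to be patched.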
func minimizeInt(ctx *minimizeArgsCtx, arg Arg, path string) bool {
	// TODO: try to reset bits in ints
	// TODO: try to set separate flags
	if ctx.crash {
		return false
	}
	a := arg.(*ConstArg)
	def := arg.Type().DefaultArg(arg.Dir()).(*ConstArg)
	if a.Val == def.Val {
		return false
	}
	v0 := a.Val
	a.Val = def.Val

	// By mutating an integer, we risk violating conditional fields.
	// If the fields are patched, the minimization process must be restarted.
	patched := ctx.call.setDefaultConditions(ctx.p.Target, false)
	if ctx.pred(ctx.p, ctx.callIndex0) {
		*ctx.p0 = ctx.p
		ctx.triedPaths[path] = true
		return true
	}
	a.Val = v0
	if patched {
		// The attempt failed, so there is no point in coming back to this path
		// after the forced restart.
		ctx.triedPaths[path] = true
	}
	return patched
}

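// For resources, try to sever the reference to the producing call and use the
// type's default value instead; the reference is restored if pred rejects the
// change.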
func (typ *ResourceType) minimize(ctx *minimizeArgsCtx, arg Arg, path string) bool {
	if ctx.crash {
		return false
	}
	a := arg.(*ResultArg)
	if a.Res == nil {
		return false
	}
	r0 := a.Res
	delete(a.Res.uses, a)
	a.Res, a.Val = nil, typ.Default()
	if ctx.pred(ctx.p, ctx.callIndex0) {
		*ctx.p0 = ctx.p
	} else {
		a.Res, a.Val = r0, 0
		a.Res.uses[a] = true
	}
	ctx.triedPaths[path] = true
	return true
}

func (typ *BufferType) minimize(ctx *minimizeArgsCtx, arg Arg, path string) bool {
	if arg.Dir() == DirOut {
		return false
	}
	if typ.IsCompressed() {
		panic(fmt.Sprintf("minimizing `no_minimize` call %v", ctx.call.Meta.Name))
	}
	a := arg.(*DataArg)
	switch typ.Kind {
	case BufferBlobRand, BufferBlobRange:
		// TODO: try to set individual bytes to 0
		len0 := len(a.Data())
		minLen := int(typ.RangeBegin)
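		// Truncate the blob tail with progressively smaller steps: start by
		// cutting everything above the minimum length and halve the step after
		// each rejected attempt (crash minimization gives up after one try).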
		for step := len(a.Data()) - minLen; len(a.Data()) > minLen && step > 0; {
			if len(a.Data())-step >= minLen {
				a.data = a.Data()[:len(a.Data())-step]
				ctx.target.assignSizesCall(ctx.call)
				if ctx.pred(ctx.p, ctx.callIndex0) {
					continue
				}
				a.data = a.Data()[:len(a.Data())+step]
				ctx.target.assignSizesCall(ctx.call)
			}
			step /= 2
			if ctx.crash {
				break
			}
		}
		if len(a.Data()) != len0 {
			*ctx.p0 = ctx.p
			ctx.triedPaths[path] = true
			return true
		}
	case BufferFilename:
		// Try to undo target.SpecialFileLenghts mutation
		// and reduce file name length.
		if !typ.Varlen() {
			return false
		}
		data0 := append([]byte{}, a.Data()...)
		a.data = bytes.TrimRight(a.Data(), specialFileLenPad+"\x00")
		if !typ.NoZ {
			a.data = append(a.data, 0)
		}
		if bytes.Equal(a.data, data0) {
			return false
		}
		ctx.target.assignSizesCall(ctx.call)
		if ctx.pred(ctx.p, ctx.callIndex0) {
			*ctx.p0 = ctx.p
		}
		ctx.triedPaths[path] = true
		return true
	}
	return false
}