github.com/bir3/gocompiler@v0.9.2202/src/cmd/compile/internal/ssa/rewriteRISCV64latelower.go (about)

     1  // Code generated from _gen/RISCV64latelower.rules using 'go generate'; DO NOT EDIT.
     2  
     3  package ssa
     4  
// rewriteValueRISCV64latelower dispatches v to the late-lowering rewrite
// function for its opcode and reports whether v was rewritten. Only the
// shift-immediate ops (SLLI, SRAI, SRLI) have late-lowering rules on
// RISCV64; all other ops are left unchanged.
func rewriteValueRISCV64latelower(v *Value) bool {
	switch v.Op {
	case OpRISCV64SLLI:
		return rewriteValueRISCV64latelower_OpRISCV64SLLI(v)
	case OpRISCV64SRAI:
		return rewriteValueRISCV64latelower_OpRISCV64SRAI(v)
	case OpRISCV64SRLI:
		return rewriteValueRISCV64latelower_OpRISCV64SRLI(v)
	}
	return false
}
// rewriteValueRISCV64latelower_OpRISCV64SLLI rewrites SLLI (shift left
// logical immediate) values and reports whether a rewrite occurred.
// A left shift of a zero-extended subword (MOVBUreg/MOVHUreg/MOVWUreg)
// is folded into an SLLI/SRLI pair, avoiding the separate extension:
// shifting left by (64-width) then logically right by (64-width)-c both
// clears the bits above the subword and applies the shift by c. A shift
// by zero is replaced by its operand. Each rule's match/cond/result
// comment below is emitted by the rule generator.
func rewriteValueRISCV64latelower_OpRISCV64SLLI(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (SLLI [c] (MOVBUreg x))
	// cond: c <= 56
	// result: (SRLI [56-c] (SLLI <typ.UInt64> [56] x))
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpRISCV64MOVBUreg {
			break
		}
		x := v_0.Args[0]
		if !(c <= 56) {
			break
		}
		// SLLI x by 56 puts the low byte at the top; SRLI by 56-c leaves
		// the zero-extended byte shifted left by c.
		v.reset(OpRISCV64SRLI)
		v.AuxInt = int64ToAuxInt(56 - c)
		v0 := b.NewValue0(v.Pos, OpRISCV64SLLI, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(56)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (SLLI [c] (MOVHUreg x))
	// cond: c <= 48
	// result: (SRLI [48-c] (SLLI <typ.UInt64> [48] x))
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpRISCV64MOVHUreg {
			break
		}
		x := v_0.Args[0]
		if !(c <= 48) {
			break
		}
		v.reset(OpRISCV64SRLI)
		v.AuxInt = int64ToAuxInt(48 - c)
		v0 := b.NewValue0(v.Pos, OpRISCV64SLLI, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(48)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (SLLI [c] (MOVWUreg x))
	// cond: c <= 32
	// result: (SRLI [32-c] (SLLI <typ.UInt64> [32] x))
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpRISCV64MOVWUreg {
			break
		}
		x := v_0.Args[0]
		if !(c <= 32) {
			break
		}
		v.reset(OpRISCV64SRLI)
		v.AuxInt = int64ToAuxInt(32 - c)
		v0 := b.NewValue0(v.Pos, OpRISCV64SLLI, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(32)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (SLLI [0] x)
	// result: x
	for {
		// A shift by zero is the identity; reuse the operand directly.
		if auxIntToInt64(v.AuxInt) != 0 {
			break
		}
		x := v_0
		v.copyOf(x)
		return true
	}
	return false
}
// rewriteValueRISCV64latelower_OpRISCV64SRAI rewrites SRAI (shift right
// arithmetic immediate) values and reports whether a rewrite occurred.
// An arithmetic right shift of a sign-extended subword
// (MOVBreg/MOVHreg/MOVWreg) is folded into an SLLI/SRAI pair: shifting
// left by (64-width) places the subword's sign bit at bit 63, and the
// arithmetic shift right by (64-width)+c then both sign-extends and
// applies the shift by c. The conds (c < width) keep the combined shift
// amount below 64. A shift by zero is replaced by its operand. The
// match/cond/result comments below are emitted by the rule generator.
func rewriteValueRISCV64latelower_OpRISCV64SRAI(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (SRAI [c] (MOVBreg x))
	// cond: c < 8
	// result: (SRAI [56+c] (SLLI <typ.Int64> [56] x))
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpRISCV64MOVBreg {
			break
		}
		x := v_0.Args[0]
		if !(c < 8) {
			break
		}
		v.reset(OpRISCV64SRAI)
		v.AuxInt = int64ToAuxInt(56 + c)
		v0 := b.NewValue0(v.Pos, OpRISCV64SLLI, typ.Int64)
		v0.AuxInt = int64ToAuxInt(56)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (SRAI [c] (MOVHreg x))
	// cond: c < 16
	// result: (SRAI [48+c] (SLLI <typ.Int64> [48] x))
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpRISCV64MOVHreg {
			break
		}
		x := v_0.Args[0]
		if !(c < 16) {
			break
		}
		v.reset(OpRISCV64SRAI)
		v.AuxInt = int64ToAuxInt(48 + c)
		v0 := b.NewValue0(v.Pos, OpRISCV64SLLI, typ.Int64)
		v0.AuxInt = int64ToAuxInt(48)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (SRAI [c] (MOVWreg x))
	// cond: c < 32
	// result: (SRAI [32+c] (SLLI <typ.Int64> [32] x))
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpRISCV64MOVWreg {
			break
		}
		x := v_0.Args[0]
		if !(c < 32) {
			break
		}
		v.reset(OpRISCV64SRAI)
		v.AuxInt = int64ToAuxInt(32 + c)
		v0 := b.NewValue0(v.Pos, OpRISCV64SLLI, typ.Int64)
		v0.AuxInt = int64ToAuxInt(32)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (SRAI [0] x)
	// result: x
	for {
		// A shift by zero is the identity; reuse the operand directly.
		if auxIntToInt64(v.AuxInt) != 0 {
			break
		}
		x := v_0
		v.copyOf(x)
		return true
	}
	return false
}
// rewriteValueRISCV64latelower_OpRISCV64SRLI rewrites SRLI (shift right
// logical immediate) values and reports whether a rewrite occurred.
// A logical right shift of a zero-extended subword
// (MOVBUreg/MOVHUreg/MOVWUreg) is folded into an SLLI/SRLI pair:
// shifting left by (64-width) clears the upper bits, and the logical
// shift right by (64-width)+c then zero-extends and applies the shift
// by c. The conds (c < width) keep the combined shift amount below 64.
// A shift by zero is replaced by its operand. The match/cond/result
// comments below are emitted by the rule generator.
func rewriteValueRISCV64latelower_OpRISCV64SRLI(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (SRLI [c] (MOVBUreg x))
	// cond: c < 8
	// result: (SRLI [56+c] (SLLI <typ.UInt64> [56] x))
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpRISCV64MOVBUreg {
			break
		}
		x := v_0.Args[0]
		if !(c < 8) {
			break
		}
		v.reset(OpRISCV64SRLI)
		v.AuxInt = int64ToAuxInt(56 + c)
		v0 := b.NewValue0(v.Pos, OpRISCV64SLLI, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(56)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (SRLI [c] (MOVHUreg x))
	// cond: c < 16
	// result: (SRLI [48+c] (SLLI <typ.UInt64> [48] x))
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpRISCV64MOVHUreg {
			break
		}
		x := v_0.Args[0]
		if !(c < 16) {
			break
		}
		v.reset(OpRISCV64SRLI)
		v.AuxInt = int64ToAuxInt(48 + c)
		v0 := b.NewValue0(v.Pos, OpRISCV64SLLI, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(48)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (SRLI [c] (MOVWUreg x))
	// cond: c < 32
	// result: (SRLI [32+c] (SLLI <typ.UInt64> [32] x))
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpRISCV64MOVWUreg {
			break
		}
		x := v_0.Args[0]
		if !(c < 32) {
			break
		}
		v.reset(OpRISCV64SRLI)
		v.AuxInt = int64ToAuxInt(32 + c)
		v0 := b.NewValue0(v.Pos, OpRISCV64SLLI, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(32)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (SRLI [0] x)
	// result: x
	for {
		// A shift by zero is the identity; reuse the operand directly.
		if auxIntToInt64(v.AuxInt) != 0 {
			break
		}
		x := v_0
		v.copyOf(x)
		return true
	}
	return false
}
// rewriteBlockRISCV64latelower reports whether block b was rewritten by a
// late-lowering block rule. RISCV64 has no block rewrite rules in this
// pass, so it always returns false.
func rewriteBlockRISCV64latelower(b *Block) bool {
	return false
}