Source file src/cmd/compile/internal/ssa/rewriteRISCV64.go

     1  // Code generated from _gen/RISCV64.rules using 'go generate'; DO NOT EDIT.
     2  
     3  package ssa
     4  
     5  import "math"
     6  import "cmd/compile/internal/types"
     7  
// rewriteValueRISCV64 applies the RISCV64 lowering rules (generated from
// _gen/RISCV64.rules) to v. Ops with a single unconditional rule are lowered
// in place by replacing v.Op (arguments are reused as-is); ops with multiple
// or conditional rules delegate to a dedicated rewriteValueRISCV64_Op*
// helper. It reports whether v was rewritten.
func rewriteValueRISCV64(v *Value) bool {
	switch v.Op {
	case OpAbs:
		v.Op = OpRISCV64FABSD
		return true
	case OpAdd16:
		v.Op = OpRISCV64ADD
		return true
	case OpAdd32:
		v.Op = OpRISCV64ADD
		return true
	case OpAdd32F:
		v.Op = OpRISCV64FADDS
		return true
	case OpAdd64:
		v.Op = OpRISCV64ADD
		return true
	case OpAdd64F:
		v.Op = OpRISCV64FADDD
		return true
	case OpAdd8:
		v.Op = OpRISCV64ADD
		return true
	case OpAddPtr:
		v.Op = OpRISCV64ADD
		return true
	case OpAddr:
		return rewriteValueRISCV64_OpAddr(v)
	case OpAnd16:
		v.Op = OpRISCV64AND
		return true
	case OpAnd32:
		v.Op = OpRISCV64AND
		return true
	case OpAnd64:
		v.Op = OpRISCV64AND
		return true
	case OpAnd8:
		v.Op = OpRISCV64AND
		return true
	case OpAndB:
		v.Op = OpRISCV64AND
		return true
	case OpAtomicAdd32:
		v.Op = OpRISCV64LoweredAtomicAdd32
		return true
	case OpAtomicAdd64:
		v.Op = OpRISCV64LoweredAtomicAdd64
		return true
	case OpAtomicAnd32:
		v.Op = OpRISCV64LoweredAtomicAnd32
		return true
	case OpAtomicAnd8:
		return rewriteValueRISCV64_OpAtomicAnd8(v)
	case OpAtomicCompareAndSwap32:
		return rewriteValueRISCV64_OpAtomicCompareAndSwap32(v)
	case OpAtomicCompareAndSwap64:
		v.Op = OpRISCV64LoweredAtomicCas64
		return true
	case OpAtomicExchange32:
		v.Op = OpRISCV64LoweredAtomicExchange32
		return true
	case OpAtomicExchange64:
		v.Op = OpRISCV64LoweredAtomicExchange64
		return true
	case OpAtomicLoad32:
		v.Op = OpRISCV64LoweredAtomicLoad32
		return true
	case OpAtomicLoad64:
		v.Op = OpRISCV64LoweredAtomicLoad64
		return true
	case OpAtomicLoad8:
		v.Op = OpRISCV64LoweredAtomicLoad8
		return true
	case OpAtomicLoadPtr:
		// Pointers are 64-bit on RISCV64, so pointer loads share the 64-bit lowering.
		v.Op = OpRISCV64LoweredAtomicLoad64
		return true
	case OpAtomicOr32:
		v.Op = OpRISCV64LoweredAtomicOr32
		return true
	case OpAtomicOr8:
		return rewriteValueRISCV64_OpAtomicOr8(v)
	case OpAtomicStore32:
		v.Op = OpRISCV64LoweredAtomicStore32
		return true
	case OpAtomicStore64:
		v.Op = OpRISCV64LoweredAtomicStore64
		return true
	case OpAtomicStore8:
		v.Op = OpRISCV64LoweredAtomicStore8
		return true
	case OpAtomicStorePtrNoWB:
		v.Op = OpRISCV64LoweredAtomicStore64
		return true
	case OpAvg64u:
		return rewriteValueRISCV64_OpAvg64u(v)
	case OpClosureCall:
		v.Op = OpRISCV64CALLclosure
		return true
	case OpCom16:
		v.Op = OpRISCV64NOT
		return true
	case OpCom32:
		v.Op = OpRISCV64NOT
		return true
	case OpCom64:
		v.Op = OpRISCV64NOT
		return true
	case OpCom8:
		v.Op = OpRISCV64NOT
		return true
	case OpConst16:
		return rewriteValueRISCV64_OpConst16(v)
	case OpConst32:
		return rewriteValueRISCV64_OpConst32(v)
	case OpConst32F:
		return rewriteValueRISCV64_OpConst32F(v)
	case OpConst64:
		return rewriteValueRISCV64_OpConst64(v)
	case OpConst64F:
		return rewriteValueRISCV64_OpConst64F(v)
	case OpConst8:
		return rewriteValueRISCV64_OpConst8(v)
	case OpConstBool:
		return rewriteValueRISCV64_OpConstBool(v)
	case OpConstNil:
		return rewriteValueRISCV64_OpConstNil(v)
	case OpCopysign:
		v.Op = OpRISCV64FSGNJD
		return true
	case OpCvt32Fto32:
		v.Op = OpRISCV64FCVTWS
		return true
	case OpCvt32Fto64:
		v.Op = OpRISCV64FCVTLS
		return true
	case OpCvt32Fto64F:
		v.Op = OpRISCV64FCVTDS
		return true
	case OpCvt32to32F:
		v.Op = OpRISCV64FCVTSW
		return true
	case OpCvt32to64F:
		v.Op = OpRISCV64FCVTDW
		return true
	case OpCvt64Fto32:
		v.Op = OpRISCV64FCVTWD
		return true
	case OpCvt64Fto32F:
		v.Op = OpRISCV64FCVTSD
		return true
	case OpCvt64Fto64:
		v.Op = OpRISCV64FCVTLD
		return true
	case OpCvt64to32F:
		v.Op = OpRISCV64FCVTSL
		return true
	case OpCvt64to64F:
		v.Op = OpRISCV64FCVTDL
		return true
	case OpCvtBoolToUint8:
		v.Op = OpCopy
		return true
	case OpDiv16:
		return rewriteValueRISCV64_OpDiv16(v)
	case OpDiv16u:
		return rewriteValueRISCV64_OpDiv16u(v)
	case OpDiv32:
		return rewriteValueRISCV64_OpDiv32(v)
	case OpDiv32F:
		v.Op = OpRISCV64FDIVS
		return true
	case OpDiv32u:
		v.Op = OpRISCV64DIVUW
		return true
	case OpDiv64:
		return rewriteValueRISCV64_OpDiv64(v)
	case OpDiv64F:
		v.Op = OpRISCV64FDIVD
		return true
	case OpDiv64u:
		v.Op = OpRISCV64DIVU
		return true
	case OpDiv8:
		return rewriteValueRISCV64_OpDiv8(v)
	case OpDiv8u:
		return rewriteValueRISCV64_OpDiv8u(v)
	case OpEq16:
		return rewriteValueRISCV64_OpEq16(v)
	case OpEq32:
		return rewriteValueRISCV64_OpEq32(v)
	case OpEq32F:
		v.Op = OpRISCV64FEQS
		return true
	case OpEq64:
		return rewriteValueRISCV64_OpEq64(v)
	case OpEq64F:
		v.Op = OpRISCV64FEQD
		return true
	case OpEq8:
		return rewriteValueRISCV64_OpEq8(v)
	case OpEqB:
		return rewriteValueRISCV64_OpEqB(v)
	case OpEqPtr:
		return rewriteValueRISCV64_OpEqPtr(v)
	case OpFMA:
		v.Op = OpRISCV64FMADDD
		return true
	case OpGetCallerPC:
		v.Op = OpRISCV64LoweredGetCallerPC
		return true
	case OpGetCallerSP:
		v.Op = OpRISCV64LoweredGetCallerSP
		return true
	case OpGetClosurePtr:
		v.Op = OpRISCV64LoweredGetClosurePtr
		return true
	case OpHmul32:
		return rewriteValueRISCV64_OpHmul32(v)
	case OpHmul32u:
		return rewriteValueRISCV64_OpHmul32u(v)
	case OpHmul64:
		v.Op = OpRISCV64MULH
		return true
	case OpHmul64u:
		v.Op = OpRISCV64MULHU
		return true
	case OpInterCall:
		v.Op = OpRISCV64CALLinter
		return true
	case OpIsInBounds:
		// Lowers to another generic op (unsigned compare), which is itself
		// handled by a case in this switch on a subsequent rewrite.
		v.Op = OpLess64U
		return true
	case OpIsNonNil:
		v.Op = OpRISCV64SNEZ
		return true
	case OpIsSliceInBounds:
		v.Op = OpLeq64U
		return true
	case OpLeq16:
		return rewriteValueRISCV64_OpLeq16(v)
	case OpLeq16U:
		return rewriteValueRISCV64_OpLeq16U(v)
	case OpLeq32:
		return rewriteValueRISCV64_OpLeq32(v)
	case OpLeq32F:
		v.Op = OpRISCV64FLES
		return true
	case OpLeq32U:
		return rewriteValueRISCV64_OpLeq32U(v)
	case OpLeq64:
		return rewriteValueRISCV64_OpLeq64(v)
	case OpLeq64F:
		v.Op = OpRISCV64FLED
		return true
	case OpLeq64U:
		return rewriteValueRISCV64_OpLeq64U(v)
	case OpLeq8:
		return rewriteValueRISCV64_OpLeq8(v)
	case OpLeq8U:
		return rewriteValueRISCV64_OpLeq8U(v)
	case OpLess16:
		return rewriteValueRISCV64_OpLess16(v)
	case OpLess16U:
		return rewriteValueRISCV64_OpLess16U(v)
	case OpLess32:
		return rewriteValueRISCV64_OpLess32(v)
	case OpLess32F:
		v.Op = OpRISCV64FLTS
		return true
	case OpLess32U:
		return rewriteValueRISCV64_OpLess32U(v)
	case OpLess64:
		v.Op = OpRISCV64SLT
		return true
	case OpLess64F:
		v.Op = OpRISCV64FLTD
		return true
	case OpLess64U:
		v.Op = OpRISCV64SLTU
		return true
	case OpLess8:
		return rewriteValueRISCV64_OpLess8(v)
	case OpLess8U:
		return rewriteValueRISCV64_OpLess8U(v)
	case OpLoad:
		return rewriteValueRISCV64_OpLoad(v)
	case OpLocalAddr:
		return rewriteValueRISCV64_OpLocalAddr(v)
	case OpLsh16x16:
		return rewriteValueRISCV64_OpLsh16x16(v)
	case OpLsh16x32:
		return rewriteValueRISCV64_OpLsh16x32(v)
	case OpLsh16x64:
		return rewriteValueRISCV64_OpLsh16x64(v)
	case OpLsh16x8:
		return rewriteValueRISCV64_OpLsh16x8(v)
	case OpLsh32x16:
		return rewriteValueRISCV64_OpLsh32x16(v)
	case OpLsh32x32:
		return rewriteValueRISCV64_OpLsh32x32(v)
	case OpLsh32x64:
		return rewriteValueRISCV64_OpLsh32x64(v)
	case OpLsh32x8:
		return rewriteValueRISCV64_OpLsh32x8(v)
	case OpLsh64x16:
		return rewriteValueRISCV64_OpLsh64x16(v)
	case OpLsh64x32:
		return rewriteValueRISCV64_OpLsh64x32(v)
	case OpLsh64x64:
		return rewriteValueRISCV64_OpLsh64x64(v)
	case OpLsh64x8:
		return rewriteValueRISCV64_OpLsh64x8(v)
	case OpLsh8x16:
		return rewriteValueRISCV64_OpLsh8x16(v)
	case OpLsh8x32:
		return rewriteValueRISCV64_OpLsh8x32(v)
	case OpLsh8x64:
		return rewriteValueRISCV64_OpLsh8x64(v)
	case OpLsh8x8:
		return rewriteValueRISCV64_OpLsh8x8(v)
	case OpMod16:
		return rewriteValueRISCV64_OpMod16(v)
	case OpMod16u:
		return rewriteValueRISCV64_OpMod16u(v)
	case OpMod32:
		return rewriteValueRISCV64_OpMod32(v)
	case OpMod32u:
		v.Op = OpRISCV64REMUW
		return true
	case OpMod64:
		return rewriteValueRISCV64_OpMod64(v)
	case OpMod64u:
		v.Op = OpRISCV64REMU
		return true
	case OpMod8:
		return rewriteValueRISCV64_OpMod8(v)
	case OpMod8u:
		return rewriteValueRISCV64_OpMod8u(v)
	case OpMove:
		return rewriteValueRISCV64_OpMove(v)
	case OpMul16:
		return rewriteValueRISCV64_OpMul16(v)
	case OpMul32:
		v.Op = OpRISCV64MULW
		return true
	case OpMul32F:
		v.Op = OpRISCV64FMULS
		return true
	case OpMul64:
		v.Op = OpRISCV64MUL
		return true
	case OpMul64F:
		v.Op = OpRISCV64FMULD
		return true
	case OpMul64uhilo:
		v.Op = OpRISCV64LoweredMuluhilo
		return true
	case OpMul64uover:
		v.Op = OpRISCV64LoweredMuluover
		return true
	case OpMul8:
		return rewriteValueRISCV64_OpMul8(v)
	case OpNeg16:
		v.Op = OpRISCV64NEG
		return true
	case OpNeg32:
		v.Op = OpRISCV64NEG
		return true
	case OpNeg32F:
		v.Op = OpRISCV64FNEGS
		return true
	case OpNeg64:
		v.Op = OpRISCV64NEG
		return true
	case OpNeg64F:
		v.Op = OpRISCV64FNEGD
		return true
	case OpNeg8:
		v.Op = OpRISCV64NEG
		return true
	case OpNeq16:
		return rewriteValueRISCV64_OpNeq16(v)
	case OpNeq32:
		return rewriteValueRISCV64_OpNeq32(v)
	case OpNeq32F:
		v.Op = OpRISCV64FNES
		return true
	case OpNeq64:
		return rewriteValueRISCV64_OpNeq64(v)
	case OpNeq64F:
		v.Op = OpRISCV64FNED
		return true
	case OpNeq8:
		return rewriteValueRISCV64_OpNeq8(v)
	case OpNeqB:
		return rewriteValueRISCV64_OpNeqB(v)
	case OpNeqPtr:
		return rewriteValueRISCV64_OpNeqPtr(v)
	case OpNilCheck:
		v.Op = OpRISCV64LoweredNilCheck
		return true
	case OpNot:
		v.Op = OpRISCV64SEQZ
		return true
	case OpOffPtr:
		return rewriteValueRISCV64_OpOffPtr(v)
	case OpOr16:
		v.Op = OpRISCV64OR
		return true
	case OpOr32:
		v.Op = OpRISCV64OR
		return true
	case OpOr64:
		v.Op = OpRISCV64OR
		return true
	case OpOr8:
		v.Op = OpRISCV64OR
		return true
	case OpOrB:
		v.Op = OpRISCV64OR
		return true
	case OpPanicBounds:
		return rewriteValueRISCV64_OpPanicBounds(v)
	case OpPubBarrier:
		v.Op = OpRISCV64LoweredPubBarrier
		return true
	// Machine-specific (OpRISCV64*) cases below are optimization rewrites
	// applied to already-lowered values; all are multi-rule and delegate.
	case OpRISCV64ADD:
		return rewriteValueRISCV64_OpRISCV64ADD(v)
	case OpRISCV64ADDI:
		return rewriteValueRISCV64_OpRISCV64ADDI(v)
	case OpRISCV64AND:
		return rewriteValueRISCV64_OpRISCV64AND(v)
	case OpRISCV64ANDI:
		return rewriteValueRISCV64_OpRISCV64ANDI(v)
	case OpRISCV64FADDD:
		return rewriteValueRISCV64_OpRISCV64FADDD(v)
	case OpRISCV64FADDS:
		return rewriteValueRISCV64_OpRISCV64FADDS(v)
	case OpRISCV64FMADDD:
		return rewriteValueRISCV64_OpRISCV64FMADDD(v)
	case OpRISCV64FMADDS:
		return rewriteValueRISCV64_OpRISCV64FMADDS(v)
	case OpRISCV64FMSUBD:
		return rewriteValueRISCV64_OpRISCV64FMSUBD(v)
	case OpRISCV64FMSUBS:
		return rewriteValueRISCV64_OpRISCV64FMSUBS(v)
	case OpRISCV64FNMADDD:
		return rewriteValueRISCV64_OpRISCV64FNMADDD(v)
	case OpRISCV64FNMADDS:
		return rewriteValueRISCV64_OpRISCV64FNMADDS(v)
	case OpRISCV64FNMSUBD:
		return rewriteValueRISCV64_OpRISCV64FNMSUBD(v)
	case OpRISCV64FNMSUBS:
		return rewriteValueRISCV64_OpRISCV64FNMSUBS(v)
	case OpRISCV64FSUBD:
		return rewriteValueRISCV64_OpRISCV64FSUBD(v)
	case OpRISCV64FSUBS:
		return rewriteValueRISCV64_OpRISCV64FSUBS(v)
	case OpRISCV64MOVBUload:
		return rewriteValueRISCV64_OpRISCV64MOVBUload(v)
	case OpRISCV64MOVBUreg:
		return rewriteValueRISCV64_OpRISCV64MOVBUreg(v)
	case OpRISCV64MOVBload:
		return rewriteValueRISCV64_OpRISCV64MOVBload(v)
	case OpRISCV64MOVBreg:
		return rewriteValueRISCV64_OpRISCV64MOVBreg(v)
	case OpRISCV64MOVBstore:
		return rewriteValueRISCV64_OpRISCV64MOVBstore(v)
	case OpRISCV64MOVBstorezero:
		return rewriteValueRISCV64_OpRISCV64MOVBstorezero(v)
	case OpRISCV64MOVDload:
		return rewriteValueRISCV64_OpRISCV64MOVDload(v)
	case OpRISCV64MOVDnop:
		return rewriteValueRISCV64_OpRISCV64MOVDnop(v)
	case OpRISCV64MOVDreg:
		return rewriteValueRISCV64_OpRISCV64MOVDreg(v)
	case OpRISCV64MOVDstore:
		return rewriteValueRISCV64_OpRISCV64MOVDstore(v)
	case OpRISCV64MOVDstorezero:
		return rewriteValueRISCV64_OpRISCV64MOVDstorezero(v)
	case OpRISCV64MOVHUload:
		return rewriteValueRISCV64_OpRISCV64MOVHUload(v)
	case OpRISCV64MOVHUreg:
		return rewriteValueRISCV64_OpRISCV64MOVHUreg(v)
	case OpRISCV64MOVHload:
		return rewriteValueRISCV64_OpRISCV64MOVHload(v)
	case OpRISCV64MOVHreg:
		return rewriteValueRISCV64_OpRISCV64MOVHreg(v)
	case OpRISCV64MOVHstore:
		return rewriteValueRISCV64_OpRISCV64MOVHstore(v)
	case OpRISCV64MOVHstorezero:
		return rewriteValueRISCV64_OpRISCV64MOVHstorezero(v)
	case OpRISCV64MOVWUload:
		return rewriteValueRISCV64_OpRISCV64MOVWUload(v)
	case OpRISCV64MOVWUreg:
		return rewriteValueRISCV64_OpRISCV64MOVWUreg(v)
	case OpRISCV64MOVWload:
		return rewriteValueRISCV64_OpRISCV64MOVWload(v)
	case OpRISCV64MOVWreg:
		return rewriteValueRISCV64_OpRISCV64MOVWreg(v)
	case OpRISCV64MOVWstore:
		return rewriteValueRISCV64_OpRISCV64MOVWstore(v)
	case OpRISCV64MOVWstorezero:
		return rewriteValueRISCV64_OpRISCV64MOVWstorezero(v)
	case OpRISCV64NEG:
		return rewriteValueRISCV64_OpRISCV64NEG(v)
	case OpRISCV64NEGW:
		return rewriteValueRISCV64_OpRISCV64NEGW(v)
	case OpRISCV64OR:
		return rewriteValueRISCV64_OpRISCV64OR(v)
	case OpRISCV64ORI:
		return rewriteValueRISCV64_OpRISCV64ORI(v)
	case OpRISCV64SEQZ:
		return rewriteValueRISCV64_OpRISCV64SEQZ(v)
	case OpRISCV64SLL:
		return rewriteValueRISCV64_OpRISCV64SLL(v)
	case OpRISCV64SLLI:
		return rewriteValueRISCV64_OpRISCV64SLLI(v)
	case OpRISCV64SLT:
		return rewriteValueRISCV64_OpRISCV64SLT(v)
	case OpRISCV64SLTI:
		return rewriteValueRISCV64_OpRISCV64SLTI(v)
	case OpRISCV64SLTIU:
		return rewriteValueRISCV64_OpRISCV64SLTIU(v)
	case OpRISCV64SLTU:
		return rewriteValueRISCV64_OpRISCV64SLTU(v)
	case OpRISCV64SNEZ:
		return rewriteValueRISCV64_OpRISCV64SNEZ(v)
	case OpRISCV64SRA:
		return rewriteValueRISCV64_OpRISCV64SRA(v)
	case OpRISCV64SRAI:
		return rewriteValueRISCV64_OpRISCV64SRAI(v)
	case OpRISCV64SRAW:
		return rewriteValueRISCV64_OpRISCV64SRAW(v)
	case OpRISCV64SRL:
		return rewriteValueRISCV64_OpRISCV64SRL(v)
	case OpRISCV64SRLI:
		return rewriteValueRISCV64_OpRISCV64SRLI(v)
	case OpRISCV64SRLW:
		return rewriteValueRISCV64_OpRISCV64SRLW(v)
	case OpRISCV64SUB:
		return rewriteValueRISCV64_OpRISCV64SUB(v)
	case OpRISCV64SUBW:
		return rewriteValueRISCV64_OpRISCV64SUBW(v)
	case OpRISCV64XOR:
		return rewriteValueRISCV64_OpRISCV64XOR(v)
	case OpRotateLeft16:
		return rewriteValueRISCV64_OpRotateLeft16(v)
	case OpRotateLeft32:
		return rewriteValueRISCV64_OpRotateLeft32(v)
	case OpRotateLeft64:
		return rewriteValueRISCV64_OpRotateLeft64(v)
	case OpRotateLeft8:
		return rewriteValueRISCV64_OpRotateLeft8(v)
	case OpRound32F:
		v.Op = OpRISCV64LoweredRound32F
		return true
	case OpRound64F:
		v.Op = OpRISCV64LoweredRound64F
		return true
	case OpRsh16Ux16:
		return rewriteValueRISCV64_OpRsh16Ux16(v)
	case OpRsh16Ux32:
		return rewriteValueRISCV64_OpRsh16Ux32(v)
	case OpRsh16Ux64:
		return rewriteValueRISCV64_OpRsh16Ux64(v)
	case OpRsh16Ux8:
		return rewriteValueRISCV64_OpRsh16Ux8(v)
	case OpRsh16x16:
		return rewriteValueRISCV64_OpRsh16x16(v)
	case OpRsh16x32:
		return rewriteValueRISCV64_OpRsh16x32(v)
	case OpRsh16x64:
		return rewriteValueRISCV64_OpRsh16x64(v)
	case OpRsh16x8:
		return rewriteValueRISCV64_OpRsh16x8(v)
	case OpRsh32Ux16:
		return rewriteValueRISCV64_OpRsh32Ux16(v)
	case OpRsh32Ux32:
		return rewriteValueRISCV64_OpRsh32Ux32(v)
	case OpRsh32Ux64:
		return rewriteValueRISCV64_OpRsh32Ux64(v)
	case OpRsh32Ux8:
		return rewriteValueRISCV64_OpRsh32Ux8(v)
	case OpRsh32x16:
		return rewriteValueRISCV64_OpRsh32x16(v)
	case OpRsh32x32:
		return rewriteValueRISCV64_OpRsh32x32(v)
	case OpRsh32x64:
		return rewriteValueRISCV64_OpRsh32x64(v)
	case OpRsh32x8:
		return rewriteValueRISCV64_OpRsh32x8(v)
	case OpRsh64Ux16:
		return rewriteValueRISCV64_OpRsh64Ux16(v)
	case OpRsh64Ux32:
		return rewriteValueRISCV64_OpRsh64Ux32(v)
	case OpRsh64Ux64:
		return rewriteValueRISCV64_OpRsh64Ux64(v)
	case OpRsh64Ux8:
		return rewriteValueRISCV64_OpRsh64Ux8(v)
	case OpRsh64x16:
		return rewriteValueRISCV64_OpRsh64x16(v)
	case OpRsh64x32:
		return rewriteValueRISCV64_OpRsh64x32(v)
	case OpRsh64x64:
		return rewriteValueRISCV64_OpRsh64x64(v)
	case OpRsh64x8:
		return rewriteValueRISCV64_OpRsh64x8(v)
	case OpRsh8Ux16:
		return rewriteValueRISCV64_OpRsh8Ux16(v)
	case OpRsh8Ux32:
		return rewriteValueRISCV64_OpRsh8Ux32(v)
	case OpRsh8Ux64:
		return rewriteValueRISCV64_OpRsh8Ux64(v)
	case OpRsh8Ux8:
		return rewriteValueRISCV64_OpRsh8Ux8(v)
	case OpRsh8x16:
		return rewriteValueRISCV64_OpRsh8x16(v)
	case OpRsh8x32:
		return rewriteValueRISCV64_OpRsh8x32(v)
	case OpRsh8x64:
		return rewriteValueRISCV64_OpRsh8x64(v)
	case OpRsh8x8:
		return rewriteValueRISCV64_OpRsh8x8(v)
	case OpSelect0:
		return rewriteValueRISCV64_OpSelect0(v)
	case OpSelect1:
		return rewriteValueRISCV64_OpSelect1(v)
	case OpSignExt16to32:
		v.Op = OpRISCV64MOVHreg
		return true
	case OpSignExt16to64:
		v.Op = OpRISCV64MOVHreg
		return true
	case OpSignExt32to64:
		v.Op = OpRISCV64MOVWreg
		return true
	case OpSignExt8to16:
		v.Op = OpRISCV64MOVBreg
		return true
	case OpSignExt8to32:
		v.Op = OpRISCV64MOVBreg
		return true
	case OpSignExt8to64:
		v.Op = OpRISCV64MOVBreg
		return true
	case OpSlicemask:
		return rewriteValueRISCV64_OpSlicemask(v)
	case OpSqrt:
		v.Op = OpRISCV64FSQRTD
		return true
	case OpSqrt32:
		v.Op = OpRISCV64FSQRTS
		return true
	case OpStaticCall:
		v.Op = OpRISCV64CALLstatic
		return true
	case OpStore:
		return rewriteValueRISCV64_OpStore(v)
	case OpSub16:
		v.Op = OpRISCV64SUB
		return true
	case OpSub32:
		v.Op = OpRISCV64SUB
		return true
	case OpSub32F:
		v.Op = OpRISCV64FSUBS
		return true
	case OpSub64:
		v.Op = OpRISCV64SUB
		return true
	case OpSub64F:
		v.Op = OpRISCV64FSUBD
		return true
	case OpSub8:
		v.Op = OpRISCV64SUB
		return true
	case OpSubPtr:
		v.Op = OpRISCV64SUB
		return true
	case OpTailCall:
		v.Op = OpRISCV64CALLtail
		return true
	// Truncations are no-ops at the machine level: the value stays in the
	// same 64-bit register, so they lower to a plain Copy.
	case OpTrunc16to8:
		v.Op = OpCopy
		return true
	case OpTrunc32to16:
		v.Op = OpCopy
		return true
	case OpTrunc32to8:
		v.Op = OpCopy
		return true
	case OpTrunc64to16:
		v.Op = OpCopy
		return true
	case OpTrunc64to32:
		v.Op = OpCopy
		return true
	case OpTrunc64to8:
		v.Op = OpCopy
		return true
	case OpWB:
		v.Op = OpRISCV64LoweredWB
		return true
	case OpXor16:
		v.Op = OpRISCV64XOR
		return true
	case OpXor32:
		v.Op = OpRISCV64XOR
		return true
	case OpXor64:
		v.Op = OpRISCV64XOR
		return true
	case OpXor8:
		v.Op = OpRISCV64XOR
		return true
	case OpZero:
		return rewriteValueRISCV64_OpZero(v)
	case OpZeroExt16to32:
		v.Op = OpRISCV64MOVHUreg
		return true
	case OpZeroExt16to64:
		v.Op = OpRISCV64MOVHUreg
		return true
	case OpZeroExt32to64:
		v.Op = OpRISCV64MOVWUreg
		return true
	case OpZeroExt8to16:
		v.Op = OpRISCV64MOVBUreg
		return true
	case OpZeroExt8to32:
		v.Op = OpRISCV64MOVBUreg
		return true
	case OpZeroExt8to64:
		v.Op = OpRISCV64MOVBUreg
		return true
	}
	return false
}
   748  func rewriteValueRISCV64_OpAddr(v *Value) bool {
   749  	v_0 := v.Args[0]
   750  	// match: (Addr {sym} base)
   751  	// result: (MOVaddr {sym} [0] base)
   752  	for {
   753  		sym := auxToSym(v.Aux)
   754  		base := v_0
   755  		v.reset(OpRISCV64MOVaddr)
   756  		v.AuxInt = int32ToAuxInt(0)
   757  		v.Aux = symToAux(sym)
   758  		v.AddArg(base)
   759  		return true
   760  	}
   761  }
// rewriteValueRISCV64_OpAtomicAnd8 lowers a byte-sized atomic AND to a
// word-sized one, since the target only has 32-bit atomic AND. The pointer is
// rounded down to its containing 32-bit word, and the 8-bit operand is turned
// into a 32-bit mask (all-ones outside the target byte) shifted into the
// byte's position within that word.
func rewriteValueRISCV64_OpAtomicAnd8(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (AtomicAnd8 ptr val mem)
	// result: (LoweredAtomicAnd32 (ANDI <typ.Uintptr> [^3] ptr) (NOT <typ.UInt32> (SLL <typ.UInt32> (XORI <typ.UInt32> [0xff] (ZeroExt8to32 val)) (SLLI <typ.UInt64> [3] (ANDI <typ.UInt64> [3] ptr)))) mem)
	for {
		ptr := v_0
		val := v_1
		mem := v_2
		v.reset(OpRISCV64LoweredAtomicAnd32)
		// v0: word-aligned address — ptr with its low two bits cleared.
		v0 := b.NewValue0(v.Pos, OpRISCV64ANDI, typ.Uintptr)
		v0.AuxInt = int64ToAuxInt(^3)
		v0.AddArg(ptr)
		// v1..v6 build the mask: NOT(SLL(0xff XOR zext(val), 8*(ptr&3))).
		// XORI [0xff] inverts the byte; after shifting into place and a final
		// NOT, every bit outside the target byte is 1, so the AND leaves the
		// other three bytes of the word unchanged.
		v1 := b.NewValue0(v.Pos, OpRISCV64NOT, typ.UInt32)
		v2 := b.NewValue0(v.Pos, OpRISCV64SLL, typ.UInt32)
		v3 := b.NewValue0(v.Pos, OpRISCV64XORI, typ.UInt32)
		v3.AuxInt = int64ToAuxInt(0xff)
		v4 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
		v4.AddArg(val)
		v3.AddArg(v4)
		// v5: bit offset of the byte within its word — (ptr&3)*8.
		v5 := b.NewValue0(v.Pos, OpRISCV64SLLI, typ.UInt64)
		v5.AuxInt = int64ToAuxInt(3)
		v6 := b.NewValue0(v.Pos, OpRISCV64ANDI, typ.UInt64)
		v6.AuxInt = int64ToAuxInt(3)
		v6.AddArg(ptr)
		v5.AddArg(v6)
		v2.AddArg2(v3, v5)
		v1.AddArg(v2)
		v.AddArg3(v0, v1, mem)
		return true
	}
}
   797  func rewriteValueRISCV64_OpAtomicCompareAndSwap32(v *Value) bool {
   798  	v_3 := v.Args[3]
   799  	v_2 := v.Args[2]
   800  	v_1 := v.Args[1]
   801  	v_0 := v.Args[0]
   802  	b := v.Block
   803  	typ := &b.Func.Config.Types
   804  	// match: (AtomicCompareAndSwap32 ptr old new mem)
   805  	// result: (LoweredAtomicCas32 ptr (SignExt32to64 old) new mem)
   806  	for {
   807  		ptr := v_0
   808  		old := v_1
   809  		new := v_2
   810  		mem := v_3
   811  		v.reset(OpRISCV64LoweredAtomicCas32)
   812  		v0 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
   813  		v0.AddArg(old)
   814  		v.AddArg4(ptr, v0, new, mem)
   815  		return true
   816  	}
   817  }
// rewriteValueRISCV64_OpAtomicOr8 lowers a byte-sized atomic OR to the
// 32-bit atomic OR the target provides: the pointer is rounded down to its
// containing 32-bit word and the zero-extended byte operand is shifted into
// the byte's position. Bits outside the target byte are zero, so OR leaves
// the rest of the word unchanged (no mask inversion needed, unlike AtomicAnd8).
func rewriteValueRISCV64_OpAtomicOr8(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (AtomicOr8 ptr val mem)
	// result: (LoweredAtomicOr32 (ANDI <typ.Uintptr> [^3] ptr) (SLL <typ.UInt32> (ZeroExt8to32 val) (SLLI <typ.UInt64> [3] (ANDI <typ.UInt64> [3] ptr))) mem)
	for {
		ptr := v_0
		val := v_1
		mem := v_2
		v.reset(OpRISCV64LoweredAtomicOr32)
		// v0: word-aligned address — ptr with its low two bits cleared.
		v0 := b.NewValue0(v.Pos, OpRISCV64ANDI, typ.Uintptr)
		v0.AuxInt = int64ToAuxInt(^3)
		v0.AddArg(ptr)
		// v1: zext(val) << (8*(ptr&3)) — the byte moved to its lane in the word.
		v1 := b.NewValue0(v.Pos, OpRISCV64SLL, typ.UInt32)
		v2 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
		v2.AddArg(val)
		// v3/v4: bit offset within the word — (ptr&3)*8.
		v3 := b.NewValue0(v.Pos, OpRISCV64SLLI, typ.UInt64)
		v3.AuxInt = int64ToAuxInt(3)
		v4 := b.NewValue0(v.Pos, OpRISCV64ANDI, typ.UInt64)
		v4.AuxInt = int64ToAuxInt(3)
		v4.AddArg(ptr)
		v3.AddArg(v4)
		v1.AddArg2(v2, v3)
		v.AddArg3(v0, v1, mem)
		return true
	}
}
// rewriteValueRISCV64_OpAvg64u lowers the unsigned 64-bit average as
// (x>>1) + (y>>1) + (x&y&1): the halves are added first and the shared low
// bit is added back, which yields floor((x+y)/2) without needing a 65-bit
// intermediate sum.
func rewriteValueRISCV64_OpAvg64u(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (Avg64u <t> x y)
	// result: (ADD (ADD <t> (SRLI <t> [1] x) (SRLI <t> [1] y)) (ANDI <t> [1] (AND <t> x y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		v.reset(OpRISCV64ADD)
		// v0 = (x >> 1) + (y >> 1)
		v0 := b.NewValue0(v.Pos, OpRISCV64ADD, t)
		v1 := b.NewValue0(v.Pos, OpRISCV64SRLI, t)
		v1.AuxInt = int64ToAuxInt(1)
		v1.AddArg(x)
		v2 := b.NewValue0(v.Pos, OpRISCV64SRLI, t)
		v2.AuxInt = int64ToAuxInt(1)
		v2.AddArg(y)
		v0.AddArg2(v1, v2)
		// v3 = (x & y) & 1 — the carry lost by the two right shifts.
		v3 := b.NewValue0(v.Pos, OpRISCV64ANDI, t)
		v3.AuxInt = int64ToAuxInt(1)
		v4 := b.NewValue0(v.Pos, OpRISCV64AND, t)
		v4.AddArg2(x, y)
		v3.AddArg(v4)
		v.AddArg2(v0, v3)
		return true
	}
}
   876  func rewriteValueRISCV64_OpConst16(v *Value) bool {
   877  	// match: (Const16 [val])
   878  	// result: (MOVDconst [int64(val)])
   879  	for {
   880  		val := auxIntToInt16(v.AuxInt)
   881  		v.reset(OpRISCV64MOVDconst)
   882  		v.AuxInt = int64ToAuxInt(int64(val))
   883  		return true
   884  	}
   885  }
   886  func rewriteValueRISCV64_OpConst32(v *Value) bool {
   887  	// match: (Const32 [val])
   888  	// result: (MOVDconst [int64(val)])
   889  	for {
   890  		val := auxIntToInt32(v.AuxInt)
   891  		v.reset(OpRISCV64MOVDconst)
   892  		v.AuxInt = int64ToAuxInt(int64(val))
   893  		return true
   894  	}
   895  }
   896  func rewriteValueRISCV64_OpConst32F(v *Value) bool {
   897  	b := v.Block
   898  	typ := &b.Func.Config.Types
   899  	// match: (Const32F [val])
   900  	// result: (FMVSX (MOVDconst [int64(math.Float32bits(val))]))
   901  	for {
   902  		val := auxIntToFloat32(v.AuxInt)
   903  		v.reset(OpRISCV64FMVSX)
   904  		v0 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
   905  		v0.AuxInt = int64ToAuxInt(int64(math.Float32bits(val)))
   906  		v.AddArg(v0)
   907  		return true
   908  	}
   909  }
   910  func rewriteValueRISCV64_OpConst64(v *Value) bool {
   911  	// match: (Const64 [val])
   912  	// result: (MOVDconst [int64(val)])
   913  	for {
   914  		val := auxIntToInt64(v.AuxInt)
   915  		v.reset(OpRISCV64MOVDconst)
   916  		v.AuxInt = int64ToAuxInt(int64(val))
   917  		return true
   918  	}
   919  }
   920  func rewriteValueRISCV64_OpConst64F(v *Value) bool {
   921  	b := v.Block
   922  	typ := &b.Func.Config.Types
   923  	// match: (Const64F [val])
   924  	// result: (FMVDX (MOVDconst [int64(math.Float64bits(val))]))
   925  	for {
   926  		val := auxIntToFloat64(v.AuxInt)
   927  		v.reset(OpRISCV64FMVDX)
   928  		v0 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
   929  		v0.AuxInt = int64ToAuxInt(int64(math.Float64bits(val)))
   930  		v.AddArg(v0)
   931  		return true
   932  	}
   933  }
   934  func rewriteValueRISCV64_OpConst8(v *Value) bool {
   935  	// match: (Const8 [val])
   936  	// result: (MOVDconst [int64(val)])
   937  	for {
   938  		val := auxIntToInt8(v.AuxInt)
   939  		v.reset(OpRISCV64MOVDconst)
   940  		v.AuxInt = int64ToAuxInt(int64(val))
   941  		return true
   942  	}
   943  }
// rewriteValueRISCV64_OpConstBool lowers ConstBool to MOVDconst 0/1 (b2i).
func rewriteValueRISCV64_OpConstBool(v *Value) bool {
	// match: (ConstBool [val])
	// result: (MOVDconst [int64(b2i(val))])
	for {
		val := auxIntToBool(v.AuxInt)
		v.reset(OpRISCV64MOVDconst)
		v.AuxInt = int64ToAuxInt(int64(b2i(val)))
		return true
	}
}
// rewriteValueRISCV64_OpConstNil lowers ConstNil to MOVDconst 0.
func rewriteValueRISCV64_OpConstNil(v *Value) bool {
	// match: (ConstNil)
	// result: (MOVDconst [0])
	for {
		v.reset(OpRISCV64MOVDconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
}
// rewriteValueRISCV64_OpDiv16 lowers Div16 to a 32-bit DIVW on sign-extended
// operands. Only the [false] auxInt form is matched here; the true case is
// handled elsewhere, so this can return false.
func rewriteValueRISCV64_OpDiv16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Div16 x y [false])
	// result: (DIVW (SignExt16to32 x) (SignExt16to32 y))
	for {
		if auxIntToBool(v.AuxInt) != false {
			break
		}
		x := v_0
		y := v_1
		v.reset(OpRISCV64DIVW)
		v0 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32)
		v1.AddArg(y)
		v.AddArg2(v0, v1)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpDiv16u lowers Div16u to DIVUW on zero-extended operands.
func rewriteValueRISCV64_OpDiv16u(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Div16u x y)
	// result: (DIVUW (ZeroExt16to32 x) (ZeroExt16to32 y))
	for {
		x := v_0
		y := v_1
		v.reset(OpRISCV64DIVUW)
		v0 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
		v1.AddArg(y)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueRISCV64_OpDiv32 lowers Div32 [false] directly to DIVW.
func rewriteValueRISCV64_OpDiv32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (Div32 x y [false])
	// result: (DIVW x y)
	for {
		if auxIntToBool(v.AuxInt) != false {
			break
		}
		x := v_0
		y := v_1
		v.reset(OpRISCV64DIVW)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpDiv64 lowers Div64 [false] directly to DIV.
func rewriteValueRISCV64_OpDiv64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (Div64 x y [false])
	// result: (DIV x y)
	for {
		if auxIntToBool(v.AuxInt) != false {
			break
		}
		x := v_0
		y := v_1
		v.reset(OpRISCV64DIV)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpDiv8 lowers Div8 to DIVW on sign-extended operands.
func rewriteValueRISCV64_OpDiv8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Div8 x y)
	// result: (DIVW (SignExt8to32 x) (SignExt8to32 y))
	for {
		x := v_0
		y := v_1
		v.reset(OpRISCV64DIVW)
		v0 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32)
		v1.AddArg(y)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueRISCV64_OpDiv8u lowers Div8u to DIVUW on zero-extended operands.
func rewriteValueRISCV64_OpDiv8u(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Div8u x y)
	// result: (DIVUW (ZeroExt8to32 x) (ZeroExt8to32 y))
	for {
		x := v_0
		y := v_1
		v.reset(OpRISCV64DIVUW)
		v0 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
		v1.AddArg(y)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueRISCV64_OpEq16 lowers Eq16 as "difference is zero": zero-extend
// both operands, subtract, and SEQZ the result.
func rewriteValueRISCV64_OpEq16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Eq16 x y)
	// result: (SEQZ (SUB <x.Type> (ZeroExt16to64 x) (ZeroExt16to64 y)))
	for {
		x := v_0
		y := v_1
		v.reset(OpRISCV64SEQZ)
		v0 := b.NewValue0(v.Pos, OpRISCV64SUB, x.Type)
		v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v1.AddArg(x)
		v2 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v2.AddArg(y)
		v0.AddArg2(v1, v2)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueRISCV64_OpEq32 lowers Eq32 to SEQZ(SUB ...), picking sign- or
// zero-extension of the operands based on x's signedness. The inner _i0 loop
// tries both argument orders (Eq is commutative).
func rewriteValueRISCV64_OpEq32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Eq32 x y)
	// cond: x.Type.IsSigned()
	// result: (SEQZ (SUB <x.Type> (SignExt32to64 x) (SignExt32to64 y)))
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x := v_0
			y := v_1
			if !(x.Type.IsSigned()) {
				continue
			}
			v.reset(OpRISCV64SEQZ)
			v0 := b.NewValue0(v.Pos, OpRISCV64SUB, x.Type)
			v1 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
			v1.AddArg(x)
			v2 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
			v2.AddArg(y)
			v0.AddArg2(v1, v2)
			v.AddArg(v0)
			return true
		}
		break
	}
	// match: (Eq32 x y)
	// cond: !x.Type.IsSigned()
	// result: (SEQZ (SUB <x.Type> (ZeroExt32to64 x) (ZeroExt32to64 y)))
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			x := v_0
			y := v_1
			if !(!x.Type.IsSigned()) {
				continue
			}
			v.reset(OpRISCV64SEQZ)
			v0 := b.NewValue0(v.Pos, OpRISCV64SUB, x.Type)
			v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
			v1.AddArg(x)
			v2 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
			v2.AddArg(y)
			v0.AddArg2(v1, v2)
			v.AddArg(v0)
			return true
		}
		break
	}
	return false
}
// rewriteValueRISCV64_OpEq64 lowers Eq64 to SEQZ(SUB x y); full-width operands
// need no extension.
func rewriteValueRISCV64_OpEq64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (Eq64 x y)
	// result: (SEQZ (SUB <x.Type> x y))
	for {
		x := v_0
		y := v_1
		v.reset(OpRISCV64SEQZ)
		v0 := b.NewValue0(v.Pos, OpRISCV64SUB, x.Type)
		v0.AddArg2(x, y)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueRISCV64_OpEq8 lowers Eq8 to SEQZ(SUB ...) on zero-extended operands.
func rewriteValueRISCV64_OpEq8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Eq8 x y)
	// result: (SEQZ (SUB <x.Type> (ZeroExt8to64 x) (ZeroExt8to64 y)))
	for {
		x := v_0
		y := v_1
		v.reset(OpRISCV64SEQZ)
		v0 := b.NewValue0(v.Pos, OpRISCV64SUB, x.Type)
		v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v1.AddArg(x)
		v2 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v2.AddArg(y)
		v0.AddArg2(v1, v2)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueRISCV64_OpEqB lowers boolean equality to SEQZ(SUB <Bool> x y).
func rewriteValueRISCV64_OpEqB(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (EqB x y)
	// result: (SEQZ (SUB <typ.Bool> x y))
	for {
		x := v_0
		y := v_1
		v.reset(OpRISCV64SEQZ)
		v0 := b.NewValue0(v.Pos, OpRISCV64SUB, typ.Bool)
		v0.AddArg2(x, y)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueRISCV64_OpEqPtr lowers pointer equality to SEQZ(SUB <Uintptr> x y).
func rewriteValueRISCV64_OpEqPtr(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (EqPtr x y)
	// result: (SEQZ (SUB <typ.Uintptr> x y))
	for {
		x := v_0
		y := v_1
		v.reset(OpRISCV64SEQZ)
		v0 := b.NewValue0(v.Pos, OpRISCV64SUB, typ.Uintptr)
		v0.AddArg2(x, y)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueRISCV64_OpHmul32 lowers Hmul32 (high 32 bits of a signed 32x32
// multiply) as a full 64-bit MUL of sign-extended operands, arithmetic-shifted
// right by 32.
func rewriteValueRISCV64_OpHmul32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Hmul32 x y)
	// result: (SRAI [32] (MUL (SignExt32to64 x) (SignExt32to64 y)))
	for {
		x := v_0
		y := v_1
		v.reset(OpRISCV64SRAI)
		v.AuxInt = int64ToAuxInt(32)
		v0 := b.NewValue0(v.Pos, OpRISCV64MUL, typ.Int64)
		v1 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
		v1.AddArg(x)
		v2 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
		v2.AddArg(y)
		v0.AddArg2(v1, v2)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueRISCV64_OpHmul32u lowers Hmul32u (high 32 bits of an unsigned
// 32x32 multiply) as a full 64-bit MUL of zero-extended operands,
// logical-shifted right by 32.
func rewriteValueRISCV64_OpHmul32u(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Hmul32u x y)
	// result: (SRLI [32] (MUL (ZeroExt32to64 x) (ZeroExt32to64 y)))
	for {
		x := v_0
		y := v_1
		v.reset(OpRISCV64SRLI)
		v.AuxInt = int64ToAuxInt(32)
		v0 := b.NewValue0(v.Pos, OpRISCV64MUL, typ.Int64)
		v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v1.AddArg(x)
		v2 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v2.AddArg(y)
		v0.AddArg2(v1, v2)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueRISCV64_OpLeq16 lowers x <= y as !(y < x).
func rewriteValueRISCV64_OpLeq16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Leq16 x y)
	// result: (Not (Less16 y x))
	for {
		x := v_0
		y := v_1
		v.reset(OpNot)
		v0 := b.NewValue0(v.Pos, OpLess16, typ.Bool)
		v0.AddArg2(y, x)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueRISCV64_OpLeq16U lowers unsigned x <= y as !(y < x).
func rewriteValueRISCV64_OpLeq16U(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Leq16U x y)
	// result: (Not (Less16U y x))
	for {
		x := v_0
		y := v_1
		v.reset(OpNot)
		v0 := b.NewValue0(v.Pos, OpLess16U, typ.Bool)
		v0.AddArg2(y, x)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueRISCV64_OpLeq32 lowers x <= y as !(y < x).
func rewriteValueRISCV64_OpLeq32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Leq32 x y)
	// result: (Not (Less32 y x))
	for {
		x := v_0
		y := v_1
		v.reset(OpNot)
		v0 := b.NewValue0(v.Pos, OpLess32, typ.Bool)
		v0.AddArg2(y, x)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueRISCV64_OpLeq32U lowers unsigned x <= y as !(y < x).
func rewriteValueRISCV64_OpLeq32U(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Leq32U x y)
	// result: (Not (Less32U y x))
	for {
		x := v_0
		y := v_1
		v.reset(OpNot)
		v0 := b.NewValue0(v.Pos, OpLess32U, typ.Bool)
		v0.AddArg2(y, x)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueRISCV64_OpLeq64 lowers x <= y as !(y < x).
func rewriteValueRISCV64_OpLeq64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Leq64 x y)
	// result: (Not (Less64 y x))
	for {
		x := v_0
		y := v_1
		v.reset(OpNot)
		v0 := b.NewValue0(v.Pos, OpLess64, typ.Bool)
		v0.AddArg2(y, x)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueRISCV64_OpLeq64U lowers unsigned x <= y as !(y < x).
func rewriteValueRISCV64_OpLeq64U(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Leq64U x y)
	// result: (Not (Less64U y x))
	for {
		x := v_0
		y := v_1
		v.reset(OpNot)
		v0 := b.NewValue0(v.Pos, OpLess64U, typ.Bool)
		v0.AddArg2(y, x)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueRISCV64_OpLeq8 lowers x <= y as !(y < x).
func rewriteValueRISCV64_OpLeq8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Leq8 x y)
	// result: (Not (Less8 y x))
	for {
		x := v_0
		y := v_1
		v.reset(OpNot)
		v0 := b.NewValue0(v.Pos, OpLess8, typ.Bool)
		v0.AddArg2(y, x)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueRISCV64_OpLeq8U lowers unsigned x <= y as !(y < x).
func rewriteValueRISCV64_OpLeq8U(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Leq8U x y)
	// result: (Not (Less8U y x))
	for {
		x := v_0
		y := v_1
		v.reset(OpNot)
		v0 := b.NewValue0(v.Pos, OpLess8U, typ.Bool)
		v0.AddArg2(y, x)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueRISCV64_OpLess16 lowers signed 16-bit less-than to SLT on
// sign-extended operands.
func rewriteValueRISCV64_OpLess16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Less16 x y)
	// result: (SLT (SignExt16to64 x) (SignExt16to64 y))
	for {
		x := v_0
		y := v_1
		v.reset(OpRISCV64SLT)
		v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
		v1.AddArg(y)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueRISCV64_OpLess16U lowers unsigned 16-bit less-than to SLTU on
// zero-extended operands.
func rewriteValueRISCV64_OpLess16U(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Less16U x y)
	// result: (SLTU (ZeroExt16to64 x) (ZeroExt16to64 y))
	for {
		x := v_0
		y := v_1
		v.reset(OpRISCV64SLTU)
		v0 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v1.AddArg(y)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueRISCV64_OpLess32 lowers signed 32-bit less-than to SLT on
// sign-extended operands.
func rewriteValueRISCV64_OpLess32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Less32 x y)
	// result: (SLT (SignExt32to64 x) (SignExt32to64 y))
	for {
		x := v_0
		y := v_1
		v.reset(OpRISCV64SLT)
		v0 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
		v1.AddArg(y)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueRISCV64_OpLess32U lowers unsigned 32-bit less-than to SLTU on
// zero-extended operands.
func rewriteValueRISCV64_OpLess32U(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Less32U x y)
	// result: (SLTU (ZeroExt32to64 x) (ZeroExt32to64 y))
	for {
		x := v_0
		y := v_1
		v.reset(OpRISCV64SLTU)
		v0 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v1.AddArg(y)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueRISCV64_OpLess8 lowers signed 8-bit less-than to SLT on
// sign-extended operands.
func rewriteValueRISCV64_OpLess8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Less8 x y)
	// result: (SLT (SignExt8to64 x) (SignExt8to64 y))
	for {
		x := v_0
		y := v_1
		v.reset(OpRISCV64SLT)
		v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
		v1.AddArg(y)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueRISCV64_OpLess8U lowers unsigned 8-bit less-than to SLTU on
// zero-extended operands.
func rewriteValueRISCV64_OpLess8U(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Less8U x y)
	// result: (SLTU (ZeroExt8to64 x) (ZeroExt8to64 y))
	for {
		x := v_0
		y := v_1
		v.reset(OpRISCV64SLTU)
		v0 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v1.AddArg(y)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueRISCV64_OpLoad lowers a generic Load to the width- and
// signedness-appropriate RISCV64 load: MOVB/MOVBU/MOVH/MOVHU/MOVW/MOVWU/MOVD
// for integers and pointers, FMOVW/FMOVD for floats. The rules are mutually
// exclusive on the element type, so at most one matches.
func rewriteValueRISCV64_OpLoad(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (Load <t> ptr mem)
	// cond: t.IsBoolean()
	// result: (MOVBUload ptr mem)
	for {
		t := v.Type
		ptr := v_0
		mem := v_1
		if !(t.IsBoolean()) {
			break
		}
		v.reset(OpRISCV64MOVBUload)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (Load <t> ptr mem)
	// cond: ( is8BitInt(t) && t.IsSigned())
	// result: (MOVBload ptr mem)
	for {
		t := v.Type
		ptr := v_0
		mem := v_1
		if !(is8BitInt(t) && t.IsSigned()) {
			break
		}
		v.reset(OpRISCV64MOVBload)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (Load <t> ptr mem)
	// cond: ( is8BitInt(t) && !t.IsSigned())
	// result: (MOVBUload ptr mem)
	for {
		t := v.Type
		ptr := v_0
		mem := v_1
		if !(is8BitInt(t) && !t.IsSigned()) {
			break
		}
		v.reset(OpRISCV64MOVBUload)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (Load <t> ptr mem)
	// cond: (is16BitInt(t) && t.IsSigned())
	// result: (MOVHload ptr mem)
	for {
		t := v.Type
		ptr := v_0
		mem := v_1
		if !(is16BitInt(t) && t.IsSigned()) {
			break
		}
		v.reset(OpRISCV64MOVHload)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (Load <t> ptr mem)
	// cond: (is16BitInt(t) && !t.IsSigned())
	// result: (MOVHUload ptr mem)
	for {
		t := v.Type
		ptr := v_0
		mem := v_1
		if !(is16BitInt(t) && !t.IsSigned()) {
			break
		}
		v.reset(OpRISCV64MOVHUload)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (Load <t> ptr mem)
	// cond: (is32BitInt(t) && t.IsSigned())
	// result: (MOVWload ptr mem)
	for {
		t := v.Type
		ptr := v_0
		mem := v_1
		if !(is32BitInt(t) && t.IsSigned()) {
			break
		}
		v.reset(OpRISCV64MOVWload)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (Load <t> ptr mem)
	// cond: (is32BitInt(t) && !t.IsSigned())
	// result: (MOVWUload ptr mem)
	for {
		t := v.Type
		ptr := v_0
		mem := v_1
		if !(is32BitInt(t) && !t.IsSigned()) {
			break
		}
		v.reset(OpRISCV64MOVWUload)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (Load <t> ptr mem)
	// cond: (is64BitInt(t) || isPtr(t))
	// result: (MOVDload ptr mem)
	for {
		t := v.Type
		ptr := v_0
		mem := v_1
		if !(is64BitInt(t) || isPtr(t)) {
			break
		}
		v.reset(OpRISCV64MOVDload)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (Load <t> ptr mem)
	// cond: is32BitFloat(t)
	// result: (FMOVWload ptr mem)
	for {
		t := v.Type
		ptr := v_0
		mem := v_1
		if !(is32BitFloat(t)) {
			break
		}
		v.reset(OpRISCV64FMOVWload)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (Load <t> ptr mem)
	// cond: is64BitFloat(t)
	// result: (FMOVDload ptr mem)
	for {
		t := v.Type
		ptr := v_0
		mem := v_1
		if !(is64BitFloat(t)) {
			break
		}
		v.reset(OpRISCV64FMOVDload)
		v.AddArg2(ptr, mem)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpLocalAddr lowers LocalAddr to MOVaddr. When the local
// contains pointers, the address is anchored to the memory state via
// SPanchored (keeps the base live for the GC); otherwise the memory argument
// is dropped.
func rewriteValueRISCV64_OpLocalAddr(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (LocalAddr <t> {sym} base mem)
	// cond: t.Elem().HasPointers()
	// result: (MOVaddr {sym} (SPanchored base mem))
	for {
		t := v.Type
		sym := auxToSym(v.Aux)
		base := v_0
		mem := v_1
		if !(t.Elem().HasPointers()) {
			break
		}
		v.reset(OpRISCV64MOVaddr)
		v.Aux = symToAux(sym)
		v0 := b.NewValue0(v.Pos, OpSPanchored, typ.Uintptr)
		v0.AddArg2(base, mem)
		v.AddArg(v0)
		return true
	}
	// match: (LocalAddr <t> {sym} base _)
	// cond: !t.Elem().HasPointers()
	// result: (MOVaddr {sym} base)
	for {
		t := v.Type
		sym := auxToSym(v.Aux)
		base := v_0
		if !(!t.Elem().HasPointers()) {
			break
		}
		v.reset(OpRISCV64MOVaddr)
		v.Aux = symToAux(sym)
		v.AddArg(base)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpLsh16x16 lowers Lsh16x16. For unbounded shifts the
// result is ANDed with a mask built from SLTIU [64]: 0/1 becomes all-zeros/
// all-ones via Neg16, zeroing the result when the shift amount is >= 64.
// Bounded shifts lower to a bare SLL.
func rewriteValueRISCV64_OpLsh16x16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Lsh16x16 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (AND (SLL <t> x y) (Neg16 <t> (SLTIU <t> [64] (ZeroExt16to64 y))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64AND)
		v0 := b.NewValue0(v.Pos, OpRISCV64SLL, t)
		v0.AddArg2(x, y)
		v1 := b.NewValue0(v.Pos, OpNeg16, t)
		v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
		v2.AuxInt = int64ToAuxInt(64)
		v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v3.AddArg(y)
		v2.AddArg(v3)
		v1.AddArg(v2)
		v.AddArg2(v0, v1)
		return true
	}
	// match: (Lsh16x16 x y)
	// cond: shiftIsBounded(v)
	// result: (SLL x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SLL)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpLsh16x32 lowers Lsh16x32; unbounded shifts are masked
// to zero when the (zero-extended) shift amount is >= 64, bounded shifts use
// bare SLL.
func rewriteValueRISCV64_OpLsh16x32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Lsh16x32 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (AND (SLL <t> x y) (Neg16 <t> (SLTIU <t> [64] (ZeroExt32to64 y))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64AND)
		v0 := b.NewValue0(v.Pos, OpRISCV64SLL, t)
		v0.AddArg2(x, y)
		v1 := b.NewValue0(v.Pos, OpNeg16, t)
		v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
		v2.AuxInt = int64ToAuxInt(64)
		v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v3.AddArg(y)
		v2.AddArg(v3)
		v1.AddArg(v2)
		v.AddArg2(v0, v1)
		return true
	}
	// match: (Lsh16x32 x y)
	// cond: shiftIsBounded(v)
	// result: (SLL x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SLL)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpLsh16x64 lowers Lsh16x64; the 64-bit shift amount
// needs no extension before the SLTIU [64] range check.
func rewriteValueRISCV64_OpLsh16x64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (Lsh16x64 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (AND (SLL <t> x y) (Neg16 <t> (SLTIU <t> [64] y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64AND)
		v0 := b.NewValue0(v.Pos, OpRISCV64SLL, t)
		v0.AddArg2(x, y)
		v1 := b.NewValue0(v.Pos, OpNeg16, t)
		v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
		v2.AuxInt = int64ToAuxInt(64)
		v2.AddArg(y)
		v1.AddArg(v2)
		v.AddArg2(v0, v1)
		return true
	}
	// match: (Lsh16x64 x y)
	// cond: shiftIsBounded(v)
	// result: (SLL x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SLL)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpLsh16x8 lowers Lsh16x8; unbounded shifts are masked to
// zero when the (zero-extended) shift amount is >= 64, bounded shifts use bare
// SLL.
func rewriteValueRISCV64_OpLsh16x8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Lsh16x8 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (AND (SLL <t> x y) (Neg16 <t> (SLTIU <t> [64] (ZeroExt8to64 y))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64AND)
		v0 := b.NewValue0(v.Pos, OpRISCV64SLL, t)
		v0.AddArg2(x, y)
		v1 := b.NewValue0(v.Pos, OpNeg16, t)
		v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
		v2.AuxInt = int64ToAuxInt(64)
		v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v3.AddArg(y)
		v2.AddArg(v3)
		v1.AddArg(v2)
		v.AddArg2(v0, v1)
		return true
	}
	// match: (Lsh16x8 x y)
	// cond: shiftIsBounded(v)
	// result: (SLL x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SLL)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpLsh32x16 lowers Lsh32x16; unbounded shifts are masked
// to zero when the (zero-extended) shift amount is >= 64, bounded shifts use
// bare SLL.
func rewriteValueRISCV64_OpLsh32x16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Lsh32x16 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (AND (SLL <t> x y) (Neg32 <t> (SLTIU <t> [64] (ZeroExt16to64 y))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64AND)
		v0 := b.NewValue0(v.Pos, OpRISCV64SLL, t)
		v0.AddArg2(x, y)
		v1 := b.NewValue0(v.Pos, OpNeg32, t)
		v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
		v2.AuxInt = int64ToAuxInt(64)
		v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v3.AddArg(y)
		v2.AddArg(v3)
		v1.AddArg(v2)
		v.AddArg2(v0, v1)
		return true
	}
	// match: (Lsh32x16 x y)
	// cond: shiftIsBounded(v)
	// result: (SLL x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SLL)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpLsh32x32 lowers Lsh32x32; unbounded shifts are masked
// to zero when the (zero-extended) shift amount is >= 64, bounded shifts use
// bare SLL.
func rewriteValueRISCV64_OpLsh32x32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Lsh32x32 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (AND (SLL <t> x y) (Neg32 <t> (SLTIU <t> [64] (ZeroExt32to64 y))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64AND)
		v0 := b.NewValue0(v.Pos, OpRISCV64SLL, t)
		v0.AddArg2(x, y)
		v1 := b.NewValue0(v.Pos, OpNeg32, t)
		v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
		v2.AuxInt = int64ToAuxInt(64)
		v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v3.AddArg(y)
		v2.AddArg(v3)
		v1.AddArg(v2)
		v.AddArg2(v0, v1)
		return true
	}
	// match: (Lsh32x32 x y)
	// cond: shiftIsBounded(v)
	// result: (SLL x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SLL)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpLsh32x64 lowers Lsh32x64; the 64-bit shift amount
// needs no extension before the SLTIU [64] range check.
func rewriteValueRISCV64_OpLsh32x64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (Lsh32x64 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (AND (SLL <t> x y) (Neg32 <t> (SLTIU <t> [64] y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64AND)
		v0 := b.NewValue0(v.Pos, OpRISCV64SLL, t)
		v0.AddArg2(x, y)
		v1 := b.NewValue0(v.Pos, OpNeg32, t)
		v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
		v2.AuxInt = int64ToAuxInt(64)
		v2.AddArg(y)
		v1.AddArg(v2)
		v.AddArg2(v0, v1)
		return true
	}
	// match: (Lsh32x64 x y)
	// cond: shiftIsBounded(v)
	// result: (SLL x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SLL)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpLsh32x8 lowers Lsh32x8; unbounded shifts are masked to
// zero when the (zero-extended) shift amount is >= 64, bounded shifts use bare
// SLL.
func rewriteValueRISCV64_OpLsh32x8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Lsh32x8 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (AND (SLL <t> x y) (Neg32 <t> (SLTIU <t> [64] (ZeroExt8to64 y))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64AND)
		v0 := b.NewValue0(v.Pos, OpRISCV64SLL, t)
		v0.AddArg2(x, y)
		v1 := b.NewValue0(v.Pos, OpNeg32, t)
		v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
		v2.AuxInt = int64ToAuxInt(64)
		v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v3.AddArg(y)
		v2.AddArg(v3)
		v1.AddArg(v2)
		v.AddArg2(v0, v1)
		return true
	}
	// match: (Lsh32x8 x y)
	// cond: shiftIsBounded(v)
	// result: (SLL x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SLL)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpLsh64x16 lowers Lsh64x16; unbounded shifts are masked
// to zero when the (zero-extended) shift amount is >= 64, bounded shifts use
// bare SLL.
func rewriteValueRISCV64_OpLsh64x16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Lsh64x16 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (AND (SLL <t> x y) (Neg64 <t> (SLTIU <t> [64] (ZeroExt16to64 y))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64AND)
		v0 := b.NewValue0(v.Pos, OpRISCV64SLL, t)
		v0.AddArg2(x, y)
		v1 := b.NewValue0(v.Pos, OpNeg64, t)
		v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
		v2.AuxInt = int64ToAuxInt(64)
		v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v3.AddArg(y)
		v2.AddArg(v3)
		v1.AddArg(v2)
		v.AddArg2(v0, v1)
		return true
	}
	// match: (Lsh64x16 x y)
	// cond: shiftIsBounded(v)
	// result: (SLL x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SLL)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpLsh64x32 lowers Lsh64x32 (64-bit left shift by a
// 32-bit amount). Unbounded shifts mask the SLL result with
// Neg64(SLTIU [64] (ZeroExt32to64 y)) so amounts >= 64 produce 0; bounded
// shifts become a bare SLL.
func rewriteValueRISCV64_OpLsh64x32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Lsh64x32 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (AND (SLL <t> x y) (Neg64 <t> (SLTIU <t> [64] (ZeroExt32to64 y))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64AND)
		v0 := b.NewValue0(v.Pos, OpRISCV64SLL, t)
		v0.AddArg2(x, y)
		v1 := b.NewValue0(v.Pos, OpNeg64, t)
		v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
		v2.AuxInt = int64ToAuxInt(64)
		v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v3.AddArg(y)
		v2.AddArg(v3)
		v1.AddArg(v2)
		v.AddArg2(v0, v1)
		return true
	}
	// match: (Lsh64x32 x y)
	// cond: shiftIsBounded(v)
	// result: (SLL x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SLL)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpLsh64x64 lowers Lsh64x64 (64-bit left shift by a
// 64-bit amount). The amount is already full width, so no zero-extension
// is needed: unbounded shifts mask the SLL result with
// Neg64(SLTIU [64] y) so amounts >= 64 produce 0; bounded shifts become a
// bare SLL.
func rewriteValueRISCV64_OpLsh64x64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (Lsh64x64 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (AND (SLL <t> x y) (Neg64 <t> (SLTIU <t> [64] y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64AND)
		v0 := b.NewValue0(v.Pos, OpRISCV64SLL, t)
		v0.AddArg2(x, y)
		v1 := b.NewValue0(v.Pos, OpNeg64, t)
		v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
		v2.AuxInt = int64ToAuxInt(64)
		v2.AddArg(y)
		v1.AddArg(v2)
		v.AddArg2(v0, v1)
		return true
	}
	// match: (Lsh64x64 x y)
	// cond: shiftIsBounded(v)
	// result: (SLL x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SLL)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpLsh64x8 lowers Lsh64x8 (64-bit left shift by an
// 8-bit amount). Unbounded shifts mask the SLL result with
// Neg64(SLTIU [64] (ZeroExt8to64 y)) so amounts >= 64 produce 0; bounded
// shifts become a bare SLL.
func rewriteValueRISCV64_OpLsh64x8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Lsh64x8 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (AND (SLL <t> x y) (Neg64 <t> (SLTIU <t> [64] (ZeroExt8to64 y))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64AND)
		v0 := b.NewValue0(v.Pos, OpRISCV64SLL, t)
		v0.AddArg2(x, y)
		v1 := b.NewValue0(v.Pos, OpNeg64, t)
		v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
		v2.AuxInt = int64ToAuxInt(64)
		v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v3.AddArg(y)
		v2.AddArg(v3)
		v1.AddArg(v2)
		v.AddArg2(v0, v1)
		return true
	}
	// match: (Lsh64x8 x y)
	// cond: shiftIsBounded(v)
	// result: (SLL x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SLL)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpLsh8x16 lowers Lsh8x16 (8-bit left shift by a
// 16-bit amount). Unbounded shifts mask the SLL result with
// Neg8(SLTIU [64] (ZeroExt16to64 y)) so amounts >= 64 produce 0; bounded
// shifts become a bare SLL.
func rewriteValueRISCV64_OpLsh8x16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Lsh8x16 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (AND (SLL <t> x y) (Neg8 <t> (SLTIU <t> [64] (ZeroExt16to64 y))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64AND)
		v0 := b.NewValue0(v.Pos, OpRISCV64SLL, t)
		v0.AddArg2(x, y)
		v1 := b.NewValue0(v.Pos, OpNeg8, t)
		v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
		v2.AuxInt = int64ToAuxInt(64)
		v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v3.AddArg(y)
		v2.AddArg(v3)
		v1.AddArg(v2)
		v.AddArg2(v0, v1)
		return true
	}
	// match: (Lsh8x16 x y)
	// cond: shiftIsBounded(v)
	// result: (SLL x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SLL)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpLsh8x32 lowers Lsh8x32 (8-bit left shift by a
// 32-bit amount). Unbounded shifts mask the SLL result with
// Neg8(SLTIU [64] (ZeroExt32to64 y)) so amounts >= 64 produce 0; bounded
// shifts become a bare SLL.
func rewriteValueRISCV64_OpLsh8x32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Lsh8x32 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (AND (SLL <t> x y) (Neg8 <t> (SLTIU <t> [64] (ZeroExt32to64 y))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64AND)
		v0 := b.NewValue0(v.Pos, OpRISCV64SLL, t)
		v0.AddArg2(x, y)
		v1 := b.NewValue0(v.Pos, OpNeg8, t)
		v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
		v2.AuxInt = int64ToAuxInt(64)
		v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v3.AddArg(y)
		v2.AddArg(v3)
		v1.AddArg(v2)
		v.AddArg2(v0, v1)
		return true
	}
	// match: (Lsh8x32 x y)
	// cond: shiftIsBounded(v)
	// result: (SLL x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SLL)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpLsh8x64 lowers Lsh8x64 (8-bit left shift by a
// 64-bit amount). The amount is already full width, so no zero-extension
// is needed: unbounded shifts mask the SLL result with Neg8(SLTIU [64] y)
// so amounts >= 64 produce 0; bounded shifts become a bare SLL.
func rewriteValueRISCV64_OpLsh8x64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (Lsh8x64 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (AND (SLL <t> x y) (Neg8 <t> (SLTIU <t> [64] y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64AND)
		v0 := b.NewValue0(v.Pos, OpRISCV64SLL, t)
		v0.AddArg2(x, y)
		v1 := b.NewValue0(v.Pos, OpNeg8, t)
		v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
		v2.AuxInt = int64ToAuxInt(64)
		v2.AddArg(y)
		v1.AddArg(v2)
		v.AddArg2(v0, v1)
		return true
	}
	// match: (Lsh8x64 x y)
	// cond: shiftIsBounded(v)
	// result: (SLL x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SLL)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpLsh8x8 lowers Lsh8x8 (8-bit left shift by an
// 8-bit amount). Unbounded shifts mask the SLL result with
// Neg8(SLTIU [64] (ZeroExt8to64 y)) so amounts >= 64 produce 0; bounded
// shifts become a bare SLL.
func rewriteValueRISCV64_OpLsh8x8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Lsh8x8 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (AND (SLL <t> x y) (Neg8 <t> (SLTIU <t> [64] (ZeroExt8to64 y))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64AND)
		v0 := b.NewValue0(v.Pos, OpRISCV64SLL, t)
		v0.AddArg2(x, y)
		v1 := b.NewValue0(v.Pos, OpNeg8, t)
		v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
		v2.AuxInt = int64ToAuxInt(64)
		v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v3.AddArg(y)
		v2.AddArg(v3)
		v1.AddArg(v2)
		v.AddArg2(v0, v1)
		return true
	}
	// match: (Lsh8x8 x y)
	// cond: shiftIsBounded(v)
	// result: (SLL x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SLL)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpMod16 lowers signed 16-bit remainder to REMW on
// sign-extended 32-bit operands. Only the AuxInt=false form is rewritten
// here (presumably the form that needs no signed-overflow fix-up — confirm
// against the generic Mod16 op definition); otherwise the rewrite declines.
func rewriteValueRISCV64_OpMod16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Mod16 x y [false])
	// result: (REMW (SignExt16to32 x) (SignExt16to32 y))
	for {
		if auxIntToBool(v.AuxInt) != false {
			break
		}
		x := v_0
		y := v_1
		v.reset(OpRISCV64REMW)
		v0 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32)
		v1.AddArg(y)
		v.AddArg2(v0, v1)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpMod16u lowers unsigned 16-bit remainder to REMUW
// on zero-extended 32-bit operands. This rewrite always fires.
func rewriteValueRISCV64_OpMod16u(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Mod16u x y)
	// result: (REMUW (ZeroExt16to32 x) (ZeroExt16to32 y))
	for {
		x := v_0
		y := v_1
		v.reset(OpRISCV64REMUW)
		v0 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
		v1.AddArg(y)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueRISCV64_OpMod32 lowers signed 32-bit remainder directly to
// REMW. Only the AuxInt=false form is rewritten (presumably the form that
// needs no signed-overflow fix-up — confirm against the generic Mod32 op
// definition); otherwise the rewrite declines.
func rewriteValueRISCV64_OpMod32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (Mod32 x y [false])
	// result: (REMW x y)
	for {
		if auxIntToBool(v.AuxInt) != false {
			break
		}
		x := v_0
		y := v_1
		v.reset(OpRISCV64REMW)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpMod64 lowers signed 64-bit remainder directly to
// REM. Only the AuxInt=false form is rewritten (presumably the form that
// needs no signed-overflow fix-up — confirm against the generic Mod64 op
// definition); otherwise the rewrite declines.
func rewriteValueRISCV64_OpMod64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (Mod64 x y [false])
	// result: (REM x y)
	for {
		if auxIntToBool(v.AuxInt) != false {
			break
		}
		x := v_0
		y := v_1
		v.reset(OpRISCV64REM)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpMod8 lowers signed 8-bit remainder to REMW on
// sign-extended 32-bit operands. This rewrite always fires.
func rewriteValueRISCV64_OpMod8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Mod8 x y)
	// result: (REMW (SignExt8to32 x) (SignExt8to32 y))
	for {
		x := v_0
		y := v_1
		v.reset(OpRISCV64REMW)
		v0 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32)
		v1.AddArg(y)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueRISCV64_OpMod8u lowers unsigned 8-bit remainder to REMUW on
// zero-extended 32-bit operands. This rewrite always fires.
func rewriteValueRISCV64_OpMod8u(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Mod8u x y)
	// result: (REMUW (ZeroExt8to32 x) (ZeroExt8to32 y))
	for {
		x := v_0
		y := v_1
		v.reset(OpRISCV64REMUW)
		v0 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
		v1.AddArg(y)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueRISCV64_OpMove lowers the generic Move op (copy AuxInt bytes
// from src to dst, threading mem) to RISCV64 code. Rules are tried in
// order: a zero-length move collapses to mem; small fixed sizes (1-8, 12,
// 16, 24, 32 bytes) expand into chains of MOVB/MOVH/MOVW/MOVDstore +
// matching loads, preferring the widest access the type's alignment
// permits; 8-byte-aligned multiples of 8 up to 1 KiB use DUFFCOPY (the
// AuxInt encodes the entry offset into the Duff's-device copy routine);
// everything else falls back to LoweredMove, whose third argument is an
// ADDI marking the last source address. Order matters: each unaligned
// fallback rule must follow its aligned counterpart.
func rewriteValueRISCV64_OpMove(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	config := b.Func.Config
	typ := &b.Func.Config.Types
	// match: (Move [0] _ _ mem)
	// result: mem
	for {
		if auxIntToInt64(v.AuxInt) != 0 {
			break
		}
		mem := v_2
		v.copyOf(mem)
		return true
	}
	// match: (Move [1] dst src mem)
	// result: (MOVBstore dst (MOVBload src mem) mem)
	for {
		if auxIntToInt64(v.AuxInt) != 1 {
			break
		}
		dst := v_0
		src := v_1
		mem := v_2
		v.reset(OpRISCV64MOVBstore)
		v0 := b.NewValue0(v.Pos, OpRISCV64MOVBload, typ.Int8)
		v0.AddArg2(src, mem)
		v.AddArg3(dst, v0, mem)
		return true
	}
	// match: (Move [2] {t} dst src mem)
	// cond: t.Alignment()%2 == 0
	// result: (MOVHstore dst (MOVHload src mem) mem)
	for {
		if auxIntToInt64(v.AuxInt) != 2 {
			break
		}
		t := auxToType(v.Aux)
		dst := v_0
		src := v_1
		mem := v_2
		if !(t.Alignment()%2 == 0) {
			break
		}
		v.reset(OpRISCV64MOVHstore)
		v0 := b.NewValue0(v.Pos, OpRISCV64MOVHload, typ.Int16)
		v0.AddArg2(src, mem)
		v.AddArg3(dst, v0, mem)
		return true
	}
	// match: (Move [2] dst src mem)
	// result: (MOVBstore [1] dst (MOVBload [1] src mem) (MOVBstore dst (MOVBload src mem) mem))
	for {
		if auxIntToInt64(v.AuxInt) != 2 {
			break
		}
		dst := v_0
		src := v_1
		mem := v_2
		v.reset(OpRISCV64MOVBstore)
		v.AuxInt = int32ToAuxInt(1)
		v0 := b.NewValue0(v.Pos, OpRISCV64MOVBload, typ.Int8)
		v0.AuxInt = int32ToAuxInt(1)
		v0.AddArg2(src, mem)
		v1 := b.NewValue0(v.Pos, OpRISCV64MOVBstore, types.TypeMem)
		v2 := b.NewValue0(v.Pos, OpRISCV64MOVBload, typ.Int8)
		v2.AddArg2(src, mem)
		v1.AddArg3(dst, v2, mem)
		v.AddArg3(dst, v0, v1)
		return true
	}
	// match: (Move [4] {t} dst src mem)
	// cond: t.Alignment()%4 == 0
	// result: (MOVWstore dst (MOVWload src mem) mem)
	for {
		if auxIntToInt64(v.AuxInt) != 4 {
			break
		}
		t := auxToType(v.Aux)
		dst := v_0
		src := v_1
		mem := v_2
		if !(t.Alignment()%4 == 0) {
			break
		}
		v.reset(OpRISCV64MOVWstore)
		v0 := b.NewValue0(v.Pos, OpRISCV64MOVWload, typ.Int32)
		v0.AddArg2(src, mem)
		v.AddArg3(dst, v0, mem)
		return true
	}
	// match: (Move [4] {t} dst src mem)
	// cond: t.Alignment()%2 == 0
	// result: (MOVHstore [2] dst (MOVHload [2] src mem) (MOVHstore dst (MOVHload src mem) mem))
	for {
		if auxIntToInt64(v.AuxInt) != 4 {
			break
		}
		t := auxToType(v.Aux)
		dst := v_0
		src := v_1
		mem := v_2
		if !(t.Alignment()%2 == 0) {
			break
		}
		v.reset(OpRISCV64MOVHstore)
		v.AuxInt = int32ToAuxInt(2)
		v0 := b.NewValue0(v.Pos, OpRISCV64MOVHload, typ.Int16)
		v0.AuxInt = int32ToAuxInt(2)
		v0.AddArg2(src, mem)
		v1 := b.NewValue0(v.Pos, OpRISCV64MOVHstore, types.TypeMem)
		v2 := b.NewValue0(v.Pos, OpRISCV64MOVHload, typ.Int16)
		v2.AddArg2(src, mem)
		v1.AddArg3(dst, v2, mem)
		v.AddArg3(dst, v0, v1)
		return true
	}
	// match: (Move [4] dst src mem)
	// result: (MOVBstore [3] dst (MOVBload [3] src mem) (MOVBstore [2] dst (MOVBload [2] src mem) (MOVBstore [1] dst (MOVBload [1] src mem) (MOVBstore dst (MOVBload src mem) mem))))
	for {
		if auxIntToInt64(v.AuxInt) != 4 {
			break
		}
		dst := v_0
		src := v_1
		mem := v_2
		v.reset(OpRISCV64MOVBstore)
		v.AuxInt = int32ToAuxInt(3)
		v0 := b.NewValue0(v.Pos, OpRISCV64MOVBload, typ.Int8)
		v0.AuxInt = int32ToAuxInt(3)
		v0.AddArg2(src, mem)
		v1 := b.NewValue0(v.Pos, OpRISCV64MOVBstore, types.TypeMem)
		v1.AuxInt = int32ToAuxInt(2)
		v2 := b.NewValue0(v.Pos, OpRISCV64MOVBload, typ.Int8)
		v2.AuxInt = int32ToAuxInt(2)
		v2.AddArg2(src, mem)
		v3 := b.NewValue0(v.Pos, OpRISCV64MOVBstore, types.TypeMem)
		v3.AuxInt = int32ToAuxInt(1)
		v4 := b.NewValue0(v.Pos, OpRISCV64MOVBload, typ.Int8)
		v4.AuxInt = int32ToAuxInt(1)
		v4.AddArg2(src, mem)
		v5 := b.NewValue0(v.Pos, OpRISCV64MOVBstore, types.TypeMem)
		v6 := b.NewValue0(v.Pos, OpRISCV64MOVBload, typ.Int8)
		v6.AddArg2(src, mem)
		v5.AddArg3(dst, v6, mem)
		v3.AddArg3(dst, v4, v5)
		v1.AddArg3(dst, v2, v3)
		v.AddArg3(dst, v0, v1)
		return true
	}
	// match: (Move [8] {t} dst src mem)
	// cond: t.Alignment()%8 == 0
	// result: (MOVDstore dst (MOVDload src mem) mem)
	for {
		if auxIntToInt64(v.AuxInt) != 8 {
			break
		}
		t := auxToType(v.Aux)
		dst := v_0
		src := v_1
		mem := v_2
		if !(t.Alignment()%8 == 0) {
			break
		}
		v.reset(OpRISCV64MOVDstore)
		v0 := b.NewValue0(v.Pos, OpRISCV64MOVDload, typ.Int64)
		v0.AddArg2(src, mem)
		v.AddArg3(dst, v0, mem)
		return true
	}
	// match: (Move [8] {t} dst src mem)
	// cond: t.Alignment()%4 == 0
	// result: (MOVWstore [4] dst (MOVWload [4] src mem) (MOVWstore dst (MOVWload src mem) mem))
	for {
		if auxIntToInt64(v.AuxInt) != 8 {
			break
		}
		t := auxToType(v.Aux)
		dst := v_0
		src := v_1
		mem := v_2
		if !(t.Alignment()%4 == 0) {
			break
		}
		v.reset(OpRISCV64MOVWstore)
		v.AuxInt = int32ToAuxInt(4)
		v0 := b.NewValue0(v.Pos, OpRISCV64MOVWload, typ.Int32)
		v0.AuxInt = int32ToAuxInt(4)
		v0.AddArg2(src, mem)
		v1 := b.NewValue0(v.Pos, OpRISCV64MOVWstore, types.TypeMem)
		v2 := b.NewValue0(v.Pos, OpRISCV64MOVWload, typ.Int32)
		v2.AddArg2(src, mem)
		v1.AddArg3(dst, v2, mem)
		v.AddArg3(dst, v0, v1)
		return true
	}
	// match: (Move [8] {t} dst src mem)
	// cond: t.Alignment()%2 == 0
	// result: (MOVHstore [6] dst (MOVHload [6] src mem) (MOVHstore [4] dst (MOVHload [4] src mem) (MOVHstore [2] dst (MOVHload [2] src mem) (MOVHstore dst (MOVHload src mem) mem))))
	for {
		if auxIntToInt64(v.AuxInt) != 8 {
			break
		}
		t := auxToType(v.Aux)
		dst := v_0
		src := v_1
		mem := v_2
		if !(t.Alignment()%2 == 0) {
			break
		}
		v.reset(OpRISCV64MOVHstore)
		v.AuxInt = int32ToAuxInt(6)
		v0 := b.NewValue0(v.Pos, OpRISCV64MOVHload, typ.Int16)
		v0.AuxInt = int32ToAuxInt(6)
		v0.AddArg2(src, mem)
		v1 := b.NewValue0(v.Pos, OpRISCV64MOVHstore, types.TypeMem)
		v1.AuxInt = int32ToAuxInt(4)
		v2 := b.NewValue0(v.Pos, OpRISCV64MOVHload, typ.Int16)
		v2.AuxInt = int32ToAuxInt(4)
		v2.AddArg2(src, mem)
		v3 := b.NewValue0(v.Pos, OpRISCV64MOVHstore, types.TypeMem)
		v3.AuxInt = int32ToAuxInt(2)
		v4 := b.NewValue0(v.Pos, OpRISCV64MOVHload, typ.Int16)
		v4.AuxInt = int32ToAuxInt(2)
		v4.AddArg2(src, mem)
		v5 := b.NewValue0(v.Pos, OpRISCV64MOVHstore, types.TypeMem)
		v6 := b.NewValue0(v.Pos, OpRISCV64MOVHload, typ.Int16)
		v6.AddArg2(src, mem)
		v5.AddArg3(dst, v6, mem)
		v3.AddArg3(dst, v4, v5)
		v1.AddArg3(dst, v2, v3)
		v.AddArg3(dst, v0, v1)
		return true
	}
	// match: (Move [3] dst src mem)
	// result: (MOVBstore [2] dst (MOVBload [2] src mem) (MOVBstore [1] dst (MOVBload [1] src mem) (MOVBstore dst (MOVBload src mem) mem)))
	for {
		if auxIntToInt64(v.AuxInt) != 3 {
			break
		}
		dst := v_0
		src := v_1
		mem := v_2
		v.reset(OpRISCV64MOVBstore)
		v.AuxInt = int32ToAuxInt(2)
		v0 := b.NewValue0(v.Pos, OpRISCV64MOVBload, typ.Int8)
		v0.AuxInt = int32ToAuxInt(2)
		v0.AddArg2(src, mem)
		v1 := b.NewValue0(v.Pos, OpRISCV64MOVBstore, types.TypeMem)
		v1.AuxInt = int32ToAuxInt(1)
		v2 := b.NewValue0(v.Pos, OpRISCV64MOVBload, typ.Int8)
		v2.AuxInt = int32ToAuxInt(1)
		v2.AddArg2(src, mem)
		v3 := b.NewValue0(v.Pos, OpRISCV64MOVBstore, types.TypeMem)
		v4 := b.NewValue0(v.Pos, OpRISCV64MOVBload, typ.Int8)
		v4.AddArg2(src, mem)
		v3.AddArg3(dst, v4, mem)
		v1.AddArg3(dst, v2, v3)
		v.AddArg3(dst, v0, v1)
		return true
	}
	// match: (Move [6] {t} dst src mem)
	// cond: t.Alignment()%2 == 0
	// result: (MOVHstore [4] dst (MOVHload [4] src mem) (MOVHstore [2] dst (MOVHload [2] src mem) (MOVHstore dst (MOVHload src mem) mem)))
	for {
		if auxIntToInt64(v.AuxInt) != 6 {
			break
		}
		t := auxToType(v.Aux)
		dst := v_0
		src := v_1
		mem := v_2
		if !(t.Alignment()%2 == 0) {
			break
		}
		v.reset(OpRISCV64MOVHstore)
		v.AuxInt = int32ToAuxInt(4)
		v0 := b.NewValue0(v.Pos, OpRISCV64MOVHload, typ.Int16)
		v0.AuxInt = int32ToAuxInt(4)
		v0.AddArg2(src, mem)
		v1 := b.NewValue0(v.Pos, OpRISCV64MOVHstore, types.TypeMem)
		v1.AuxInt = int32ToAuxInt(2)
		v2 := b.NewValue0(v.Pos, OpRISCV64MOVHload, typ.Int16)
		v2.AuxInt = int32ToAuxInt(2)
		v2.AddArg2(src, mem)
		v3 := b.NewValue0(v.Pos, OpRISCV64MOVHstore, types.TypeMem)
		v4 := b.NewValue0(v.Pos, OpRISCV64MOVHload, typ.Int16)
		v4.AddArg2(src, mem)
		v3.AddArg3(dst, v4, mem)
		v1.AddArg3(dst, v2, v3)
		v.AddArg3(dst, v0, v1)
		return true
	}
	// match: (Move [12] {t} dst src mem)
	// cond: t.Alignment()%4 == 0
	// result: (MOVWstore [8] dst (MOVWload [8] src mem) (MOVWstore [4] dst (MOVWload [4] src mem) (MOVWstore dst (MOVWload src mem) mem)))
	for {
		if auxIntToInt64(v.AuxInt) != 12 {
			break
		}
		t := auxToType(v.Aux)
		dst := v_0
		src := v_1
		mem := v_2
		if !(t.Alignment()%4 == 0) {
			break
		}
		v.reset(OpRISCV64MOVWstore)
		v.AuxInt = int32ToAuxInt(8)
		v0 := b.NewValue0(v.Pos, OpRISCV64MOVWload, typ.Int32)
		v0.AuxInt = int32ToAuxInt(8)
		v0.AddArg2(src, mem)
		v1 := b.NewValue0(v.Pos, OpRISCV64MOVWstore, types.TypeMem)
		v1.AuxInt = int32ToAuxInt(4)
		v2 := b.NewValue0(v.Pos, OpRISCV64MOVWload, typ.Int32)
		v2.AuxInt = int32ToAuxInt(4)
		v2.AddArg2(src, mem)
		v3 := b.NewValue0(v.Pos, OpRISCV64MOVWstore, types.TypeMem)
		v4 := b.NewValue0(v.Pos, OpRISCV64MOVWload, typ.Int32)
		v4.AddArg2(src, mem)
		v3.AddArg3(dst, v4, mem)
		v1.AddArg3(dst, v2, v3)
		v.AddArg3(dst, v0, v1)
		return true
	}
	// match: (Move [16] {t} dst src mem)
	// cond: t.Alignment()%8 == 0
	// result: (MOVDstore [8] dst (MOVDload [8] src mem) (MOVDstore dst (MOVDload src mem) mem))
	for {
		if auxIntToInt64(v.AuxInt) != 16 {
			break
		}
		t := auxToType(v.Aux)
		dst := v_0
		src := v_1
		mem := v_2
		if !(t.Alignment()%8 == 0) {
			break
		}
		v.reset(OpRISCV64MOVDstore)
		v.AuxInt = int32ToAuxInt(8)
		v0 := b.NewValue0(v.Pos, OpRISCV64MOVDload, typ.Int64)
		v0.AuxInt = int32ToAuxInt(8)
		v0.AddArg2(src, mem)
		v1 := b.NewValue0(v.Pos, OpRISCV64MOVDstore, types.TypeMem)
		v2 := b.NewValue0(v.Pos, OpRISCV64MOVDload, typ.Int64)
		v2.AddArg2(src, mem)
		v1.AddArg3(dst, v2, mem)
		v.AddArg3(dst, v0, v1)
		return true
	}
	// match: (Move [24] {t} dst src mem)
	// cond: t.Alignment()%8 == 0
	// result: (MOVDstore [16] dst (MOVDload [16] src mem) (MOVDstore [8] dst (MOVDload [8] src mem) (MOVDstore dst (MOVDload src mem) mem)))
	for {
		if auxIntToInt64(v.AuxInt) != 24 {
			break
		}
		t := auxToType(v.Aux)
		dst := v_0
		src := v_1
		mem := v_2
		if !(t.Alignment()%8 == 0) {
			break
		}
		v.reset(OpRISCV64MOVDstore)
		v.AuxInt = int32ToAuxInt(16)
		v0 := b.NewValue0(v.Pos, OpRISCV64MOVDload, typ.Int64)
		v0.AuxInt = int32ToAuxInt(16)
		v0.AddArg2(src, mem)
		v1 := b.NewValue0(v.Pos, OpRISCV64MOVDstore, types.TypeMem)
		v1.AuxInt = int32ToAuxInt(8)
		v2 := b.NewValue0(v.Pos, OpRISCV64MOVDload, typ.Int64)
		v2.AuxInt = int32ToAuxInt(8)
		v2.AddArg2(src, mem)
		v3 := b.NewValue0(v.Pos, OpRISCV64MOVDstore, types.TypeMem)
		v4 := b.NewValue0(v.Pos, OpRISCV64MOVDload, typ.Int64)
		v4.AddArg2(src, mem)
		v3.AddArg3(dst, v4, mem)
		v1.AddArg3(dst, v2, v3)
		v.AddArg3(dst, v0, v1)
		return true
	}
	// match: (Move [32] {t} dst src mem)
	// cond: t.Alignment()%8 == 0
	// result: (MOVDstore [24] dst (MOVDload [24] src mem) (MOVDstore [16] dst (MOVDload [16] src mem) (MOVDstore [8] dst (MOVDload [8] src mem) (MOVDstore dst (MOVDload src mem) mem))))
	for {
		if auxIntToInt64(v.AuxInt) != 32 {
			break
		}
		t := auxToType(v.Aux)
		dst := v_0
		src := v_1
		mem := v_2
		if !(t.Alignment()%8 == 0) {
			break
		}
		v.reset(OpRISCV64MOVDstore)
		v.AuxInt = int32ToAuxInt(24)
		v0 := b.NewValue0(v.Pos, OpRISCV64MOVDload, typ.Int64)
		v0.AuxInt = int32ToAuxInt(24)
		v0.AddArg2(src, mem)
		v1 := b.NewValue0(v.Pos, OpRISCV64MOVDstore, types.TypeMem)
		v1.AuxInt = int32ToAuxInt(16)
		v2 := b.NewValue0(v.Pos, OpRISCV64MOVDload, typ.Int64)
		v2.AuxInt = int32ToAuxInt(16)
		v2.AddArg2(src, mem)
		v3 := b.NewValue0(v.Pos, OpRISCV64MOVDstore, types.TypeMem)
		v3.AuxInt = int32ToAuxInt(8)
		v4 := b.NewValue0(v.Pos, OpRISCV64MOVDload, typ.Int64)
		v4.AuxInt = int32ToAuxInt(8)
		v4.AddArg2(src, mem)
		v5 := b.NewValue0(v.Pos, OpRISCV64MOVDstore, types.TypeMem)
		v6 := b.NewValue0(v.Pos, OpRISCV64MOVDload, typ.Int64)
		v6.AddArg2(src, mem)
		v5.AddArg3(dst, v6, mem)
		v3.AddArg3(dst, v4, v5)
		v1.AddArg3(dst, v2, v3)
		v.AddArg3(dst, v0, v1)
		return true
	}
	// match: (Move [s] {t} dst src mem)
	// cond: s%8 == 0 && s <= 8*128 && t.Alignment()%8 == 0 && !config.noDuffDevice && logLargeCopy(v, s)
	// result: (DUFFCOPY [16 * (128 - s/8)] dst src mem)
	for {
		s := auxIntToInt64(v.AuxInt)
		t := auxToType(v.Aux)
		dst := v_0
		src := v_1
		mem := v_2
		if !(s%8 == 0 && s <= 8*128 && t.Alignment()%8 == 0 && !config.noDuffDevice && logLargeCopy(v, s)) {
			break
		}
		v.reset(OpRISCV64DUFFCOPY)
		v.AuxInt = int64ToAuxInt(16 * (128 - s/8))
		v.AddArg3(dst, src, mem)
		return true
	}
	// match: (Move [s] {t} dst src mem)
	// cond: (s <= 16 || logLargeCopy(v, s))
	// result: (LoweredMove [t.Alignment()] dst src (ADDI <src.Type> [s-moveSize(t.Alignment(), config)] src) mem)
	for {
		s := auxIntToInt64(v.AuxInt)
		t := auxToType(v.Aux)
		dst := v_0
		src := v_1
		mem := v_2
		if !(s <= 16 || logLargeCopy(v, s)) {
			break
		}
		v.reset(OpRISCV64LoweredMove)
		v.AuxInt = int64ToAuxInt(t.Alignment())
		v0 := b.NewValue0(v.Pos, OpRISCV64ADDI, src.Type)
		v0.AuxInt = int64ToAuxInt(s - moveSize(t.Alignment(), config))
		v0.AddArg(src)
		v.AddArg4(dst, src, v0, mem)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpMul16 lowers 16-bit multiplication to MULW on
// sign-extended 32-bit operands (the low 16 bits of the product are the
// same regardless of extension). This rewrite always fires.
func rewriteValueRISCV64_OpMul16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Mul16 x y)
	// result: (MULW (SignExt16to32 x) (SignExt16to32 y))
	for {
		x := v_0
		y := v_1
		v.reset(OpRISCV64MULW)
		v0 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32)
		v1.AddArg(y)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueRISCV64_OpMul8 lowers 8-bit multiplication to MULW on
// sign-extended 32-bit operands (the low 8 bits of the product are the
// same regardless of extension). This rewrite always fires.
func rewriteValueRISCV64_OpMul8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Mul8 x y)
	// result: (MULW (SignExt8to32 x) (SignExt8to32 y))
	for {
		x := v_0
		y := v_1
		v.reset(OpRISCV64MULW)
		v0 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32)
		v1.AddArg(y)
		v.AddArg2(v0, v1)
		return true
	}
}
// rewriteValueRISCV64_OpNeq16 lowers Neq16 as the negation of Eq16; the
// generic Eq16 is then lowered by its own rule. This rewrite always fires.
func rewriteValueRISCV64_OpNeq16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Neq16 x y)
	// result: (Not (Eq16 x y))
	for {
		x := v_0
		y := v_1
		v.reset(OpNot)
		v0 := b.NewValue0(v.Pos, OpEq16, typ.Bool)
		v0.AddArg2(x, y)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueRISCV64_OpNeq32 lowers Neq32 as the negation of Eq32; the
// generic Eq32 is then lowered by its own rule. This rewrite always fires.
func rewriteValueRISCV64_OpNeq32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Neq32 x y)
	// result: (Not (Eq32 x y))
	for {
		x := v_0
		y := v_1
		v.reset(OpNot)
		v0 := b.NewValue0(v.Pos, OpEq32, typ.Bool)
		v0.AddArg2(x, y)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueRISCV64_OpNeq64 lowers Neq64 as the negation of Eq64; the
// generic Eq64 is then lowered by its own rule. This rewrite always fires.
func rewriteValueRISCV64_OpNeq64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Neq64 x y)
	// result: (Not (Eq64 x y))
	for {
		x := v_0
		y := v_1
		v.reset(OpNot)
		v0 := b.NewValue0(v.Pos, OpEq64, typ.Bool)
		v0.AddArg2(x, y)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueRISCV64_OpNeq8 lowers Neq8 as the boolean negation of
// Eq8; the Eq8/Not ops are themselves lowered by further rewrite passes.
func rewriteValueRISCV64_OpNeq8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Neq8 x y)
	// result: (Not (Eq8 x y))
	for {
		x := v_0
		y := v_1
		v.reset(OpNot)
		v0 := b.NewValue0(v.Pos, OpEq8, typ.Bool)
		v0.AddArg2(x, y)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueRISCV64_OpNeqB lowers boolean inequality. Booleans are
// stored as 0/1, so x-y is nonzero exactly when the operands differ;
// SNEZ then converts that difference to a 0/1 result. The SUB is given
// typ.Bool deliberately, matching the generator rule.
func rewriteValueRISCV64_OpNeqB(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (NeqB x y)
	// result: (SNEZ (SUB <typ.Bool> x y))
	for {
		x := v_0
		y := v_1
		v.reset(OpRISCV64SNEZ)
		v0 := b.NewValue0(v.Pos, OpRISCV64SUB, typ.Bool)
		v0.AddArg2(x, y)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueRISCV64_OpNeqPtr lowers pointer inequality as the boolean
// negation of EqPtr; both ops are lowered by further rewrite passes.
func rewriteValueRISCV64_OpNeqPtr(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (NeqPtr x y)
	// result: (Not (EqPtr x y))
	for {
		x := v_0
		y := v_1
		v.reset(OpNot)
		v0 := b.NewValue0(v.Pos, OpEqPtr, typ.Bool)
		v0.AddArg2(x, y)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueRISCV64_OpOffPtr lowers OffPtr (pointer + constant offset).
// Rules are tried in order:
//  1. offset from SP with a 32-bit offset -> MOVaddr (stack address);
//  2. any base with a 32-bit offset -> ADDI immediate add;
//  3. otherwise materialize the offset in a register and ADD.
func rewriteValueRISCV64_OpOffPtr(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (OffPtr [off] ptr:(SP))
	// cond: is32Bit(off)
	// result: (MOVaddr [int32(off)] ptr)
	for {
		off := auxIntToInt64(v.AuxInt)
		ptr := v_0
		if ptr.Op != OpSP || !(is32Bit(off)) {
			break
		}
		v.reset(OpRISCV64MOVaddr)
		v.AuxInt = int32ToAuxInt(int32(off))
		v.AddArg(ptr)
		return true
	}
	// match: (OffPtr [off] ptr)
	// cond: is32Bit(off)
	// result: (ADDI [off] ptr)
	for {
		off := auxIntToInt64(v.AuxInt)
		ptr := v_0
		if !(is32Bit(off)) {
			break
		}
		v.reset(OpRISCV64ADDI)
		v.AuxInt = int64ToAuxInt(off)
		v.AddArg(ptr)
		return true
	}
	// match: (OffPtr [off] ptr)
	// result: (ADD (MOVDconst [off]) ptr)
	for {
		off := auxIntToInt64(v.AuxInt)
		ptr := v_0
		v.reset(OpRISCV64ADD)
		v0 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(off)
		v.AddArg2(v0, ptr)
		return true
	}
}
// rewriteValueRISCV64_OpPanicBounds lowers PanicBounds to one of three
// architecture-specific panic helpers (A/B/C), selected by boundsABI(kind),
// which determines the register convention used to pass x and y.
func rewriteValueRISCV64_OpPanicBounds(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (PanicBounds [kind] x y mem)
	// cond: boundsABI(kind) == 0
	// result: (LoweredPanicBoundsA [kind] x y mem)
	for {
		kind := auxIntToInt64(v.AuxInt)
		x := v_0
		y := v_1
		mem := v_2
		if !(boundsABI(kind) == 0) {
			break
		}
		v.reset(OpRISCV64LoweredPanicBoundsA)
		v.AuxInt = int64ToAuxInt(kind)
		v.AddArg3(x, y, mem)
		return true
	}
	// match: (PanicBounds [kind] x y mem)
	// cond: boundsABI(kind) == 1
	// result: (LoweredPanicBoundsB [kind] x y mem)
	for {
		kind := auxIntToInt64(v.AuxInt)
		x := v_0
		y := v_1
		mem := v_2
		if !(boundsABI(kind) == 1) {
			break
		}
		v.reset(OpRISCV64LoweredPanicBoundsB)
		v.AuxInt = int64ToAuxInt(kind)
		v.AddArg3(x, y, mem)
		return true
	}
	// match: (PanicBounds [kind] x y mem)
	// cond: boundsABI(kind) == 2
	// result: (LoweredPanicBoundsC [kind] x y mem)
	for {
		kind := auxIntToInt64(v.AuxInt)
		x := v_0
		y := v_1
		mem := v_2
		if !(boundsABI(kind) == 2) {
			break
		}
		v.reset(OpRISCV64LoweredPanicBoundsC)
		v.AuxInt = int64ToAuxInt(kind)
		v.AddArg3(x, y, mem)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64ADD folds a constant operand of ADD into
// an ADDI immediate. The inner loop tries both operand orders (ADD is
// commutative). The constant must fit in 32 bits, and its type must not
// be a pointer (per the generator rule's !t.IsPtr() condition).
func rewriteValueRISCV64_OpRISCV64ADD(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (ADD (MOVDconst <t> [val]) x)
	// cond: is32Bit(val) && !t.IsPtr()
	// result: (ADDI [val] x)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpRISCV64MOVDconst {
				continue
			}
			t := v_0.Type
			val := auxIntToInt64(v_0.AuxInt)
			x := v_1
			if !(is32Bit(val) && !t.IsPtr()) {
				continue
			}
			v.reset(OpRISCV64ADDI)
			v.AuxInt = int64ToAuxInt(val)
			v.AddArg(x)
			return true
		}
		break
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64ADDI simplifies ADDI (add-immediate):
//  1. fold the immediate into a MOVaddr's offset when the sum fits 32 bits;
//  2. ADDI [0] is the identity;
//  3. constant-fold ADDI of a MOVDconst;
//  4. merge two stacked ADDIs into one.
// Each overflow-sensitive rule is guarded by is32Bit on the combined value.
func rewriteValueRISCV64_OpRISCV64ADDI(v *Value) bool {
	v_0 := v.Args[0]
	// match: (ADDI [c] (MOVaddr [d] {s} x))
	// cond: is32Bit(c+int64(d))
	// result: (MOVaddr [int32(c)+d] {s} x)
	for {
		c := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpRISCV64MOVaddr {
			break
		}
		d := auxIntToInt32(v_0.AuxInt)
		s := auxToSym(v_0.Aux)
		x := v_0.Args[0]
		if !(is32Bit(c + int64(d))) {
			break
		}
		v.reset(OpRISCV64MOVaddr)
		v.AuxInt = int32ToAuxInt(int32(c) + d)
		v.Aux = symToAux(s)
		v.AddArg(x)
		return true
	}
	// match: (ADDI [0] x)
	// result: x
	for {
		if auxIntToInt64(v.AuxInt) != 0 {
			break
		}
		x := v_0
		v.copyOf(x)
		return true
	}
	// match: (ADDI [x] (MOVDconst [y]))
	// cond: is32Bit(x + y)
	// result: (MOVDconst [x + y])
	for {
		x := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpRISCV64MOVDconst {
			break
		}
		y := auxIntToInt64(v_0.AuxInt)
		if !(is32Bit(x + y)) {
			break
		}
		v.reset(OpRISCV64MOVDconst)
		v.AuxInt = int64ToAuxInt(x + y)
		return true
	}
	// match: (ADDI [x] (ADDI [y] z))
	// cond: is32Bit(x + y)
	// result: (ADDI [x + y] z)
	for {
		x := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpRISCV64ADDI {
			break
		}
		y := auxIntToInt64(v_0.AuxInt)
		z := v_0.Args[0]
		if !(is32Bit(x + y)) {
			break
		}
		v.reset(OpRISCV64ADDI)
		v.AuxInt = int64ToAuxInt(x + y)
		v.AddArg(z)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64AND folds a 32-bit constant operand of AND
// into an ANDI immediate. The inner loop tries both operand orders (AND
// is commutative).
func rewriteValueRISCV64_OpRISCV64AND(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (AND (MOVDconst [val]) x)
	// cond: is32Bit(val)
	// result: (ANDI [val] x)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpRISCV64MOVDconst {
				continue
			}
			val := auxIntToInt64(v_0.AuxInt)
			x := v_1
			if !(is32Bit(val)) {
				continue
			}
			v.reset(OpRISCV64ANDI)
			v.AuxInt = int64ToAuxInt(val)
			v.AddArg(x)
			return true
		}
		break
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64ANDI simplifies ANDI (and-immediate):
//  1. masking with 0 yields the constant 0;
//  2. masking with -1 (all ones) is the identity;
//  3. constant-fold ANDI of a MOVDconst;
//  4. merge two stacked ANDIs by intersecting their masks.
func rewriteValueRISCV64_OpRISCV64ANDI(v *Value) bool {
	v_0 := v.Args[0]
	// match: (ANDI [0] x)
	// result: (MOVDconst [0])
	for {
		if auxIntToInt64(v.AuxInt) != 0 {
			break
		}
		v.reset(OpRISCV64MOVDconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	// match: (ANDI [-1] x)
	// result: x
	for {
		if auxIntToInt64(v.AuxInt) != -1 {
			break
		}
		x := v_0
		v.copyOf(x)
		return true
	}
	// match: (ANDI [x] (MOVDconst [y]))
	// result: (MOVDconst [x & y])
	for {
		x := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpRISCV64MOVDconst {
			break
		}
		y := auxIntToInt64(v_0.AuxInt)
		v.reset(OpRISCV64MOVDconst)
		v.AuxInt = int64ToAuxInt(x & y)
		return true
	}
	// match: (ANDI [x] (ANDI [y] z))
	// result: (ANDI [x & y] z)
	for {
		x := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpRISCV64ANDI {
			break
		}
		y := auxIntToInt64(v_0.AuxInt)
		z := v_0.Args[0]
		v.reset(OpRISCV64ANDI)
		v.AuxInt = int64ToAuxInt(x & y)
		v.AddArg(z)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64FADDD fuses a double-precision add of a
// multiply into FMADDD (a + x*y -> fma(x, y, a)), trying both operand
// orders, when useFMA permits fusing for this value.
func rewriteValueRISCV64_OpRISCV64FADDD(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (FADDD a (FMULD x y))
	// cond: a.Block.Func.useFMA(v)
	// result: (FMADDD x y a)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			a := v_0
			if v_1.Op != OpRISCV64FMULD {
				continue
			}
			y := v_1.Args[1]
			x := v_1.Args[0]
			if !(a.Block.Func.useFMA(v)) {
				continue
			}
			v.reset(OpRISCV64FMADDD)
			v.AddArg3(x, y, a)
			return true
		}
		break
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64FADDS fuses a single-precision add of a
// multiply into FMADDS (a + x*y -> fma(x, y, a)), trying both operand
// orders, when useFMA permits fusing for this value.
func rewriteValueRISCV64_OpRISCV64FADDS(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (FADDS a (FMULS x y))
	// cond: a.Block.Func.useFMA(v)
	// result: (FMADDS x y a)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			a := v_0
			if v_1.Op != OpRISCV64FMULS {
				continue
			}
			y := v_1.Args[1]
			x := v_1.Args[0]
			if !(a.Block.Func.useFMA(v)) {
				continue
			}
			v.reset(OpRISCV64FMADDS)
			v.AddArg3(x, y, a)
			return true
		}
		break
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64FMADDD absorbs single-use FNEGD operands
// into the fused op: negating a multiplicand turns FMADDD into FNMSUBD,
// and negating the addend turns it into FMSUBD. neg.Uses == 1 ensures the
// negation is not needed elsewhere.
func rewriteValueRISCV64_OpRISCV64FMADDD(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (FMADDD neg:(FNEGD x) y z)
	// cond: neg.Uses == 1
	// result: (FNMSUBD x y z)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			neg := v_0
			if neg.Op != OpRISCV64FNEGD {
				continue
			}
			x := neg.Args[0]
			y := v_1
			z := v_2
			if !(neg.Uses == 1) {
				continue
			}
			v.reset(OpRISCV64FNMSUBD)
			v.AddArg3(x, y, z)
			return true
		}
		break
	}
	// match: (FMADDD x y neg:(FNEGD z))
	// cond: neg.Uses == 1
	// result: (FMSUBD x y z)
	for {
		x := v_0
		y := v_1
		neg := v_2
		if neg.Op != OpRISCV64FNEGD {
			break
		}
		z := neg.Args[0]
		if !(neg.Uses == 1) {
			break
		}
		v.reset(OpRISCV64FMSUBD)
		v.AddArg3(x, y, z)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64FMADDS absorbs single-use FNEGS operands
// into the fused op: negating a multiplicand turns FMADDS into FNMSUBS,
// and negating the addend turns it into FMSUBS. neg.Uses == 1 ensures the
// negation is not needed elsewhere.
func rewriteValueRISCV64_OpRISCV64FMADDS(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (FMADDS neg:(FNEGS x) y z)
	// cond: neg.Uses == 1
	// result: (FNMSUBS x y z)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			neg := v_0
			if neg.Op != OpRISCV64FNEGS {
				continue
			}
			x := neg.Args[0]
			y := v_1
			z := v_2
			if !(neg.Uses == 1) {
				continue
			}
			v.reset(OpRISCV64FNMSUBS)
			v.AddArg3(x, y, z)
			return true
		}
		break
	}
	// match: (FMADDS x y neg:(FNEGS z))
	// cond: neg.Uses == 1
	// result: (FMSUBS x y z)
	for {
		x := v_0
		y := v_1
		neg := v_2
		if neg.Op != OpRISCV64FNEGS {
			break
		}
		z := neg.Args[0]
		if !(neg.Uses == 1) {
			break
		}
		v.reset(OpRISCV64FMSUBS)
		v.AddArg3(x, y, z)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64FMSUBD absorbs single-use FNEGD operands:
// negating a multiplicand turns FMSUBD into FNMADDD, and negating the
// subtrahend turns it back into FMADDD. neg.Uses == 1 ensures the
// negation is not needed elsewhere.
func rewriteValueRISCV64_OpRISCV64FMSUBD(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (FMSUBD neg:(FNEGD x) y z)
	// cond: neg.Uses == 1
	// result: (FNMADDD x y z)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			neg := v_0
			if neg.Op != OpRISCV64FNEGD {
				continue
			}
			x := neg.Args[0]
			y := v_1
			z := v_2
			if !(neg.Uses == 1) {
				continue
			}
			v.reset(OpRISCV64FNMADDD)
			v.AddArg3(x, y, z)
			return true
		}
		break
	}
	// match: (FMSUBD x y neg:(FNEGD z))
	// cond: neg.Uses == 1
	// result: (FMADDD x y z)
	for {
		x := v_0
		y := v_1
		neg := v_2
		if neg.Op != OpRISCV64FNEGD {
			break
		}
		z := neg.Args[0]
		if !(neg.Uses == 1) {
			break
		}
		v.reset(OpRISCV64FMADDD)
		v.AddArg3(x, y, z)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64FMSUBS absorbs single-use FNEGS operands:
// negating a multiplicand turns FMSUBS into FNMADDS, and negating the
// subtrahend turns it back into FMADDS. neg.Uses == 1 ensures the
// negation is not needed elsewhere.
func rewriteValueRISCV64_OpRISCV64FMSUBS(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (FMSUBS neg:(FNEGS x) y z)
	// cond: neg.Uses == 1
	// result: (FNMADDS x y z)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			neg := v_0
			if neg.Op != OpRISCV64FNEGS {
				continue
			}
			x := neg.Args[0]
			y := v_1
			z := v_2
			if !(neg.Uses == 1) {
				continue
			}
			v.reset(OpRISCV64FNMADDS)
			v.AddArg3(x, y, z)
			return true
		}
		break
	}
	// match: (FMSUBS x y neg:(FNEGS z))
	// cond: neg.Uses == 1
	// result: (FMADDS x y z)
	for {
		x := v_0
		y := v_1
		neg := v_2
		if neg.Op != OpRISCV64FNEGS {
			break
		}
		z := neg.Args[0]
		if !(neg.Uses == 1) {
			break
		}
		v.reset(OpRISCV64FMADDS)
		v.AddArg3(x, y, z)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64FNMADDD absorbs single-use FNEGD operands:
// negating a multiplicand turns FNMADDD into FMSUBD, and negating the
// addend turns it into FNMSUBD. neg.Uses == 1 ensures the negation is
// not needed elsewhere.
func rewriteValueRISCV64_OpRISCV64FNMADDD(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (FNMADDD neg:(FNEGD x) y z)
	// cond: neg.Uses == 1
	// result: (FMSUBD x y z)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			neg := v_0
			if neg.Op != OpRISCV64FNEGD {
				continue
			}
			x := neg.Args[0]
			y := v_1
			z := v_2
			if !(neg.Uses == 1) {
				continue
			}
			v.reset(OpRISCV64FMSUBD)
			v.AddArg3(x, y, z)
			return true
		}
		break
	}
	// match: (FNMADDD x y neg:(FNEGD z))
	// cond: neg.Uses == 1
	// result: (FNMSUBD x y z)
	for {
		x := v_0
		y := v_1
		neg := v_2
		if neg.Op != OpRISCV64FNEGD {
			break
		}
		z := neg.Args[0]
		if !(neg.Uses == 1) {
			break
		}
		v.reset(OpRISCV64FNMSUBD)
		v.AddArg3(x, y, z)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64FNMADDS absorbs single-use FNEGS operands:
// negating a multiplicand turns FNMADDS into FMSUBS, and negating the
// addend turns it into FNMSUBS. neg.Uses == 1 ensures the negation is
// not needed elsewhere.
func rewriteValueRISCV64_OpRISCV64FNMADDS(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (FNMADDS neg:(FNEGS x) y z)
	// cond: neg.Uses == 1
	// result: (FMSUBS x y z)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			neg := v_0
			if neg.Op != OpRISCV64FNEGS {
				continue
			}
			x := neg.Args[0]
			y := v_1
			z := v_2
			if !(neg.Uses == 1) {
				continue
			}
			v.reset(OpRISCV64FMSUBS)
			v.AddArg3(x, y, z)
			return true
		}
		break
	}
	// match: (FNMADDS x y neg:(FNEGS z))
	// cond: neg.Uses == 1
	// result: (FNMSUBS x y z)
	for {
		x := v_0
		y := v_1
		neg := v_2
		if neg.Op != OpRISCV64FNEGS {
			break
		}
		z := neg.Args[0]
		if !(neg.Uses == 1) {
			break
		}
		v.reset(OpRISCV64FNMSUBS)
		v.AddArg3(x, y, z)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64FNMSUBD absorbs single-use FNEGD operands:
// negating a multiplicand turns FNMSUBD into FMADDD, and negating the
// subtrahend turns it into FNMADDD. neg.Uses == 1 ensures the negation
// is not needed elsewhere.
func rewriteValueRISCV64_OpRISCV64FNMSUBD(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (FNMSUBD neg:(FNEGD x) y z)
	// cond: neg.Uses == 1
	// result: (FMADDD x y z)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			neg := v_0
			if neg.Op != OpRISCV64FNEGD {
				continue
			}
			x := neg.Args[0]
			y := v_1
			z := v_2
			if !(neg.Uses == 1) {
				continue
			}
			v.reset(OpRISCV64FMADDD)
			v.AddArg3(x, y, z)
			return true
		}
		break
	}
	// match: (FNMSUBD x y neg:(FNEGD z))
	// cond: neg.Uses == 1
	// result: (FNMADDD x y z)
	for {
		x := v_0
		y := v_1
		neg := v_2
		if neg.Op != OpRISCV64FNEGD {
			break
		}
		z := neg.Args[0]
		if !(neg.Uses == 1) {
			break
		}
		v.reset(OpRISCV64FNMADDD)
		v.AddArg3(x, y, z)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64FNMSUBS absorbs single-use FNEGS operands:
// negating a multiplicand turns FNMSUBS into FMADDS, and negating the
// subtrahend turns it into FNMADDS. neg.Uses == 1 ensures the negation
// is not needed elsewhere.
func rewriteValueRISCV64_OpRISCV64FNMSUBS(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (FNMSUBS neg:(FNEGS x) y z)
	// cond: neg.Uses == 1
	// result: (FMADDS x y z)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			neg := v_0
			if neg.Op != OpRISCV64FNEGS {
				continue
			}
			x := neg.Args[0]
			y := v_1
			z := v_2
			if !(neg.Uses == 1) {
				continue
			}
			v.reset(OpRISCV64FMADDS)
			v.AddArg3(x, y, z)
			return true
		}
		break
	}
	// match: (FNMSUBS x y neg:(FNEGS z))
	// cond: neg.Uses == 1
	// result: (FNMADDS x y z)
	for {
		x := v_0
		y := v_1
		neg := v_2
		if neg.Op != OpRISCV64FNEGS {
			break
		}
		z := neg.Args[0]
		if !(neg.Uses == 1) {
			break
		}
		v.reset(OpRISCV64FNMADDS)
		v.AddArg3(x, y, z)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64FSUBD fuses a double-precision subtract
// with a multiply when useFMA permits: a - x*y -> FNMSUBD, and
// x*y - a -> FMSUBD. Subtraction is not commutative, so the two operand
// orders are distinct rules rather than a commutative loop.
func rewriteValueRISCV64_OpRISCV64FSUBD(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (FSUBD a (FMULD x y))
	// cond: a.Block.Func.useFMA(v)
	// result: (FNMSUBD x y a)
	for {
		a := v_0
		if v_1.Op != OpRISCV64FMULD {
			break
		}
		y := v_1.Args[1]
		x := v_1.Args[0]
		if !(a.Block.Func.useFMA(v)) {
			break
		}
		v.reset(OpRISCV64FNMSUBD)
		v.AddArg3(x, y, a)
		return true
	}
	// match: (FSUBD (FMULD x y) a)
	// cond: a.Block.Func.useFMA(v)
	// result: (FMSUBD x y a)
	for {
		if v_0.Op != OpRISCV64FMULD {
			break
		}
		y := v_0.Args[1]
		x := v_0.Args[0]
		a := v_1
		if !(a.Block.Func.useFMA(v)) {
			break
		}
		v.reset(OpRISCV64FMSUBD)
		v.AddArg3(x, y, a)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64FSUBS fuses a single-precision subtract
// with a multiply when useFMA permits: a - x*y -> FNMSUBS, and
// x*y - a -> FMSUBS. Subtraction is not commutative, so the two operand
// orders are distinct rules rather than a commutative loop.
func rewriteValueRISCV64_OpRISCV64FSUBS(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (FSUBS a (FMULS x y))
	// cond: a.Block.Func.useFMA(v)
	// result: (FNMSUBS x y a)
	for {
		a := v_0
		if v_1.Op != OpRISCV64FMULS {
			break
		}
		y := v_1.Args[1]
		x := v_1.Args[0]
		if !(a.Block.Func.useFMA(v)) {
			break
		}
		v.reset(OpRISCV64FNMSUBS)
		v.AddArg3(x, y, a)
		return true
	}
	// match: (FSUBS (FMULS x y) a)
	// cond: a.Block.Func.useFMA(v)
	// result: (FMSUBS x y a)
	for {
		if v_0.Op != OpRISCV64FMULS {
			break
		}
		y := v_0.Args[1]
		x := v_0.Args[0]
		a := v_1
		if !(a.Block.Func.useFMA(v)) {
			break
		}
		v.reset(OpRISCV64FMSUBS)
		v.AddArg3(x, y, a)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64MOVBUload folds address arithmetic into
// the unsigned byte load's offset: a MOVaddr base merges its offset and
// symbol into the load, and an ADDI base merges its immediate. Both
// rules guard against 32-bit offset overflow with is32Bit.
func rewriteValueRISCV64_OpRISCV64MOVBUload(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (MOVBUload [off1] {sym1} (MOVaddr [off2] {sym2} base) mem)
	// cond: is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2)
	// result: (MOVBUload [off1+off2] {mergeSym(sym1,sym2)} base mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym1 := auxToSym(v.Aux)
		if v_0.Op != OpRISCV64MOVaddr {
			break
		}
		off2 := auxIntToInt32(v_0.AuxInt)
		sym2 := auxToSym(v_0.Aux)
		base := v_0.Args[0]
		mem := v_1
		if !(is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2)) {
			break
		}
		v.reset(OpRISCV64MOVBUload)
		v.AuxInt = int32ToAuxInt(off1 + off2)
		v.Aux = symToAux(mergeSym(sym1, sym2))
		v.AddArg2(base, mem)
		return true
	}
	// match: (MOVBUload [off1] {sym} (ADDI [off2] base) mem)
	// cond: is32Bit(int64(off1)+off2)
	// result: (MOVBUload [off1+int32(off2)] {sym} base mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpRISCV64ADDI {
			break
		}
		off2 := auxIntToInt64(v_0.AuxInt)
		base := v_0.Args[0]
		mem := v_1
		if !(is32Bit(int64(off1) + off2)) {
			break
		}
		v.reset(OpRISCV64MOVBUload)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(sym)
		v.AddArg2(base, mem)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64MOVBUreg eliminates redundant zero
// extensions of a byte. The rules fall into groups:
//   - results of FP/integer comparisons (FLES, FLTS, FEQS, FNES, FLED,
//     FLTD, FEQD, FNED, SEQZ, SNEZ, SLT, SLTU) are already 0/1, so the
//     extension is a no-op;
//   - ANDI with a mask already within 0..255 needs no extension, and a
//     negative ANDI mask is narrowed to its low 8 bits instead;
//   - MOVDconst is folded to its zero-extended low byte;
//   - values produced by ops that already zero-extend (MOVBUload,
//     atomic load8 / cas results, a previous MOVBUreg) become MOVDreg;
//   - finally, a single-use signed MOVBload feeding the extension is
//     replaced by an unsigned MOVBUload in the load's own block.
func rewriteValueRISCV64_OpRISCV64MOVBUreg(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	// match: (MOVBUreg x:(FLES _ _))
	// result: x
	for {
		x := v_0
		if x.Op != OpRISCV64FLES {
			break
		}
		v.copyOf(x)
		return true
	}
	// match: (MOVBUreg x:(FLTS _ _))
	// result: x
	for {
		x := v_0
		if x.Op != OpRISCV64FLTS {
			break
		}
		v.copyOf(x)
		return true
	}
	// match: (MOVBUreg x:(FEQS _ _))
	// result: x
	for {
		x := v_0
		if x.Op != OpRISCV64FEQS {
			break
		}
		v.copyOf(x)
		return true
	}
	// match: (MOVBUreg x:(FNES _ _))
	// result: x
	for {
		x := v_0
		if x.Op != OpRISCV64FNES {
			break
		}
		v.copyOf(x)
		return true
	}
	// match: (MOVBUreg x:(FLED _ _))
	// result: x
	for {
		x := v_0
		if x.Op != OpRISCV64FLED {
			break
		}
		v.copyOf(x)
		return true
	}
	// match: (MOVBUreg x:(FLTD _ _))
	// result: x
	for {
		x := v_0
		if x.Op != OpRISCV64FLTD {
			break
		}
		v.copyOf(x)
		return true
	}
	// match: (MOVBUreg x:(FEQD _ _))
	// result: x
	for {
		x := v_0
		if x.Op != OpRISCV64FEQD {
			break
		}
		v.copyOf(x)
		return true
	}
	// match: (MOVBUreg x:(FNED _ _))
	// result: x
	for {
		x := v_0
		if x.Op != OpRISCV64FNED {
			break
		}
		v.copyOf(x)
		return true
	}
	// match: (MOVBUreg x:(SEQZ _))
	// result: x
	for {
		x := v_0
		if x.Op != OpRISCV64SEQZ {
			break
		}
		v.copyOf(x)
		return true
	}
	// match: (MOVBUreg x:(SNEZ _))
	// result: x
	for {
		x := v_0
		if x.Op != OpRISCV64SNEZ {
			break
		}
		v.copyOf(x)
		return true
	}
	// match: (MOVBUreg x:(SLT _ _))
	// result: x
	for {
		x := v_0
		if x.Op != OpRISCV64SLT {
			break
		}
		v.copyOf(x)
		return true
	}
	// match: (MOVBUreg x:(SLTU _ _))
	// result: x
	for {
		x := v_0
		if x.Op != OpRISCV64SLTU {
			break
		}
		v.copyOf(x)
		return true
	}
	// match: (MOVBUreg x:(ANDI [c] y))
	// cond: c >= 0 && int64(uint8(c)) == c
	// result: x
	for {
		x := v_0
		if x.Op != OpRISCV64ANDI {
			break
		}
		c := auxIntToInt64(x.AuxInt)
		if !(c >= 0 && int64(uint8(c)) == c) {
			break
		}
		v.copyOf(x)
		return true
	}
	// match: (MOVBUreg (ANDI [c] x))
	// cond: c < 0
	// result: (ANDI [int64(uint8(c))] x)
	for {
		if v_0.Op != OpRISCV64ANDI {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		x := v_0.Args[0]
		if !(c < 0) {
			break
		}
		v.reset(OpRISCV64ANDI)
		v.AuxInt = int64ToAuxInt(int64(uint8(c)))
		v.AddArg(x)
		return true
	}
	// match: (MOVBUreg (MOVDconst [c]))
	// result: (MOVDconst [int64(uint8(c))])
	for {
		if v_0.Op != OpRISCV64MOVDconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		v.reset(OpRISCV64MOVDconst)
		v.AuxInt = int64ToAuxInt(int64(uint8(c)))
		return true
	}
	// match: (MOVBUreg x:(MOVBUload _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpRISCV64MOVBUload {
			break
		}
		v.reset(OpRISCV64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVBUreg x:(Select0 (LoweredAtomicLoad8 _ _)))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpSelect0 {
			break
		}
		x_0 := x.Args[0]
		if x_0.Op != OpRISCV64LoweredAtomicLoad8 {
			break
		}
		v.reset(OpRISCV64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVBUreg x:(Select0 (LoweredAtomicCas32 _ _ _ _)))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpSelect0 {
			break
		}
		x_0 := x.Args[0]
		if x_0.Op != OpRISCV64LoweredAtomicCas32 {
			break
		}
		v.reset(OpRISCV64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVBUreg x:(Select0 (LoweredAtomicCas64 _ _ _ _)))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpSelect0 {
			break
		}
		x_0 := x.Args[0]
		if x_0.Op != OpRISCV64LoweredAtomicCas64 {
			break
		}
		v.reset(OpRISCV64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVBUreg x:(MOVBUreg _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpRISCV64MOVBUreg {
			break
		}
		v.reset(OpRISCV64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVBUreg <t> x:(MOVBload [off] {sym} ptr mem))
	// cond: x.Uses == 1 && clobber(x)
	// result: @x.Block (MOVBUload <t> [off] {sym} ptr mem)
	for {
		t := v.Type
		x := v_0
		if x.Op != OpRISCV64MOVBload {
			break
		}
		off := auxIntToInt32(x.AuxInt)
		sym := auxToSym(x.Aux)
		mem := x.Args[1]
		ptr := x.Args[0]
		if !(x.Uses == 1 && clobber(x)) {
			break
		}
		// Build the replacement load in the original load's block so it
		// stays adjacent to its memory argument.
		b = x.Block
		v0 := b.NewValue0(x.Pos, OpRISCV64MOVBUload, t)
		v.copyOf(v0)
		v0.AuxInt = int32ToAuxInt(off)
		v0.Aux = symToAux(sym)
		v0.AddArg2(ptr, mem)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64MOVBload folds address arithmetic into
// the signed byte load's offset: a MOVaddr base merges its offset and
// symbol into the load, and an ADDI base merges its immediate. Both
// rules guard against 32-bit offset overflow with is32Bit.
func rewriteValueRISCV64_OpRISCV64MOVBload(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (MOVBload [off1] {sym1} (MOVaddr [off2] {sym2} base) mem)
	// cond: is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2)
	// result: (MOVBload [off1+off2] {mergeSym(sym1,sym2)} base mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym1 := auxToSym(v.Aux)
		if v_0.Op != OpRISCV64MOVaddr {
			break
		}
		off2 := auxIntToInt32(v_0.AuxInt)
		sym2 := auxToSym(v_0.Aux)
		base := v_0.Args[0]
		mem := v_1
		if !(is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2)) {
			break
		}
		v.reset(OpRISCV64MOVBload)
		v.AuxInt = int32ToAuxInt(off1 + off2)
		v.Aux = symToAux(mergeSym(sym1, sym2))
		v.AddArg2(base, mem)
		return true
	}
	// match: (MOVBload [off1] {sym} (ADDI [off2] base) mem)
	// cond: is32Bit(int64(off1)+off2)
	// result: (MOVBload [off1+int32(off2)] {sym} base mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpRISCV64ADDI {
			break
		}
		off2 := auxIntToInt64(v_0.AuxInt)
		base := v_0.Args[0]
		mem := v_1
		if !(is32Bit(int64(off1) + off2)) {
			break
		}
		v.reset(OpRISCV64MOVBload)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(sym)
		v.AddArg2(base, mem)
		return true
	}
	return false
}
  4201  func rewriteValueRISCV64_OpRISCV64MOVBreg(v *Value) bool {
  4202  	v_0 := v.Args[0]
  4203  	b := v.Block
  4204  	// match: (MOVBreg x:(ANDI [c] y))
  4205  	// cond: c >= 0 && int64(int8(c)) == c
  4206  	// result: x
  4207  	for {
  4208  		x := v_0
  4209  		if x.Op != OpRISCV64ANDI {
  4210  			break
  4211  		}
  4212  		c := auxIntToInt64(x.AuxInt)
  4213  		if !(c >= 0 && int64(int8(c)) == c) {
  4214  			break
  4215  		}
  4216  		v.copyOf(x)
  4217  		return true
  4218  	}
  4219  	// match: (MOVBreg (MOVDconst [c]))
  4220  	// result: (MOVDconst [int64(int8(c))])
  4221  	for {
  4222  		if v_0.Op != OpRISCV64MOVDconst {
  4223  			break
  4224  		}
  4225  		c := auxIntToInt64(v_0.AuxInt)
  4226  		v.reset(OpRISCV64MOVDconst)
  4227  		v.AuxInt = int64ToAuxInt(int64(int8(c)))
  4228  		return true
  4229  	}
  4230  	// match: (MOVBreg x:(MOVBload _ _))
  4231  	// result: (MOVDreg x)
  4232  	for {
  4233  		x := v_0
  4234  		if x.Op != OpRISCV64MOVBload {
  4235  			break
  4236  		}
  4237  		v.reset(OpRISCV64MOVDreg)
  4238  		v.AddArg(x)
  4239  		return true
  4240  	}
  4241  	// match: (MOVBreg x:(MOVBreg _))
  4242  	// result: (MOVDreg x)
  4243  	for {
  4244  		x := v_0
  4245  		if x.Op != OpRISCV64MOVBreg {
  4246  			break
  4247  		}
  4248  		v.reset(OpRISCV64MOVDreg)
  4249  		v.AddArg(x)
  4250  		return true
  4251  	}
  4252  	// match: (MOVBreg <t> x:(MOVBUload [off] {sym} ptr mem))
  4253  	// cond: x.Uses == 1 && clobber(x)
  4254  	// result: @x.Block (MOVBload <t> [off] {sym} ptr mem)
  4255  	for {
  4256  		t := v.Type
  4257  		x := v_0
  4258  		if x.Op != OpRISCV64MOVBUload {
  4259  			break
  4260  		}
  4261  		off := auxIntToInt32(x.AuxInt)
  4262  		sym := auxToSym(x.Aux)
  4263  		mem := x.Args[1]
  4264  		ptr := x.Args[0]
  4265  		if !(x.Uses == 1 && clobber(x)) {
  4266  			break
  4267  		}
  4268  		b = x.Block
  4269  		v0 := b.NewValue0(x.Pos, OpRISCV64MOVBload, t)
  4270  		v.copyOf(v0)
  4271  		v0.AuxInt = int32ToAuxInt(off)
  4272  		v0.Aux = symToAux(sym)
  4273  		v0.AddArg2(ptr, mem)
  4274  		return true
  4275  	}
  4276  	return false
  4277  }
  4278  func rewriteValueRISCV64_OpRISCV64MOVBstore(v *Value) bool {
  4279  	v_2 := v.Args[2]
  4280  	v_1 := v.Args[1]
  4281  	v_0 := v.Args[0]
  4282  	// match: (MOVBstore [off1] {sym1} (MOVaddr [off2] {sym2} base) val mem)
  4283  	// cond: is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2)
  4284  	// result: (MOVBstore [off1+off2] {mergeSym(sym1,sym2)} base val mem)
  4285  	for {
  4286  		off1 := auxIntToInt32(v.AuxInt)
  4287  		sym1 := auxToSym(v.Aux)
  4288  		if v_0.Op != OpRISCV64MOVaddr {
  4289  			break
  4290  		}
  4291  		off2 := auxIntToInt32(v_0.AuxInt)
  4292  		sym2 := auxToSym(v_0.Aux)
  4293  		base := v_0.Args[0]
  4294  		val := v_1
  4295  		mem := v_2
  4296  		if !(is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2)) {
  4297  			break
  4298  		}
  4299  		v.reset(OpRISCV64MOVBstore)
  4300  		v.AuxInt = int32ToAuxInt(off1 + off2)
  4301  		v.Aux = symToAux(mergeSym(sym1, sym2))
  4302  		v.AddArg3(base, val, mem)
  4303  		return true
  4304  	}
  4305  	// match: (MOVBstore [off1] {sym} (ADDI [off2] base) val mem)
  4306  	// cond: is32Bit(int64(off1)+off2)
  4307  	// result: (MOVBstore [off1+int32(off2)] {sym} base val mem)
  4308  	for {
  4309  		off1 := auxIntToInt32(v.AuxInt)
  4310  		sym := auxToSym(v.Aux)
  4311  		if v_0.Op != OpRISCV64ADDI {
  4312  			break
  4313  		}
  4314  		off2 := auxIntToInt64(v_0.AuxInt)
  4315  		base := v_0.Args[0]
  4316  		val := v_1
  4317  		mem := v_2
  4318  		if !(is32Bit(int64(off1) + off2)) {
  4319  			break
  4320  		}
  4321  		v.reset(OpRISCV64MOVBstore)
  4322  		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
  4323  		v.Aux = symToAux(sym)
  4324  		v.AddArg3(base, val, mem)
  4325  		return true
  4326  	}
  4327  	// match: (MOVBstore [off] {sym} ptr (MOVDconst [0]) mem)
  4328  	// result: (MOVBstorezero [off] {sym} ptr mem)
  4329  	for {
  4330  		off := auxIntToInt32(v.AuxInt)
  4331  		sym := auxToSym(v.Aux)
  4332  		ptr := v_0
  4333  		if v_1.Op != OpRISCV64MOVDconst || auxIntToInt64(v_1.AuxInt) != 0 {
  4334  			break
  4335  		}
  4336  		mem := v_2
  4337  		v.reset(OpRISCV64MOVBstorezero)
  4338  		v.AuxInt = int32ToAuxInt(off)
  4339  		v.Aux = symToAux(sym)
  4340  		v.AddArg2(ptr, mem)
  4341  		return true
  4342  	}
  4343  	// match: (MOVBstore [off] {sym} ptr (MOVBreg x) mem)
  4344  	// result: (MOVBstore [off] {sym} ptr x mem)
  4345  	for {
  4346  		off := auxIntToInt32(v.AuxInt)
  4347  		sym := auxToSym(v.Aux)
  4348  		ptr := v_0
  4349  		if v_1.Op != OpRISCV64MOVBreg {
  4350  			break
  4351  		}
  4352  		x := v_1.Args[0]
  4353  		mem := v_2
  4354  		v.reset(OpRISCV64MOVBstore)
  4355  		v.AuxInt = int32ToAuxInt(off)
  4356  		v.Aux = symToAux(sym)
  4357  		v.AddArg3(ptr, x, mem)
  4358  		return true
  4359  	}
  4360  	// match: (MOVBstore [off] {sym} ptr (MOVHreg x) mem)
  4361  	// result: (MOVBstore [off] {sym} ptr x mem)
  4362  	for {
  4363  		off := auxIntToInt32(v.AuxInt)
  4364  		sym := auxToSym(v.Aux)
  4365  		ptr := v_0
  4366  		if v_1.Op != OpRISCV64MOVHreg {
  4367  			break
  4368  		}
  4369  		x := v_1.Args[0]
  4370  		mem := v_2
  4371  		v.reset(OpRISCV64MOVBstore)
  4372  		v.AuxInt = int32ToAuxInt(off)
  4373  		v.Aux = symToAux(sym)
  4374  		v.AddArg3(ptr, x, mem)
  4375  		return true
  4376  	}
  4377  	// match: (MOVBstore [off] {sym} ptr (MOVWreg x) mem)
  4378  	// result: (MOVBstore [off] {sym} ptr x mem)
  4379  	for {
  4380  		off := auxIntToInt32(v.AuxInt)
  4381  		sym := auxToSym(v.Aux)
  4382  		ptr := v_0
  4383  		if v_1.Op != OpRISCV64MOVWreg {
  4384  			break
  4385  		}
  4386  		x := v_1.Args[0]
  4387  		mem := v_2
  4388  		v.reset(OpRISCV64MOVBstore)
  4389  		v.AuxInt = int32ToAuxInt(off)
  4390  		v.Aux = symToAux(sym)
  4391  		v.AddArg3(ptr, x, mem)
  4392  		return true
  4393  	}
  4394  	// match: (MOVBstore [off] {sym} ptr (MOVBUreg x) mem)
  4395  	// result: (MOVBstore [off] {sym} ptr x mem)
  4396  	for {
  4397  		off := auxIntToInt32(v.AuxInt)
  4398  		sym := auxToSym(v.Aux)
  4399  		ptr := v_0
  4400  		if v_1.Op != OpRISCV64MOVBUreg {
  4401  			break
  4402  		}
  4403  		x := v_1.Args[0]
  4404  		mem := v_2
  4405  		v.reset(OpRISCV64MOVBstore)
  4406  		v.AuxInt = int32ToAuxInt(off)
  4407  		v.Aux = symToAux(sym)
  4408  		v.AddArg3(ptr, x, mem)
  4409  		return true
  4410  	}
  4411  	// match: (MOVBstore [off] {sym} ptr (MOVHUreg x) mem)
  4412  	// result: (MOVBstore [off] {sym} ptr x mem)
  4413  	for {
  4414  		off := auxIntToInt32(v.AuxInt)
  4415  		sym := auxToSym(v.Aux)
  4416  		ptr := v_0
  4417  		if v_1.Op != OpRISCV64MOVHUreg {
  4418  			break
  4419  		}
  4420  		x := v_1.Args[0]
  4421  		mem := v_2
  4422  		v.reset(OpRISCV64MOVBstore)
  4423  		v.AuxInt = int32ToAuxInt(off)
  4424  		v.Aux = symToAux(sym)
  4425  		v.AddArg3(ptr, x, mem)
  4426  		return true
  4427  	}
  4428  	// match: (MOVBstore [off] {sym} ptr (MOVWUreg x) mem)
  4429  	// result: (MOVBstore [off] {sym} ptr x mem)
  4430  	for {
  4431  		off := auxIntToInt32(v.AuxInt)
  4432  		sym := auxToSym(v.Aux)
  4433  		ptr := v_0
  4434  		if v_1.Op != OpRISCV64MOVWUreg {
  4435  			break
  4436  		}
  4437  		x := v_1.Args[0]
  4438  		mem := v_2
  4439  		v.reset(OpRISCV64MOVBstore)
  4440  		v.AuxInt = int32ToAuxInt(off)
  4441  		v.Aux = symToAux(sym)
  4442  		v.AddArg3(ptr, x, mem)
  4443  		return true
  4444  	}
  4445  	return false
  4446  }
  4447  func rewriteValueRISCV64_OpRISCV64MOVBstorezero(v *Value) bool {
  4448  	v_1 := v.Args[1]
  4449  	v_0 := v.Args[0]
  4450  	// match: (MOVBstorezero [off1] {sym1} (MOVaddr [off2] {sym2} ptr) mem)
  4451  	// cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2))
  4452  	// result: (MOVBstorezero [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
  4453  	for {
  4454  		off1 := auxIntToInt32(v.AuxInt)
  4455  		sym1 := auxToSym(v.Aux)
  4456  		if v_0.Op != OpRISCV64MOVaddr {
  4457  			break
  4458  		}
  4459  		off2 := auxIntToInt32(v_0.AuxInt)
  4460  		sym2 := auxToSym(v_0.Aux)
  4461  		ptr := v_0.Args[0]
  4462  		mem := v_1
  4463  		if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2))) {
  4464  			break
  4465  		}
  4466  		v.reset(OpRISCV64MOVBstorezero)
  4467  		v.AuxInt = int32ToAuxInt(off1 + off2)
  4468  		v.Aux = symToAux(mergeSym(sym1, sym2))
  4469  		v.AddArg2(ptr, mem)
  4470  		return true
  4471  	}
  4472  	// match: (MOVBstorezero [off1] {sym} (ADDI [off2] ptr) mem)
  4473  	// cond: is32Bit(int64(off1)+off2)
  4474  	// result: (MOVBstorezero [off1+int32(off2)] {sym} ptr mem)
  4475  	for {
  4476  		off1 := auxIntToInt32(v.AuxInt)
  4477  		sym := auxToSym(v.Aux)
  4478  		if v_0.Op != OpRISCV64ADDI {
  4479  			break
  4480  		}
  4481  		off2 := auxIntToInt64(v_0.AuxInt)
  4482  		ptr := v_0.Args[0]
  4483  		mem := v_1
  4484  		if !(is32Bit(int64(off1) + off2)) {
  4485  			break
  4486  		}
  4487  		v.reset(OpRISCV64MOVBstorezero)
  4488  		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
  4489  		v.Aux = symToAux(sym)
  4490  		v.AddArg2(ptr, mem)
  4491  		return true
  4492  	}
  4493  	return false
  4494  }
  4495  func rewriteValueRISCV64_OpRISCV64MOVDload(v *Value) bool {
  4496  	v_1 := v.Args[1]
  4497  	v_0 := v.Args[0]
  4498  	// match: (MOVDload [off1] {sym1} (MOVaddr [off2] {sym2} base) mem)
  4499  	// cond: is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2)
  4500  	// result: (MOVDload [off1+off2] {mergeSym(sym1,sym2)} base mem)
  4501  	for {
  4502  		off1 := auxIntToInt32(v.AuxInt)
  4503  		sym1 := auxToSym(v.Aux)
  4504  		if v_0.Op != OpRISCV64MOVaddr {
  4505  			break
  4506  		}
  4507  		off2 := auxIntToInt32(v_0.AuxInt)
  4508  		sym2 := auxToSym(v_0.Aux)
  4509  		base := v_0.Args[0]
  4510  		mem := v_1
  4511  		if !(is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2)) {
  4512  			break
  4513  		}
  4514  		v.reset(OpRISCV64MOVDload)
  4515  		v.AuxInt = int32ToAuxInt(off1 + off2)
  4516  		v.Aux = symToAux(mergeSym(sym1, sym2))
  4517  		v.AddArg2(base, mem)
  4518  		return true
  4519  	}
  4520  	// match: (MOVDload [off1] {sym} (ADDI [off2] base) mem)
  4521  	// cond: is32Bit(int64(off1)+off2)
  4522  	// result: (MOVDload [off1+int32(off2)] {sym} base mem)
  4523  	for {
  4524  		off1 := auxIntToInt32(v.AuxInt)
  4525  		sym := auxToSym(v.Aux)
  4526  		if v_0.Op != OpRISCV64ADDI {
  4527  			break
  4528  		}
  4529  		off2 := auxIntToInt64(v_0.AuxInt)
  4530  		base := v_0.Args[0]
  4531  		mem := v_1
  4532  		if !(is32Bit(int64(off1) + off2)) {
  4533  			break
  4534  		}
  4535  		v.reset(OpRISCV64MOVDload)
  4536  		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
  4537  		v.Aux = symToAux(sym)
  4538  		v.AddArg2(base, mem)
  4539  		return true
  4540  	}
  4541  	return false
  4542  }
  4543  func rewriteValueRISCV64_OpRISCV64MOVDnop(v *Value) bool {
  4544  	v_0 := v.Args[0]
  4545  	// match: (MOVDnop (MOVDconst [c]))
  4546  	// result: (MOVDconst [c])
  4547  	for {
  4548  		if v_0.Op != OpRISCV64MOVDconst {
  4549  			break
  4550  		}
  4551  		c := auxIntToInt64(v_0.AuxInt)
  4552  		v.reset(OpRISCV64MOVDconst)
  4553  		v.AuxInt = int64ToAuxInt(c)
  4554  		return true
  4555  	}
  4556  	return false
  4557  }
  4558  func rewriteValueRISCV64_OpRISCV64MOVDreg(v *Value) bool {
  4559  	v_0 := v.Args[0]
  4560  	// match: (MOVDreg x)
  4561  	// cond: x.Uses == 1
  4562  	// result: (MOVDnop x)
  4563  	for {
  4564  		x := v_0
  4565  		if !(x.Uses == 1) {
  4566  			break
  4567  		}
  4568  		v.reset(OpRISCV64MOVDnop)
  4569  		v.AddArg(x)
  4570  		return true
  4571  	}
  4572  	return false
  4573  }
  4574  func rewriteValueRISCV64_OpRISCV64MOVDstore(v *Value) bool {
  4575  	v_2 := v.Args[2]
  4576  	v_1 := v.Args[1]
  4577  	v_0 := v.Args[0]
  4578  	// match: (MOVDstore [off1] {sym1} (MOVaddr [off2] {sym2} base) val mem)
  4579  	// cond: is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2)
  4580  	// result: (MOVDstore [off1+off2] {mergeSym(sym1,sym2)} base val mem)
  4581  	for {
  4582  		off1 := auxIntToInt32(v.AuxInt)
  4583  		sym1 := auxToSym(v.Aux)
  4584  		if v_0.Op != OpRISCV64MOVaddr {
  4585  			break
  4586  		}
  4587  		off2 := auxIntToInt32(v_0.AuxInt)
  4588  		sym2 := auxToSym(v_0.Aux)
  4589  		base := v_0.Args[0]
  4590  		val := v_1
  4591  		mem := v_2
  4592  		if !(is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2)) {
  4593  			break
  4594  		}
  4595  		v.reset(OpRISCV64MOVDstore)
  4596  		v.AuxInt = int32ToAuxInt(off1 + off2)
  4597  		v.Aux = symToAux(mergeSym(sym1, sym2))
  4598  		v.AddArg3(base, val, mem)
  4599  		return true
  4600  	}
  4601  	// match: (MOVDstore [off1] {sym} (ADDI [off2] base) val mem)
  4602  	// cond: is32Bit(int64(off1)+off2)
  4603  	// result: (MOVDstore [off1+int32(off2)] {sym} base val mem)
  4604  	for {
  4605  		off1 := auxIntToInt32(v.AuxInt)
  4606  		sym := auxToSym(v.Aux)
  4607  		if v_0.Op != OpRISCV64ADDI {
  4608  			break
  4609  		}
  4610  		off2 := auxIntToInt64(v_0.AuxInt)
  4611  		base := v_0.Args[0]
  4612  		val := v_1
  4613  		mem := v_2
  4614  		if !(is32Bit(int64(off1) + off2)) {
  4615  			break
  4616  		}
  4617  		v.reset(OpRISCV64MOVDstore)
  4618  		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
  4619  		v.Aux = symToAux(sym)
  4620  		v.AddArg3(base, val, mem)
  4621  		return true
  4622  	}
  4623  	// match: (MOVDstore [off] {sym} ptr (MOVDconst [0]) mem)
  4624  	// result: (MOVDstorezero [off] {sym} ptr mem)
  4625  	for {
  4626  		off := auxIntToInt32(v.AuxInt)
  4627  		sym := auxToSym(v.Aux)
  4628  		ptr := v_0
  4629  		if v_1.Op != OpRISCV64MOVDconst || auxIntToInt64(v_1.AuxInt) != 0 {
  4630  			break
  4631  		}
  4632  		mem := v_2
  4633  		v.reset(OpRISCV64MOVDstorezero)
  4634  		v.AuxInt = int32ToAuxInt(off)
  4635  		v.Aux = symToAux(sym)
  4636  		v.AddArg2(ptr, mem)
  4637  		return true
  4638  	}
  4639  	return false
  4640  }
  4641  func rewriteValueRISCV64_OpRISCV64MOVDstorezero(v *Value) bool {
  4642  	v_1 := v.Args[1]
  4643  	v_0 := v.Args[0]
  4644  	// match: (MOVDstorezero [off1] {sym1} (MOVaddr [off2] {sym2} ptr) mem)
  4645  	// cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2))
  4646  	// result: (MOVDstorezero [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
  4647  	for {
  4648  		off1 := auxIntToInt32(v.AuxInt)
  4649  		sym1 := auxToSym(v.Aux)
  4650  		if v_0.Op != OpRISCV64MOVaddr {
  4651  			break
  4652  		}
  4653  		off2 := auxIntToInt32(v_0.AuxInt)
  4654  		sym2 := auxToSym(v_0.Aux)
  4655  		ptr := v_0.Args[0]
  4656  		mem := v_1
  4657  		if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2))) {
  4658  			break
  4659  		}
  4660  		v.reset(OpRISCV64MOVDstorezero)
  4661  		v.AuxInt = int32ToAuxInt(off1 + off2)
  4662  		v.Aux = symToAux(mergeSym(sym1, sym2))
  4663  		v.AddArg2(ptr, mem)
  4664  		return true
  4665  	}
  4666  	// match: (MOVDstorezero [off1] {sym} (ADDI [off2] ptr) mem)
  4667  	// cond: is32Bit(int64(off1)+off2)
  4668  	// result: (MOVDstorezero [off1+int32(off2)] {sym} ptr mem)
  4669  	for {
  4670  		off1 := auxIntToInt32(v.AuxInt)
  4671  		sym := auxToSym(v.Aux)
  4672  		if v_0.Op != OpRISCV64ADDI {
  4673  			break
  4674  		}
  4675  		off2 := auxIntToInt64(v_0.AuxInt)
  4676  		ptr := v_0.Args[0]
  4677  		mem := v_1
  4678  		if !(is32Bit(int64(off1) + off2)) {
  4679  			break
  4680  		}
  4681  		v.reset(OpRISCV64MOVDstorezero)
  4682  		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
  4683  		v.Aux = symToAux(sym)
  4684  		v.AddArg2(ptr, mem)
  4685  		return true
  4686  	}
  4687  	return false
  4688  }
  4689  func rewriteValueRISCV64_OpRISCV64MOVHUload(v *Value) bool {
  4690  	v_1 := v.Args[1]
  4691  	v_0 := v.Args[0]
  4692  	// match: (MOVHUload [off1] {sym1} (MOVaddr [off2] {sym2} base) mem)
  4693  	// cond: is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2)
  4694  	// result: (MOVHUload [off1+off2] {mergeSym(sym1,sym2)} base mem)
  4695  	for {
  4696  		off1 := auxIntToInt32(v.AuxInt)
  4697  		sym1 := auxToSym(v.Aux)
  4698  		if v_0.Op != OpRISCV64MOVaddr {
  4699  			break
  4700  		}
  4701  		off2 := auxIntToInt32(v_0.AuxInt)
  4702  		sym2 := auxToSym(v_0.Aux)
  4703  		base := v_0.Args[0]
  4704  		mem := v_1
  4705  		if !(is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2)) {
  4706  			break
  4707  		}
  4708  		v.reset(OpRISCV64MOVHUload)
  4709  		v.AuxInt = int32ToAuxInt(off1 + off2)
  4710  		v.Aux = symToAux(mergeSym(sym1, sym2))
  4711  		v.AddArg2(base, mem)
  4712  		return true
  4713  	}
  4714  	// match: (MOVHUload [off1] {sym} (ADDI [off2] base) mem)
  4715  	// cond: is32Bit(int64(off1)+off2)
  4716  	// result: (MOVHUload [off1+int32(off2)] {sym} base mem)
  4717  	for {
  4718  		off1 := auxIntToInt32(v.AuxInt)
  4719  		sym := auxToSym(v.Aux)
  4720  		if v_0.Op != OpRISCV64ADDI {
  4721  			break
  4722  		}
  4723  		off2 := auxIntToInt64(v_0.AuxInt)
  4724  		base := v_0.Args[0]
  4725  		mem := v_1
  4726  		if !(is32Bit(int64(off1) + off2)) {
  4727  			break
  4728  		}
  4729  		v.reset(OpRISCV64MOVHUload)
  4730  		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
  4731  		v.Aux = symToAux(sym)
  4732  		v.AddArg2(base, mem)
  4733  		return true
  4734  	}
  4735  	return false
  4736  }
  4737  func rewriteValueRISCV64_OpRISCV64MOVHUreg(v *Value) bool {
  4738  	v_0 := v.Args[0]
  4739  	b := v.Block
  4740  	// match: (MOVHUreg x:(ANDI [c] y))
  4741  	// cond: c >= 0 && int64(uint16(c)) == c
  4742  	// result: x
  4743  	for {
  4744  		x := v_0
  4745  		if x.Op != OpRISCV64ANDI {
  4746  			break
  4747  		}
  4748  		c := auxIntToInt64(x.AuxInt)
  4749  		if !(c >= 0 && int64(uint16(c)) == c) {
  4750  			break
  4751  		}
  4752  		v.copyOf(x)
  4753  		return true
  4754  	}
  4755  	// match: (MOVHUreg (ANDI [c] x))
  4756  	// cond: c < 0
  4757  	// result: (ANDI [int64(uint16(c))] x)
  4758  	for {
  4759  		if v_0.Op != OpRISCV64ANDI {
  4760  			break
  4761  		}
  4762  		c := auxIntToInt64(v_0.AuxInt)
  4763  		x := v_0.Args[0]
  4764  		if !(c < 0) {
  4765  			break
  4766  		}
  4767  		v.reset(OpRISCV64ANDI)
  4768  		v.AuxInt = int64ToAuxInt(int64(uint16(c)))
  4769  		v.AddArg(x)
  4770  		return true
  4771  	}
  4772  	// match: (MOVHUreg (MOVDconst [c]))
  4773  	// result: (MOVDconst [int64(uint16(c))])
  4774  	for {
  4775  		if v_0.Op != OpRISCV64MOVDconst {
  4776  			break
  4777  		}
  4778  		c := auxIntToInt64(v_0.AuxInt)
  4779  		v.reset(OpRISCV64MOVDconst)
  4780  		v.AuxInt = int64ToAuxInt(int64(uint16(c)))
  4781  		return true
  4782  	}
  4783  	// match: (MOVHUreg x:(MOVBUload _ _))
  4784  	// result: (MOVDreg x)
  4785  	for {
  4786  		x := v_0
  4787  		if x.Op != OpRISCV64MOVBUload {
  4788  			break
  4789  		}
  4790  		v.reset(OpRISCV64MOVDreg)
  4791  		v.AddArg(x)
  4792  		return true
  4793  	}
  4794  	// match: (MOVHUreg x:(MOVHUload _ _))
  4795  	// result: (MOVDreg x)
  4796  	for {
  4797  		x := v_0
  4798  		if x.Op != OpRISCV64MOVHUload {
  4799  			break
  4800  		}
  4801  		v.reset(OpRISCV64MOVDreg)
  4802  		v.AddArg(x)
  4803  		return true
  4804  	}
  4805  	// match: (MOVHUreg x:(MOVBUreg _))
  4806  	// result: (MOVDreg x)
  4807  	for {
  4808  		x := v_0
  4809  		if x.Op != OpRISCV64MOVBUreg {
  4810  			break
  4811  		}
  4812  		v.reset(OpRISCV64MOVDreg)
  4813  		v.AddArg(x)
  4814  		return true
  4815  	}
  4816  	// match: (MOVHUreg x:(MOVHUreg _))
  4817  	// result: (MOVDreg x)
  4818  	for {
  4819  		x := v_0
  4820  		if x.Op != OpRISCV64MOVHUreg {
  4821  			break
  4822  		}
  4823  		v.reset(OpRISCV64MOVDreg)
  4824  		v.AddArg(x)
  4825  		return true
  4826  	}
  4827  	// match: (MOVHUreg <t> x:(MOVHload [off] {sym} ptr mem))
  4828  	// cond: x.Uses == 1 && clobber(x)
  4829  	// result: @x.Block (MOVHUload <t> [off] {sym} ptr mem)
  4830  	for {
  4831  		t := v.Type
  4832  		x := v_0
  4833  		if x.Op != OpRISCV64MOVHload {
  4834  			break
  4835  		}
  4836  		off := auxIntToInt32(x.AuxInt)
  4837  		sym := auxToSym(x.Aux)
  4838  		mem := x.Args[1]
  4839  		ptr := x.Args[0]
  4840  		if !(x.Uses == 1 && clobber(x)) {
  4841  			break
  4842  		}
  4843  		b = x.Block
  4844  		v0 := b.NewValue0(x.Pos, OpRISCV64MOVHUload, t)
  4845  		v.copyOf(v0)
  4846  		v0.AuxInt = int32ToAuxInt(off)
  4847  		v0.Aux = symToAux(sym)
  4848  		v0.AddArg2(ptr, mem)
  4849  		return true
  4850  	}
  4851  	return false
  4852  }
  4853  func rewriteValueRISCV64_OpRISCV64MOVHload(v *Value) bool {
  4854  	v_1 := v.Args[1]
  4855  	v_0 := v.Args[0]
  4856  	// match: (MOVHload [off1] {sym1} (MOVaddr [off2] {sym2} base) mem)
  4857  	// cond: is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2)
  4858  	// result: (MOVHload [off1+off2] {mergeSym(sym1,sym2)} base mem)
  4859  	for {
  4860  		off1 := auxIntToInt32(v.AuxInt)
  4861  		sym1 := auxToSym(v.Aux)
  4862  		if v_0.Op != OpRISCV64MOVaddr {
  4863  			break
  4864  		}
  4865  		off2 := auxIntToInt32(v_0.AuxInt)
  4866  		sym2 := auxToSym(v_0.Aux)
  4867  		base := v_0.Args[0]
  4868  		mem := v_1
  4869  		if !(is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2)) {
  4870  			break
  4871  		}
  4872  		v.reset(OpRISCV64MOVHload)
  4873  		v.AuxInt = int32ToAuxInt(off1 + off2)
  4874  		v.Aux = symToAux(mergeSym(sym1, sym2))
  4875  		v.AddArg2(base, mem)
  4876  		return true
  4877  	}
  4878  	// match: (MOVHload [off1] {sym} (ADDI [off2] base) mem)
  4879  	// cond: is32Bit(int64(off1)+off2)
  4880  	// result: (MOVHload [off1+int32(off2)] {sym} base mem)
  4881  	for {
  4882  		off1 := auxIntToInt32(v.AuxInt)
  4883  		sym := auxToSym(v.Aux)
  4884  		if v_0.Op != OpRISCV64ADDI {
  4885  			break
  4886  		}
  4887  		off2 := auxIntToInt64(v_0.AuxInt)
  4888  		base := v_0.Args[0]
  4889  		mem := v_1
  4890  		if !(is32Bit(int64(off1) + off2)) {
  4891  			break
  4892  		}
  4893  		v.reset(OpRISCV64MOVHload)
  4894  		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
  4895  		v.Aux = symToAux(sym)
  4896  		v.AddArg2(base, mem)
  4897  		return true
  4898  	}
  4899  	return false
  4900  }
  4901  func rewriteValueRISCV64_OpRISCV64MOVHreg(v *Value) bool {
  4902  	v_0 := v.Args[0]
  4903  	b := v.Block
  4904  	// match: (MOVHreg x:(ANDI [c] y))
  4905  	// cond: c >= 0 && int64(int16(c)) == c
  4906  	// result: x
  4907  	for {
  4908  		x := v_0
  4909  		if x.Op != OpRISCV64ANDI {
  4910  			break
  4911  		}
  4912  		c := auxIntToInt64(x.AuxInt)
  4913  		if !(c >= 0 && int64(int16(c)) == c) {
  4914  			break
  4915  		}
  4916  		v.copyOf(x)
  4917  		return true
  4918  	}
  4919  	// match: (MOVHreg (MOVDconst [c]))
  4920  	// result: (MOVDconst [int64(int16(c))])
  4921  	for {
  4922  		if v_0.Op != OpRISCV64MOVDconst {
  4923  			break
  4924  		}
  4925  		c := auxIntToInt64(v_0.AuxInt)
  4926  		v.reset(OpRISCV64MOVDconst)
  4927  		v.AuxInt = int64ToAuxInt(int64(int16(c)))
  4928  		return true
  4929  	}
  4930  	// match: (MOVHreg x:(MOVBload _ _))
  4931  	// result: (MOVDreg x)
  4932  	for {
  4933  		x := v_0
  4934  		if x.Op != OpRISCV64MOVBload {
  4935  			break
  4936  		}
  4937  		v.reset(OpRISCV64MOVDreg)
  4938  		v.AddArg(x)
  4939  		return true
  4940  	}
  4941  	// match: (MOVHreg x:(MOVBUload _ _))
  4942  	// result: (MOVDreg x)
  4943  	for {
  4944  		x := v_0
  4945  		if x.Op != OpRISCV64MOVBUload {
  4946  			break
  4947  		}
  4948  		v.reset(OpRISCV64MOVDreg)
  4949  		v.AddArg(x)
  4950  		return true
  4951  	}
  4952  	// match: (MOVHreg x:(MOVHload _ _))
  4953  	// result: (MOVDreg x)
  4954  	for {
  4955  		x := v_0
  4956  		if x.Op != OpRISCV64MOVHload {
  4957  			break
  4958  		}
  4959  		v.reset(OpRISCV64MOVDreg)
  4960  		v.AddArg(x)
  4961  		return true
  4962  	}
  4963  	// match: (MOVHreg x:(MOVBreg _))
  4964  	// result: (MOVDreg x)
  4965  	for {
  4966  		x := v_0
  4967  		if x.Op != OpRISCV64MOVBreg {
  4968  			break
  4969  		}
  4970  		v.reset(OpRISCV64MOVDreg)
  4971  		v.AddArg(x)
  4972  		return true
  4973  	}
  4974  	// match: (MOVHreg x:(MOVBUreg _))
  4975  	// result: (MOVDreg x)
  4976  	for {
  4977  		x := v_0
  4978  		if x.Op != OpRISCV64MOVBUreg {
  4979  			break
  4980  		}
  4981  		v.reset(OpRISCV64MOVDreg)
  4982  		v.AddArg(x)
  4983  		return true
  4984  	}
  4985  	// match: (MOVHreg x:(MOVHreg _))
  4986  	// result: (MOVDreg x)
  4987  	for {
  4988  		x := v_0
  4989  		if x.Op != OpRISCV64MOVHreg {
  4990  			break
  4991  		}
  4992  		v.reset(OpRISCV64MOVDreg)
  4993  		v.AddArg(x)
  4994  		return true
  4995  	}
  4996  	// match: (MOVHreg <t> x:(MOVHUload [off] {sym} ptr mem))
  4997  	// cond: x.Uses == 1 && clobber(x)
  4998  	// result: @x.Block (MOVHload <t> [off] {sym} ptr mem)
  4999  	for {
  5000  		t := v.Type
  5001  		x := v_0
  5002  		if x.Op != OpRISCV64MOVHUload {
  5003  			break
  5004  		}
  5005  		off := auxIntToInt32(x.AuxInt)
  5006  		sym := auxToSym(x.Aux)
  5007  		mem := x.Args[1]
  5008  		ptr := x.Args[0]
  5009  		if !(x.Uses == 1 && clobber(x)) {
  5010  			break
  5011  		}
  5012  		b = x.Block
  5013  		v0 := b.NewValue0(x.Pos, OpRISCV64MOVHload, t)
  5014  		v.copyOf(v0)
  5015  		v0.AuxInt = int32ToAuxInt(off)
  5016  		v0.Aux = symToAux(sym)
  5017  		v0.AddArg2(ptr, mem)
  5018  		return true
  5019  	}
  5020  	return false
  5021  }
  5022  func rewriteValueRISCV64_OpRISCV64MOVHstore(v *Value) bool {
  5023  	v_2 := v.Args[2]
  5024  	v_1 := v.Args[1]
  5025  	v_0 := v.Args[0]
  5026  	// match: (MOVHstore [off1] {sym1} (MOVaddr [off2] {sym2} base) val mem)
  5027  	// cond: is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2)
  5028  	// result: (MOVHstore [off1+off2] {mergeSym(sym1,sym2)} base val mem)
  5029  	for {
  5030  		off1 := auxIntToInt32(v.AuxInt)
  5031  		sym1 := auxToSym(v.Aux)
  5032  		if v_0.Op != OpRISCV64MOVaddr {
  5033  			break
  5034  		}
  5035  		off2 := auxIntToInt32(v_0.AuxInt)
  5036  		sym2 := auxToSym(v_0.Aux)
  5037  		base := v_0.Args[0]
  5038  		val := v_1
  5039  		mem := v_2
  5040  		if !(is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2)) {
  5041  			break
  5042  		}
  5043  		v.reset(OpRISCV64MOVHstore)
  5044  		v.AuxInt = int32ToAuxInt(off1 + off2)
  5045  		v.Aux = symToAux(mergeSym(sym1, sym2))
  5046  		v.AddArg3(base, val, mem)
  5047  		return true
  5048  	}
  5049  	// match: (MOVHstore [off1] {sym} (ADDI [off2] base) val mem)
  5050  	// cond: is32Bit(int64(off1)+off2)
  5051  	// result: (MOVHstore [off1+int32(off2)] {sym} base val mem)
  5052  	for {
  5053  		off1 := auxIntToInt32(v.AuxInt)
  5054  		sym := auxToSym(v.Aux)
  5055  		if v_0.Op != OpRISCV64ADDI {
  5056  			break
  5057  		}
  5058  		off2 := auxIntToInt64(v_0.AuxInt)
  5059  		base := v_0.Args[0]
  5060  		val := v_1
  5061  		mem := v_2
  5062  		if !(is32Bit(int64(off1) + off2)) {
  5063  			break
  5064  		}
  5065  		v.reset(OpRISCV64MOVHstore)
  5066  		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
  5067  		v.Aux = symToAux(sym)
  5068  		v.AddArg3(base, val, mem)
  5069  		return true
  5070  	}
  5071  	// match: (MOVHstore [off] {sym} ptr (MOVDconst [0]) mem)
  5072  	// result: (MOVHstorezero [off] {sym} ptr mem)
  5073  	for {
  5074  		off := auxIntToInt32(v.AuxInt)
  5075  		sym := auxToSym(v.Aux)
  5076  		ptr := v_0
  5077  		if v_1.Op != OpRISCV64MOVDconst || auxIntToInt64(v_1.AuxInt) != 0 {
  5078  			break
  5079  		}
  5080  		mem := v_2
  5081  		v.reset(OpRISCV64MOVHstorezero)
  5082  		v.AuxInt = int32ToAuxInt(off)
  5083  		v.Aux = symToAux(sym)
  5084  		v.AddArg2(ptr, mem)
  5085  		return true
  5086  	}
  5087  	// match: (MOVHstore [off] {sym} ptr (MOVHreg x) mem)
  5088  	// result: (MOVHstore [off] {sym} ptr x mem)
  5089  	for {
  5090  		off := auxIntToInt32(v.AuxInt)
  5091  		sym := auxToSym(v.Aux)
  5092  		ptr := v_0
  5093  		if v_1.Op != OpRISCV64MOVHreg {
  5094  			break
  5095  		}
  5096  		x := v_1.Args[0]
  5097  		mem := v_2
  5098  		v.reset(OpRISCV64MOVHstore)
  5099  		v.AuxInt = int32ToAuxInt(off)
  5100  		v.Aux = symToAux(sym)
  5101  		v.AddArg3(ptr, x, mem)
  5102  		return true
  5103  	}
  5104  	// match: (MOVHstore [off] {sym} ptr (MOVWreg x) mem)
  5105  	// result: (MOVHstore [off] {sym} ptr x mem)
  5106  	for {
  5107  		off := auxIntToInt32(v.AuxInt)
  5108  		sym := auxToSym(v.Aux)
  5109  		ptr := v_0
  5110  		if v_1.Op != OpRISCV64MOVWreg {
  5111  			break
  5112  		}
  5113  		x := v_1.Args[0]
  5114  		mem := v_2
  5115  		v.reset(OpRISCV64MOVHstore)
  5116  		v.AuxInt = int32ToAuxInt(off)
  5117  		v.Aux = symToAux(sym)
  5118  		v.AddArg3(ptr, x, mem)
  5119  		return true
  5120  	}
  5121  	// match: (MOVHstore [off] {sym} ptr (MOVHUreg x) mem)
  5122  	// result: (MOVHstore [off] {sym} ptr x mem)
  5123  	for {
  5124  		off := auxIntToInt32(v.AuxInt)
  5125  		sym := auxToSym(v.Aux)
  5126  		ptr := v_0
  5127  		if v_1.Op != OpRISCV64MOVHUreg {
  5128  			break
  5129  		}
  5130  		x := v_1.Args[0]
  5131  		mem := v_2
  5132  		v.reset(OpRISCV64MOVHstore)
  5133  		v.AuxInt = int32ToAuxInt(off)
  5134  		v.Aux = symToAux(sym)
  5135  		v.AddArg3(ptr, x, mem)
  5136  		return true
  5137  	}
  5138  	// match: (MOVHstore [off] {sym} ptr (MOVWUreg x) mem)
  5139  	// result: (MOVHstore [off] {sym} ptr x mem)
  5140  	for {
  5141  		off := auxIntToInt32(v.AuxInt)
  5142  		sym := auxToSym(v.Aux)
  5143  		ptr := v_0
  5144  		if v_1.Op != OpRISCV64MOVWUreg {
  5145  			break
  5146  		}
  5147  		x := v_1.Args[0]
  5148  		mem := v_2
  5149  		v.reset(OpRISCV64MOVHstore)
  5150  		v.AuxInt = int32ToAuxInt(off)
  5151  		v.Aux = symToAux(sym)
  5152  		v.AddArg3(ptr, x, mem)
  5153  		return true
  5154  	}
  5155  	return false
  5156  }
  5157  func rewriteValueRISCV64_OpRISCV64MOVHstorezero(v *Value) bool {
  5158  	v_1 := v.Args[1]
  5159  	v_0 := v.Args[0]
  5160  	// match: (MOVHstorezero [off1] {sym1} (MOVaddr [off2] {sym2} ptr) mem)
  5161  	// cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2))
  5162  	// result: (MOVHstorezero [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
  5163  	for {
  5164  		off1 := auxIntToInt32(v.AuxInt)
  5165  		sym1 := auxToSym(v.Aux)
  5166  		if v_0.Op != OpRISCV64MOVaddr {
  5167  			break
  5168  		}
  5169  		off2 := auxIntToInt32(v_0.AuxInt)
  5170  		sym2 := auxToSym(v_0.Aux)
  5171  		ptr := v_0.Args[0]
  5172  		mem := v_1
  5173  		if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2))) {
  5174  			break
  5175  		}
  5176  		v.reset(OpRISCV64MOVHstorezero)
  5177  		v.AuxInt = int32ToAuxInt(off1 + off2)
  5178  		v.Aux = symToAux(mergeSym(sym1, sym2))
  5179  		v.AddArg2(ptr, mem)
  5180  		return true
  5181  	}
  5182  	// match: (MOVHstorezero [off1] {sym} (ADDI [off2] ptr) mem)
  5183  	// cond: is32Bit(int64(off1)+off2)
  5184  	// result: (MOVHstorezero [off1+int32(off2)] {sym} ptr mem)
  5185  	for {
  5186  		off1 := auxIntToInt32(v.AuxInt)
  5187  		sym := auxToSym(v.Aux)
  5188  		if v_0.Op != OpRISCV64ADDI {
  5189  			break
  5190  		}
  5191  		off2 := auxIntToInt64(v_0.AuxInt)
  5192  		ptr := v_0.Args[0]
  5193  		mem := v_1
  5194  		if !(is32Bit(int64(off1) + off2)) {
  5195  			break
  5196  		}
  5197  		v.reset(OpRISCV64MOVHstorezero)
  5198  		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
  5199  		v.Aux = symToAux(sym)
  5200  		v.AddArg2(ptr, mem)
  5201  		return true
  5202  	}
  5203  	return false
  5204  }
// rewriteValueRISCV64_OpRISCV64MOVWUload folds constant address arithmetic
// into a MOVWUload: a MOVaddr base (merging symbols and offsets) or an ADDI
// immediate is absorbed into the load's offset when the sum fits in 32 bits.
// It reports whether a rewrite was applied.
func rewriteValueRISCV64_OpRISCV64MOVWUload(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (MOVWUload [off1] {sym1} (MOVaddr [off2] {sym2} base) mem)
	// cond: is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2)
	// result: (MOVWUload [off1+off2] {mergeSym(sym1,sym2)} base mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym1 := auxToSym(v.Aux)
		if v_0.Op != OpRISCV64MOVaddr {
			break
		}
		off2 := auxIntToInt32(v_0.AuxInt)
		sym2 := auxToSym(v_0.Aux)
		base := v_0.Args[0]
		mem := v_1
		if !(is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2)) {
			break
		}
		v.reset(OpRISCV64MOVWUload)
		v.AuxInt = int32ToAuxInt(off1 + off2)
		v.Aux = symToAux(mergeSym(sym1, sym2))
		v.AddArg2(base, mem)
		return true
	}
	// match: (MOVWUload [off1] {sym} (ADDI [off2] base) mem)
	// cond: is32Bit(int64(off1)+off2)
	// result: (MOVWUload [off1+int32(off2)] {sym} base mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpRISCV64ADDI {
			break
		}
		off2 := auxIntToInt64(v_0.AuxInt)
		base := v_0.Args[0]
		mem := v_1
		if !(is32Bit(int64(off1) + off2)) {
			break
		}
		v.reset(OpRISCV64MOVWUload)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(sym)
		v.AddArg2(base, mem)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64MOVWUreg simplifies a 32-bit zero-extension
// (MOVWUreg): it is dropped when the argument is already known to have zero
// upper bits (small ANDI masks, unsigned sub-word loads, narrower unsigned
// extensions), folded into constants, turned into an AND for negative ANDI
// masks, or merged with a single-use signed MOVWload into a MOVWUload.
// It reports whether a rewrite was applied.
func rewriteValueRISCV64_OpRISCV64MOVWUreg(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (MOVWUreg x:(ANDI [c] y))
	// cond: c >= 0 && int64(uint32(c)) == c
	// result: x
	for {
		x := v_0
		if x.Op != OpRISCV64ANDI {
			break
		}
		c := auxIntToInt64(x.AuxInt)
		if !(c >= 0 && int64(uint32(c)) == c) {
			break
		}
		v.copyOf(x)
		return true
	}
	// match: (MOVWUreg (ANDI [c] x))
	// cond: c < 0
	// result: (AND (MOVDconst [int64(uint32(c))]) x)
	for {
		if v_0.Op != OpRISCV64ANDI {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		x := v_0.Args[0]
		if !(c < 0) {
			break
		}
		v.reset(OpRISCV64AND)
		v0 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
		v0.AuxInt = int64ToAuxInt(int64(uint32(c)))
		v.AddArg2(v0, x)
		return true
	}
	// match: (MOVWUreg (MOVDconst [c]))
	// result: (MOVDconst [int64(uint32(c))])
	for {
		if v_0.Op != OpRISCV64MOVDconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		v.reset(OpRISCV64MOVDconst)
		v.AuxInt = int64ToAuxInt(int64(uint32(c)))
		return true
	}
	// match: (MOVWUreg x:(MOVBUload _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpRISCV64MOVBUload {
			break
		}
		v.reset(OpRISCV64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWUreg x:(MOVHUload _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpRISCV64MOVHUload {
			break
		}
		v.reset(OpRISCV64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWUreg x:(MOVWUload _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpRISCV64MOVWUload {
			break
		}
		v.reset(OpRISCV64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWUreg x:(MOVBUreg _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpRISCV64MOVBUreg {
			break
		}
		v.reset(OpRISCV64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWUreg x:(MOVHUreg _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpRISCV64MOVHUreg {
			break
		}
		v.reset(OpRISCV64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWUreg x:(MOVWUreg _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpRISCV64MOVWUreg {
			break
		}
		v.reset(OpRISCV64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWUreg <t> x:(MOVWload [off] {sym} ptr mem))
	// cond: x.Uses == 1 && clobber(x)
	// result: @x.Block (MOVWUload <t> [off] {sym} ptr mem)
	for {
		t := v.Type
		x := v_0
		if x.Op != OpRISCV64MOVWload {
			break
		}
		off := auxIntToInt32(x.AuxInt)
		sym := auxToSym(x.Aux)
		mem := x.Args[1]
		ptr := x.Args[0]
		if !(x.Uses == 1 && clobber(x)) {
			break
		}
		b = x.Block
		v0 := b.NewValue0(x.Pos, OpRISCV64MOVWUload, t)
		v.copyOf(v0)
		v0.AuxInt = int32ToAuxInt(off)
		v0.Aux = symToAux(sym)
		v0.AddArg2(ptr, mem)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64MOVWload folds constant address arithmetic
// into a MOVWload: a MOVaddr base (merging symbols and offsets) or an ADDI
// immediate is absorbed into the load's offset when the sum fits in 32 bits.
// It reports whether a rewrite was applied.
func rewriteValueRISCV64_OpRISCV64MOVWload(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (MOVWload [off1] {sym1} (MOVaddr [off2] {sym2} base) mem)
	// cond: is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2)
	// result: (MOVWload [off1+off2] {mergeSym(sym1,sym2)} base mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym1 := auxToSym(v.Aux)
		if v_0.Op != OpRISCV64MOVaddr {
			break
		}
		off2 := auxIntToInt32(v_0.AuxInt)
		sym2 := auxToSym(v_0.Aux)
		base := v_0.Args[0]
		mem := v_1
		if !(is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2)) {
			break
		}
		v.reset(OpRISCV64MOVWload)
		v.AuxInt = int32ToAuxInt(off1 + off2)
		v.Aux = symToAux(mergeSym(sym1, sym2))
		v.AddArg2(base, mem)
		return true
	}
	// match: (MOVWload [off1] {sym} (ADDI [off2] base) mem)
	// cond: is32Bit(int64(off1)+off2)
	// result: (MOVWload [off1+int32(off2)] {sym} base mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpRISCV64ADDI {
			break
		}
		off2 := auxIntToInt64(v_0.AuxInt)
		base := v_0.Args[0]
		mem := v_1
		if !(is32Bit(int64(off1) + off2)) {
			break
		}
		v.reset(OpRISCV64MOVWload)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(sym)
		v.AddArg2(base, mem)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64MOVWreg simplifies a 32-bit sign-extension
// (MOVWreg): it is dropped (via MOVDreg) when the argument is already properly
// sign-extended — sub-word loads, W-suffixed arithmetic (ADDIW, SUBW, NEGW,
// MULW, DIVW, DIVUW, REMW, REMUW), or narrower register extensions — folded
// into constants, elided for small non-negative ANDI masks, or merged with a
// single-use MOVWUload into a signed MOVWload.
// It reports whether a rewrite was applied.
func rewriteValueRISCV64_OpRISCV64MOVWreg(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	// match: (MOVWreg x:(ANDI [c] y))
	// cond: c >= 0 && int64(int32(c)) == c
	// result: x
	for {
		x := v_0
		if x.Op != OpRISCV64ANDI {
			break
		}
		c := auxIntToInt64(x.AuxInt)
		if !(c >= 0 && int64(int32(c)) == c) {
			break
		}
		v.copyOf(x)
		return true
	}
	// match: (MOVWreg (MOVDconst [c]))
	// result: (MOVDconst [int64(int32(c))])
	for {
		if v_0.Op != OpRISCV64MOVDconst {
			break
		}
		c := auxIntToInt64(v_0.AuxInt)
		v.reset(OpRISCV64MOVDconst)
		v.AuxInt = int64ToAuxInt(int64(int32(c)))
		return true
	}
	// match: (MOVWreg x:(MOVBload _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpRISCV64MOVBload {
			break
		}
		v.reset(OpRISCV64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(MOVBUload _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpRISCV64MOVBUload {
			break
		}
		v.reset(OpRISCV64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(MOVHload _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpRISCV64MOVHload {
			break
		}
		v.reset(OpRISCV64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(MOVHUload _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpRISCV64MOVHUload {
			break
		}
		v.reset(OpRISCV64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(MOVWload _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpRISCV64MOVWload {
			break
		}
		v.reset(OpRISCV64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(ADDIW _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpRISCV64ADDIW {
			break
		}
		v.reset(OpRISCV64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(SUBW _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpRISCV64SUBW {
			break
		}
		v.reset(OpRISCV64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(NEGW _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpRISCV64NEGW {
			break
		}
		v.reset(OpRISCV64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(MULW _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpRISCV64MULW {
			break
		}
		v.reset(OpRISCV64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(DIVW _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpRISCV64DIVW {
			break
		}
		v.reset(OpRISCV64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(DIVUW _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpRISCV64DIVUW {
			break
		}
		v.reset(OpRISCV64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(REMW _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpRISCV64REMW {
			break
		}
		v.reset(OpRISCV64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(REMUW _ _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpRISCV64REMUW {
			break
		}
		v.reset(OpRISCV64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(MOVBreg _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpRISCV64MOVBreg {
			break
		}
		v.reset(OpRISCV64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(MOVBUreg _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpRISCV64MOVBUreg {
			break
		}
		v.reset(OpRISCV64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(MOVHreg _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpRISCV64MOVHreg {
			break
		}
		v.reset(OpRISCV64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg x:(MOVWreg _))
	// result: (MOVDreg x)
	for {
		x := v_0
		if x.Op != OpRISCV64MOVWreg {
			break
		}
		v.reset(OpRISCV64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVWreg <t> x:(MOVWUload [off] {sym} ptr mem))
	// cond: x.Uses == 1 && clobber(x)
	// result: @x.Block (MOVWload <t> [off] {sym} ptr mem)
	for {
		t := v.Type
		x := v_0
		if x.Op != OpRISCV64MOVWUload {
			break
		}
		off := auxIntToInt32(x.AuxInt)
		sym := auxToSym(x.Aux)
		mem := x.Args[1]
		ptr := x.Args[0]
		if !(x.Uses == 1 && clobber(x)) {
			break
		}
		b = x.Block
		v0 := b.NewValue0(x.Pos, OpRISCV64MOVWload, t)
		v.copyOf(v0)
		v0.AuxInt = int32ToAuxInt(off)
		v0.Aux = symToAux(sym)
		v0.AddArg2(ptr, mem)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64MOVWstore simplifies a 32-bit store: constant
// address arithmetic (MOVaddr, ADDI) is folded into the offset, a stored zero
// constant becomes MOVWstorezero, and a redundant MOVWreg/MOVWUreg extension
// of the stored value is dropped (the store only writes the low 32 bits).
// It reports whether a rewrite was applied.
func rewriteValueRISCV64_OpRISCV64MOVWstore(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (MOVWstore [off1] {sym1} (MOVaddr [off2] {sym2} base) val mem)
	// cond: is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2)
	// result: (MOVWstore [off1+off2] {mergeSym(sym1,sym2)} base val mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym1 := auxToSym(v.Aux)
		if v_0.Op != OpRISCV64MOVaddr {
			break
		}
		off2 := auxIntToInt32(v_0.AuxInt)
		sym2 := auxToSym(v_0.Aux)
		base := v_0.Args[0]
		val := v_1
		mem := v_2
		if !(is32Bit(int64(off1)+int64(off2)) && canMergeSym(sym1, sym2)) {
			break
		}
		v.reset(OpRISCV64MOVWstore)
		v.AuxInt = int32ToAuxInt(off1 + off2)
		v.Aux = symToAux(mergeSym(sym1, sym2))
		v.AddArg3(base, val, mem)
		return true
	}
	// match: (MOVWstore [off1] {sym} (ADDI [off2] base) val mem)
	// cond: is32Bit(int64(off1)+off2)
	// result: (MOVWstore [off1+int32(off2)] {sym} base val mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpRISCV64ADDI {
			break
		}
		off2 := auxIntToInt64(v_0.AuxInt)
		base := v_0.Args[0]
		val := v_1
		mem := v_2
		if !(is32Bit(int64(off1) + off2)) {
			break
		}
		v.reset(OpRISCV64MOVWstore)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(sym)
		v.AddArg3(base, val, mem)
		return true
	}
	// match: (MOVWstore [off] {sym} ptr (MOVDconst [0]) mem)
	// result: (MOVWstorezero [off] {sym} ptr mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpRISCV64MOVDconst || auxIntToInt64(v_1.AuxInt) != 0 {
			break
		}
		mem := v_2
		v.reset(OpRISCV64MOVWstorezero)
		v.AuxInt = int32ToAuxInt(off)
		v.Aux = symToAux(sym)
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVWstore [off] {sym} ptr (MOVWreg x) mem)
	// result: (MOVWstore [off] {sym} ptr x mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpRISCV64MOVWreg {
			break
		}
		x := v_1.Args[0]
		mem := v_2
		v.reset(OpRISCV64MOVWstore)
		v.AuxInt = int32ToAuxInt(off)
		v.Aux = symToAux(sym)
		v.AddArg3(ptr, x, mem)
		return true
	}
	// match: (MOVWstore [off] {sym} ptr (MOVWUreg x) mem)
	// result: (MOVWstore [off] {sym} ptr x mem)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		if v_1.Op != OpRISCV64MOVWUreg {
			break
		}
		x := v_1.Args[0]
		mem := v_2
		v.reset(OpRISCV64MOVWstore)
		v.AuxInt = int32ToAuxInt(off)
		v.Aux = symToAux(sym)
		v.AddArg3(ptr, x, mem)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64MOVWstorezero folds constant address arithmetic
// into a MOVWstorezero: a MOVaddr base (merging symbols and offsets) or an ADDI
// immediate is absorbed into the store's offset when the sum fits in 32 bits.
// It reports whether a rewrite was applied.
func rewriteValueRISCV64_OpRISCV64MOVWstorezero(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (MOVWstorezero [off1] {sym1} (MOVaddr [off2] {sym2} ptr) mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2))
	// result: (MOVWstorezero [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym1 := auxToSym(v.Aux)
		if v_0.Op != OpRISCV64MOVaddr {
			break
		}
		off2 := auxIntToInt32(v_0.AuxInt)
		sym2 := auxToSym(v_0.Aux)
		ptr := v_0.Args[0]
		mem := v_1
		if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2))) {
			break
		}
		v.reset(OpRISCV64MOVWstorezero)
		v.AuxInt = int32ToAuxInt(off1 + off2)
		v.Aux = symToAux(mergeSym(sym1, sym2))
		v.AddArg2(ptr, mem)
		return true
	}
	// match: (MOVWstorezero [off1] {sym} (ADDI [off2] ptr) mem)
	// cond: is32Bit(int64(off1)+off2)
	// result: (MOVWstorezero [off1+int32(off2)] {sym} ptr mem)
	for {
		off1 := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		if v_0.Op != OpRISCV64ADDI {
			break
		}
		off2 := auxIntToInt64(v_0.AuxInt)
		ptr := v_0.Args[0]
		mem := v_1
		if !(is32Bit(int64(off1) + off2)) {
			break
		}
		v.reset(OpRISCV64MOVWstorezero)
		v.AuxInt = int32ToAuxInt(off1 + int32(off2))
		v.Aux = symToAux(sym)
		v.AddArg2(ptr, mem)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64NEG simplifies a 64-bit negation: -(x-y)
// becomes y-x, a negation of a single-use ADDI-of-SUB is distributed
// (-(x-y+val) => (y-x)-val), double negation cancels, and a constant
// operand is folded.
// It reports whether a rewrite was applied.
func rewriteValueRISCV64_OpRISCV64NEG(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	// match: (NEG (SUB x y))
	// result: (SUB y x)
	for {
		if v_0.Op != OpRISCV64SUB {
			break
		}
		y := v_0.Args[1]
		x := v_0.Args[0]
		v.reset(OpRISCV64SUB)
		v.AddArg2(y, x)
		return true
	}
	// match: (NEG <t> s:(ADDI [val] (SUB x y)))
	// cond: s.Uses == 1 && is32Bit(-val)
	// result: (ADDI [-val] (SUB <t> y x))
	for {
		t := v.Type
		s := v_0
		if s.Op != OpRISCV64ADDI {
			break
		}
		val := auxIntToInt64(s.AuxInt)
		s_0 := s.Args[0]
		if s_0.Op != OpRISCV64SUB {
			break
		}
		y := s_0.Args[1]
		x := s_0.Args[0]
		if !(s.Uses == 1 && is32Bit(-val)) {
			break
		}
		v.reset(OpRISCV64ADDI)
		v.AuxInt = int64ToAuxInt(-val)
		v0 := b.NewValue0(v.Pos, OpRISCV64SUB, t)
		v0.AddArg2(y, x)
		v.AddArg(v0)
		return true
	}
	// match: (NEG (NEG x))
	// result: x
	for {
		if v_0.Op != OpRISCV64NEG {
			break
		}
		x := v_0.Args[0]
		v.copyOf(x)
		return true
	}
	// match: (NEG (MOVDconst [x]))
	// result: (MOVDconst [-x])
	for {
		if v_0.Op != OpRISCV64MOVDconst {
			break
		}
		x := auxIntToInt64(v_0.AuxInt)
		v.reset(OpRISCV64MOVDconst)
		v.AuxInt = int64ToAuxInt(-x)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64NEGW constant-folds a 32-bit negation:
// NEGW of a constant becomes the negated constant truncated to 32 bits and
// sign-extended.
// It reports whether a rewrite was applied.
func rewriteValueRISCV64_OpRISCV64NEGW(v *Value) bool {
	v_0 := v.Args[0]
	// match: (NEGW (MOVDconst [x]))
	// result: (MOVDconst [int64(int32(-x))])
	for {
		if v_0.Op != OpRISCV64MOVDconst {
			break
		}
		x := auxIntToInt64(v_0.AuxInt)
		v.reset(OpRISCV64MOVDconst)
		v.AuxInt = int64ToAuxInt(int64(int32(-x)))
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64OR converts an OR with a 32-bit constant
// operand (checked in either position — OR is commutative, hence the inner
// two-iteration loop that swaps v_0/v_1) into the immediate form ORI.
// It reports whether a rewrite was applied.
func rewriteValueRISCV64_OpRISCV64OR(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (OR (MOVDconst [val]) x)
	// cond: is32Bit(val)
	// result: (ORI [val] x)
	for {
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpRISCV64MOVDconst {
				continue
			}
			val := auxIntToInt64(v_0.AuxInt)
			x := v_1
			if !(is32Bit(val)) {
				continue
			}
			v.reset(OpRISCV64ORI)
			v.AuxInt = int64ToAuxInt(val)
			v.AddArg(x)
			return true
		}
		break
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64ORI simplifies an OR-immediate: ORI 0 is the
// identity, ORI -1 yields the all-ones constant, a constant operand is
// folded, and nested ORIs are collapsed by OR-ing their immediates.
// It reports whether a rewrite was applied.
func rewriteValueRISCV64_OpRISCV64ORI(v *Value) bool {
	v_0 := v.Args[0]
	// match: (ORI [0] x)
	// result: x
	for {
		if auxIntToInt64(v.AuxInt) != 0 {
			break
		}
		x := v_0
		v.copyOf(x)
		return true
	}
	// match: (ORI [-1] x)
	// result: (MOVDconst [-1])
	for {
		if auxIntToInt64(v.AuxInt) != -1 {
			break
		}
		v.reset(OpRISCV64MOVDconst)
		v.AuxInt = int64ToAuxInt(-1)
		return true
	}
	// match: (ORI [x] (MOVDconst [y]))
	// result: (MOVDconst [x | y])
	for {
		x := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpRISCV64MOVDconst {
			break
		}
		y := auxIntToInt64(v_0.AuxInt)
		v.reset(OpRISCV64MOVDconst)
		v.AuxInt = int64ToAuxInt(x | y)
		return true
	}
	// match: (ORI [x] (ORI [y] z))
	// result: (ORI [x | y] z)
	for {
		x := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpRISCV64ORI {
			break
		}
		y := auxIntToInt64(v_0.AuxInt)
		z := v_0.Args[0]
		v.reset(OpRISCV64ORI)
		v.AuxInt = int64ToAuxInt(x | y)
		v.AddArg(z)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64SEQZ simplifies a set-if-equal-zero: negation
// of the operand is irrelevant (NEG preserves zero-ness), and stacked
// SEQZ/SNEZ collapse — SEQZ(SEQZ x) inverts to SNEZ x, SEQZ(SNEZ x)
// reduces to SEQZ x.
// It reports whether a rewrite was applied.
func rewriteValueRISCV64_OpRISCV64SEQZ(v *Value) bool {
	v_0 := v.Args[0]
	// match: (SEQZ (NEG x))
	// result: (SEQZ x)
	for {
		if v_0.Op != OpRISCV64NEG {
			break
		}
		x := v_0.Args[0]
		v.reset(OpRISCV64SEQZ)
		v.AddArg(x)
		return true
	}
	// match: (SEQZ (SEQZ x))
	// result: (SNEZ x)
	for {
		if v_0.Op != OpRISCV64SEQZ {
			break
		}
		x := v_0.Args[0]
		v.reset(OpRISCV64SNEZ)
		v.AddArg(x)
		return true
	}
	// match: (SEQZ (SNEZ x))
	// result: (SEQZ x)
	for {
		if v_0.Op != OpRISCV64SNEZ {
			break
		}
		x := v_0.Args[0]
		v.reset(OpRISCV64SEQZ)
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64SLL converts a shift-left by a constant
// amount into the immediate form SLLI, masking the amount to 6 bits
// (val&63) to match the hardware shift-amount semantics.
// It reports whether a rewrite was applied.
func rewriteValueRISCV64_OpRISCV64SLL(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (SLL x (MOVDconst [val]))
	// result: (SLLI [int64(val&63)] x)
	for {
		x := v_0
		if v_1.Op != OpRISCV64MOVDconst {
			break
		}
		val := auxIntToInt64(v_1.AuxInt)
		v.reset(OpRISCV64SLLI)
		v.AuxInt = int64ToAuxInt(int64(val & 63))
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64SLLI constant-folds a shift-left-immediate of
// a constant, but only when the shifted result still fits in 32 bits (so the
// folded constant remains cheap to materialize).
// It reports whether a rewrite was applied.
func rewriteValueRISCV64_OpRISCV64SLLI(v *Value) bool {
	v_0 := v.Args[0]
	// match: (SLLI [x] (MOVDconst [y]))
	// cond: is32Bit(y << uint32(x))
	// result: (MOVDconst [y << uint32(x)])
	for {
		x := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpRISCV64MOVDconst {
			break
		}
		y := auxIntToInt64(v_0.AuxInt)
		if !(is32Bit(y << uint32(x))) {
			break
		}
		v.reset(OpRISCV64MOVDconst)
		v.AuxInt = int64ToAuxInt(y << uint32(x))
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64SLT simplifies a signed set-less-than: a
// constant right operand in the 12-bit signed immediate range [-2048,2047]
// becomes SLTI, and comparing a value with itself is always 0.
// It reports whether a rewrite was applied.
func rewriteValueRISCV64_OpRISCV64SLT(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (SLT x (MOVDconst [val]))
	// cond: val >= -2048 && val <= 2047
	// result: (SLTI [val] x)
	for {
		x := v_0
		if v_1.Op != OpRISCV64MOVDconst {
			break
		}
		val := auxIntToInt64(v_1.AuxInt)
		if !(val >= -2048 && val <= 2047) {
			break
		}
		v.reset(OpRISCV64SLTI)
		v.AuxInt = int64ToAuxInt(val)
		v.AddArg(x)
		return true
	}
	// match: (SLT x x)
	// result: (MOVDconst [0])
	for {
		x := v_0
		if x != v_1 {
			break
		}
		v.reset(OpRISCV64MOVDconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64SLTI simplifies a signed set-less-than-
// immediate: a constant operand is folded outright, and range facts are used
// for the non-constant cases — an ANDI with a non-negative mask is bounded
// above by that mask, an ORI with a non-negative immediate is bounded below
// by it — letting the comparison resolve to a constant 1 or 0.
// It reports whether a rewrite was applied.
func rewriteValueRISCV64_OpRISCV64SLTI(v *Value) bool {
	v_0 := v.Args[0]
	// match: (SLTI [x] (MOVDconst [y]))
	// result: (MOVDconst [b2i(int64(y) < int64(x))])
	for {
		x := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpRISCV64MOVDconst {
			break
		}
		y := auxIntToInt64(v_0.AuxInt)
		v.reset(OpRISCV64MOVDconst)
		v.AuxInt = int64ToAuxInt(b2i(int64(y) < int64(x)))
		return true
	}
	// match: (SLTI [x] (ANDI [y] _))
	// cond: y >= 0 && int64(y) < int64(x)
	// result: (MOVDconst [1])
	for {
		x := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpRISCV64ANDI {
			break
		}
		y := auxIntToInt64(v_0.AuxInt)
		if !(y >= 0 && int64(y) < int64(x)) {
			break
		}
		v.reset(OpRISCV64MOVDconst)
		v.AuxInt = int64ToAuxInt(1)
		return true
	}
	// match: (SLTI [x] (ORI [y] _))
	// cond: y >= 0 && int64(y) >= int64(x)
	// result: (MOVDconst [0])
	for {
		x := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpRISCV64ORI {
			break
		}
		y := auxIntToInt64(v_0.AuxInt)
		if !(y >= 0 && int64(y) >= int64(x)) {
			break
		}
		v.reset(OpRISCV64MOVDconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64SLTIU simplifies an unsigned set-less-than-
// immediate: a constant operand is folded outright, and range facts are used
// for the non-constant cases — an ANDI with a non-negative mask is bounded
// above by that mask, an ORI with a non-negative immediate is bounded below
// by it — letting the comparison resolve to a constant 1 or 0.
// It reports whether a rewrite was applied.
func rewriteValueRISCV64_OpRISCV64SLTIU(v *Value) bool {
	v_0 := v.Args[0]
	// match: (SLTIU [x] (MOVDconst [y]))
	// result: (MOVDconst [b2i(uint64(y) < uint64(x))])
	for {
		x := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpRISCV64MOVDconst {
			break
		}
		y := auxIntToInt64(v_0.AuxInt)
		v.reset(OpRISCV64MOVDconst)
		v.AuxInt = int64ToAuxInt(b2i(uint64(y) < uint64(x)))
		return true
	}
	// match: (SLTIU [x] (ANDI [y] _))
	// cond: y >= 0 && uint64(y) < uint64(x)
	// result: (MOVDconst [1])
	for {
		x := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpRISCV64ANDI {
			break
		}
		y := auxIntToInt64(v_0.AuxInt)
		if !(y >= 0 && uint64(y) < uint64(x)) {
			break
		}
		v.reset(OpRISCV64MOVDconst)
		v.AuxInt = int64ToAuxInt(1)
		return true
	}
	// match: (SLTIU [x] (ORI [y] _))
	// cond: y >= 0 && uint64(y) >= uint64(x)
	// result: (MOVDconst [0])
	for {
		x := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpRISCV64ORI {
			break
		}
		y := auxIntToInt64(v_0.AuxInt)
		if !(y >= 0 && uint64(y) >= uint64(x)) {
			break
		}
		v.reset(OpRISCV64MOVDconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64SLTU simplifies an unsigned set-less-than: a
// constant right operand in the 12-bit signed immediate range [-2048,2047]
// becomes SLTIU, and comparing a value with itself is always 0.
// It reports whether a rewrite was applied.
func rewriteValueRISCV64_OpRISCV64SLTU(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (SLTU x (MOVDconst [val]))
	// cond: val >= -2048 && val <= 2047
	// result: (SLTIU [val] x)
	for {
		x := v_0
		if v_1.Op != OpRISCV64MOVDconst {
			break
		}
		val := auxIntToInt64(v_1.AuxInt)
		if !(val >= -2048 && val <= 2047) {
			break
		}
		v.reset(OpRISCV64SLTIU)
		v.AuxInt = int64ToAuxInt(val)
		v.AddArg(x)
		return true
	}
	// match: (SLTU x x)
	// result: (MOVDconst [0])
	for {
		x := v_0
		if x != v_1 {
			break
		}
		v.reset(OpRISCV64MOVDconst)
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64SNEZ simplifies a set-if-not-equal-zero:
// negation of the operand is irrelevant (NEG preserves zero-ness), and
// stacked SEQZ/SNEZ collapse — SNEZ(SEQZ x) reduces to SEQZ x and
// SNEZ(SNEZ x) to SNEZ x (both inner ops already produce 0 or 1).
// It reports whether a rewrite was applied.
func rewriteValueRISCV64_OpRISCV64SNEZ(v *Value) bool {
	v_0 := v.Args[0]
	// match: (SNEZ (NEG x))
	// result: (SNEZ x)
	for {
		if v_0.Op != OpRISCV64NEG {
			break
		}
		x := v_0.Args[0]
		v.reset(OpRISCV64SNEZ)
		v.AddArg(x)
		return true
	}
	// match: (SNEZ (SEQZ x))
	// result: (SEQZ x)
	for {
		if v_0.Op != OpRISCV64SEQZ {
			break
		}
		x := v_0.Args[0]
		v.reset(OpRISCV64SEQZ)
		v.AddArg(x)
		return true
	}
	// match: (SNEZ (SNEZ x))
	// result: (SNEZ x)
	for {
		if v_0.Op != OpRISCV64SNEZ {
			break
		}
		x := v_0.Args[0]
		v.reset(OpRISCV64SNEZ)
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64SRA converts an arithmetic shift-right by a
// constant amount into the immediate form SRAI, masking the amount to 6 bits
// (val&63) to match the hardware shift-amount semantics.
// It reports whether a rewrite was applied.
func rewriteValueRISCV64_OpRISCV64SRA(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (SRA x (MOVDconst [val]))
	// result: (SRAI [int64(val&63)] x)
	for {
		x := v_0
		if v_1.Op != OpRISCV64MOVDconst {
			break
		}
		val := auxIntToInt64(v_1.AuxInt)
		v.reset(OpRISCV64SRAI)
		v.AuxInt = int64ToAuxInt(int64(val & 63))
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64SRAI simplifies an arithmetic shift-right-
// immediate: shifting a sign-extended 32-bit value by 0..31 uses the 32-bit
// form SRAIW; shifting a sign-extended byte/halfword by at least its width
// reduces to extracting the sign bit (SLLI then SRAI 63, or SRAIW 31 for
// words); a constant operand is folded.
// It reports whether a rewrite was applied.
func rewriteValueRISCV64_OpRISCV64SRAI(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	// match: (SRAI <t> [x] (MOVWreg y))
	// cond: x >= 0 && x <= 31
	// result: (SRAIW <t> [int64(x)] y)
	for {
		t := v.Type
		x := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpRISCV64MOVWreg {
			break
		}
		y := v_0.Args[0]
		if !(x >= 0 && x <= 31) {
			break
		}
		v.reset(OpRISCV64SRAIW)
		v.Type = t
		v.AuxInt = int64ToAuxInt(int64(x))
		v.AddArg(y)
		return true
	}
	// match: (SRAI <t> [x] (MOVBreg y))
	// cond: x >= 8
	// result: (SRAI [63] (SLLI <t> [56] y))
	for {
		t := v.Type
		x := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpRISCV64MOVBreg {
			break
		}
		y := v_0.Args[0]
		if !(x >= 8) {
			break
		}
		v.reset(OpRISCV64SRAI)
		v.AuxInt = int64ToAuxInt(63)
		v0 := b.NewValue0(v.Pos, OpRISCV64SLLI, t)
		v0.AuxInt = int64ToAuxInt(56)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
	// match: (SRAI <t> [x] (MOVHreg y))
	// cond: x >= 16
	// result: (SRAI [63] (SLLI <t> [48] y))
	for {
		t := v.Type
		x := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpRISCV64MOVHreg {
			break
		}
		y := v_0.Args[0]
		if !(x >= 16) {
			break
		}
		v.reset(OpRISCV64SRAI)
		v.AuxInt = int64ToAuxInt(63)
		v0 := b.NewValue0(v.Pos, OpRISCV64SLLI, t)
		v0.AuxInt = int64ToAuxInt(48)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
	// match: (SRAI <t> [x] (MOVWreg y))
	// cond: x >= 32
	// result: (SRAIW [31] y)
	for {
		x := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpRISCV64MOVWreg {
			break
		}
		y := v_0.Args[0]
		if !(x >= 32) {
			break
		}
		v.reset(OpRISCV64SRAIW)
		v.AuxInt = int64ToAuxInt(31)
		v.AddArg(y)
		return true
	}
	// match: (SRAI [x] (MOVDconst [y]))
	// result: (MOVDconst [int64(y) >> uint32(x)])
	for {
		x := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpRISCV64MOVDconst {
			break
		}
		y := auxIntToInt64(v_0.AuxInt)
		v.reset(OpRISCV64MOVDconst)
		v.AuxInt = int64ToAuxInt(int64(y) >> uint32(x))
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64SRAW lowers a 32-bit arithmetic right shift
// with a constant amount to the immediate form SRAIW, masking the amount to
// 5 bits (SRAW only consumes the low 5 bits of the amount).
func rewriteValueRISCV64_OpRISCV64SRAW(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (SRAW x (MOVDconst [val]))
	// result: (SRAIW [int64(val&31)] x)
	for {
		x := v_0
		if v_1.Op != OpRISCV64MOVDconst {
			break
		}
		val := auxIntToInt64(v_1.AuxInt)
		v.reset(OpRISCV64SRAIW)
		v.AuxInt = int64ToAuxInt(int64(val & 31))
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64SRL lowers a register logical right shift
// whose amount is a known constant to the immediate form SRLI, masking the
// amount to 6 bits (RISC-V SRL only consumes the low 6 bits of the amount).
func rewriteValueRISCV64_OpRISCV64SRL(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (SRL x (MOVDconst [val]))
	// result: (SRLI [int64(val&63)] x)
	for {
		x := v_0
		if v_1.Op != OpRISCV64MOVDconst {
			break
		}
		val := auxIntToInt64(v_1.AuxInt)
		v.reset(OpRISCV64SRLI)
		v.AuxInt = int64ToAuxInt(int64(val & 63))
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64SRLI simplifies immediate logical right
// shifts: it narrows a shift of a zero-extended 32-bit value to SRLIW,
// turns over-wide shifts of zero-extended 8/16/32-bit values into the
// constant zero, and constant-folds shifts of MOVDconst.
func rewriteValueRISCV64_OpRISCV64SRLI(v *Value) bool {
	v_0 := v.Args[0]
	// match: (SRLI <t> [x] (MOVWUreg y))
	// cond: x >= 0 && x <= 31
	// result: (SRLIW <t> [int64(x)] y)
	for {
		t := v.Type
		x := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpRISCV64MOVWUreg {
			break
		}
		y := v_0.Args[0]
		if !(x >= 0 && x <= 31) {
			break
		}
		// SRLIW shifts only the low word, so the explicit zero-extension
		// can be dropped when the shift stays within 32 bits.
		v.reset(OpRISCV64SRLIW)
		v.Type = t
		v.AuxInt = int64ToAuxInt(int64(x))
		v.AddArg(y)
		return true
	}
	// match: (SRLI <t> [x] (MOVBUreg y))
	// cond: x >= 8
	// result: (MOVDconst <t> [0])
	for {
		t := v.Type
		x := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpRISCV64MOVBUreg {
			break
		}
		if !(x >= 8) {
			break
		}
		// Logically shifting a zero-extended byte right by >= 8 clears
		// every bit, so the result is the constant 0.
		v.reset(OpRISCV64MOVDconst)
		v.Type = t
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	// match: (SRLI <t> [x] (MOVHUreg y))
	// cond: x >= 16
	// result: (MOVDconst <t> [0])
	for {
		t := v.Type
		x := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpRISCV64MOVHUreg {
			break
		}
		if !(x >= 16) {
			break
		}
		v.reset(OpRISCV64MOVDconst)
		v.Type = t
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	// match: (SRLI <t> [x] (MOVWUreg y))
	// cond: x >= 32
	// result: (MOVDconst <t> [0])
	for {
		t := v.Type
		x := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpRISCV64MOVWUreg {
			break
		}
		if !(x >= 32) {
			break
		}
		v.reset(OpRISCV64MOVDconst)
		v.Type = t
		v.AuxInt = int64ToAuxInt(0)
		return true
	}
	// match: (SRLI [x] (MOVDconst [y]))
	// result: (MOVDconst [int64(uint64(y) >> uint32(x))])
	for {
		x := auxIntToInt64(v.AuxInt)
		if v_0.Op != OpRISCV64MOVDconst {
			break
		}
		y := auxIntToInt64(v_0.AuxInt)
		v.reset(OpRISCV64MOVDconst)
		v.AuxInt = int64ToAuxInt(int64(uint64(y) >> uint32(x)))
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64SRLW lowers a 32-bit logical right shift
// with a constant amount to the immediate form SRLIW, masking the amount to
// 5 bits (SRLW only consumes the low 5 bits of the amount).
func rewriteValueRISCV64_OpRISCV64SRLW(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (SRLW x (MOVDconst [val]))
	// result: (SRLIW [int64(val&31)] x)
	for {
		x := v_0
		if v_1.Op != OpRISCV64MOVDconst {
			break
		}
		val := auxIntToInt64(v_1.AuxInt)
		v.reset(OpRISCV64SRLIW)
		v.AuxInt = int64ToAuxInt(int64(val & 31))
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64SUB simplifies 64-bit subtraction:
// a constant operand (when its negation fits in 32 bits, i.e. in ADDI's
// immediate range after sign-extension checks) becomes an ADDI, and
// subtraction of/from zero becomes the identity or a NEG.
func rewriteValueRISCV64_OpRISCV64SUB(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (SUB x (MOVDconst [val]))
	// cond: is32Bit(-val)
	// result: (ADDI [-val] x)
	for {
		x := v_0
		if v_1.Op != OpRISCV64MOVDconst {
			break
		}
		val := auxIntToInt64(v_1.AuxInt)
		if !(is32Bit(-val)) {
			break
		}
		// x - val == x + (-val), which fits the add-immediate form.
		v.reset(OpRISCV64ADDI)
		v.AuxInt = int64ToAuxInt(-val)
		v.AddArg(x)
		return true
	}
	// match: (SUB <t> (MOVDconst [val]) y)
	// cond: is32Bit(-val)
	// result: (NEG (ADDI <t> [-val] y))
	for {
		t := v.Type
		if v_0.Op != OpRISCV64MOVDconst {
			break
		}
		val := auxIntToInt64(v_0.AuxInt)
		y := v_1
		if !(is32Bit(-val)) {
			break
		}
		// val - y == -(y - val) == -(y + (-val)).
		v.reset(OpRISCV64NEG)
		v0 := b.NewValue0(v.Pos, OpRISCV64ADDI, t)
		v0.AuxInt = int64ToAuxInt(-val)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
	// match: (SUB x (MOVDconst [0]))
	// result: x
	for {
		x := v_0
		if v_1.Op != OpRISCV64MOVDconst || auxIntToInt64(v_1.AuxInt) != 0 {
			break
		}
		v.copyOf(x)
		return true
	}
	// match: (SUB (MOVDconst [0]) x)
	// result: (NEG x)
	for {
		if v_0.Op != OpRISCV64MOVDconst || auxIntToInt64(v_0.AuxInt) != 0 {
			break
		}
		x := v_1
		v.reset(OpRISCV64NEG)
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64SUBW simplifies 32-bit subtraction against
// zero: x-0 becomes ADDIW 0 (kept rather than plain x, since ADDIW also
// performs SUBW's sign-extension of the 32-bit result) and 0-x becomes NEGW.
func rewriteValueRISCV64_OpRISCV64SUBW(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (SUBW x (MOVDconst [0]))
	// result: (ADDIW [0] x)
	for {
		x := v_0
		if v_1.Op != OpRISCV64MOVDconst || auxIntToInt64(v_1.AuxInt) != 0 {
			break
		}
		v.reset(OpRISCV64ADDIW)
		v.AuxInt = int64ToAuxInt(0)
		v.AddArg(x)
		return true
	}
	// match: (SUBW (MOVDconst [0]) x)
	// result: (NEGW x)
	for {
		if v_0.Op != OpRISCV64MOVDconst || auxIntToInt64(v_0.AuxInt) != 0 {
			break
		}
		x := v_1
		v.reset(OpRISCV64NEGW)
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRISCV64XOR lowers an XOR with a constant operand
// that fits in 32 bits to the immediate form XORI. The inner loop tries
// both argument orders because XOR is commutative.
func rewriteValueRISCV64_OpRISCV64XOR(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (XOR (MOVDconst [val]) x)
	// cond: is32Bit(val)
	// result: (XORI [val] x)
	for {
		// Swap v_0/v_1 on the second iteration to match the constant in
		// either argument position.
		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
			if v_0.Op != OpRISCV64MOVDconst {
				continue
			}
			val := auxIntToInt64(v_0.AuxInt)
			x := v_1
			if !(is32Bit(val)) {
				continue
			}
			v.reset(OpRISCV64XORI)
			v.AuxInt = int64ToAuxInt(val)
			v.AddArg(x)
			return true
		}
		break
	}
	return false
}
// rewriteValueRISCV64_OpRotateLeft16 expands a 16-bit rotate by a constant
// into the classic shift pair: (x << (c&15)) | (x >> (-c&15)). Only the
// constant-amount form is rewritten here.
func rewriteValueRISCV64_OpRotateLeft16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (RotateLeft16 <t> x (MOVDconst [c]))
	// result: (Or16 (Lsh16x64 <t> x (MOVDconst [c&15])) (Rsh16Ux64 <t> x (MOVDconst [-c&15])))
	for {
		t := v.Type
		x := v_0
		if v_1.Op != OpRISCV64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		v.reset(OpOr16)
		v0 := b.NewValue0(v.Pos, OpLsh16x64, t)
		v1 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
		v1.AuxInt = int64ToAuxInt(c & 15)
		v0.AddArg2(x, v1)
		v2 := b.NewValue0(v.Pos, OpRsh16Ux64, t)
		v3 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
		v3.AuxInt = int64ToAuxInt(-c & 15)
		v2.AddArg2(x, v3)
		v.AddArg2(v0, v2)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRotateLeft32 expands a 32-bit rotate by a constant
// into the shift pair (x << (c&31)) | (x >> (-c&31)).
func rewriteValueRISCV64_OpRotateLeft32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (RotateLeft32 <t> x (MOVDconst [c]))
	// result: (Or32 (Lsh32x64 <t> x (MOVDconst [c&31])) (Rsh32Ux64 <t> x (MOVDconst [-c&31])))
	for {
		t := v.Type
		x := v_0
		if v_1.Op != OpRISCV64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		v.reset(OpOr32)
		v0 := b.NewValue0(v.Pos, OpLsh32x64, t)
		v1 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
		v1.AuxInt = int64ToAuxInt(c & 31)
		v0.AddArg2(x, v1)
		v2 := b.NewValue0(v.Pos, OpRsh32Ux64, t)
		v3 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
		v3.AuxInt = int64ToAuxInt(-c & 31)
		v2.AddArg2(x, v3)
		v.AddArg2(v0, v2)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRotateLeft64 expands a 64-bit rotate by a constant
// into the shift pair (x << (c&63)) | (x >> (-c&63)).
func rewriteValueRISCV64_OpRotateLeft64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (RotateLeft64 <t> x (MOVDconst [c]))
	// result: (Or64 (Lsh64x64 <t> x (MOVDconst [c&63])) (Rsh64Ux64 <t> x (MOVDconst [-c&63])))
	for {
		t := v.Type
		x := v_0
		if v_1.Op != OpRISCV64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		v.reset(OpOr64)
		v0 := b.NewValue0(v.Pos, OpLsh64x64, t)
		v1 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
		v1.AuxInt = int64ToAuxInt(c & 63)
		v0.AddArg2(x, v1)
		v2 := b.NewValue0(v.Pos, OpRsh64Ux64, t)
		v3 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
		v3.AuxInt = int64ToAuxInt(-c & 63)
		v2.AddArg2(x, v3)
		v.AddArg2(v0, v2)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRotateLeft8 expands an 8-bit rotate by a constant
// into the shift pair (x << (c&7)) | (x >> (-c&7)).
func rewriteValueRISCV64_OpRotateLeft8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (RotateLeft8 <t> x (MOVDconst [c]))
	// result: (Or8 (Lsh8x64 <t> x (MOVDconst [c&7])) (Rsh8Ux64 <t> x (MOVDconst [-c&7])))
	for {
		t := v.Type
		x := v_0
		if v_1.Op != OpRISCV64MOVDconst {
			break
		}
		c := auxIntToInt64(v_1.AuxInt)
		v.reset(OpOr8)
		v0 := b.NewValue0(v.Pos, OpLsh8x64, t)
		v1 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
		v1.AuxInt = int64ToAuxInt(c & 7)
		v0.AddArg2(x, v1)
		v2 := b.NewValue0(v.Pos, OpRsh8Ux64, t)
		v3 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
		v3.AuxInt = int64ToAuxInt(-c & 7)
		v2.AddArg2(x, v3)
		v.AddArg2(v0, v2)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRsh16Ux16 lowers an unsigned 16-bit right shift by a
// 16-bit amount. When the amount may exceed the operand width, Go semantics
// require a zero result, so the SRL output is ANDed with an all-ones/all-zeros
// mask computed as Neg16(SLTIU [64] amount); when the shift is known bounded,
// a bare SRL of the zero-extended operand suffices.
func rewriteValueRISCV64_OpRsh16Ux16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh16Ux16 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (AND (SRL <t> (ZeroExt16to64 x) y) (Neg16 <t> (SLTIU <t> [64] (ZeroExt16to64 y))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64AND)
		v0 := b.NewValue0(v.Pos, OpRISCV64SRL, t)
		v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v1.AddArg(x)
		v0.AddArg2(v1, y)
		v2 := b.NewValue0(v.Pos, OpNeg16, t)
		v3 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
		v3.AuxInt = int64ToAuxInt(64)
		v4 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v4.AddArg(y)
		v3.AddArg(v4)
		v2.AddArg(v3)
		v.AddArg2(v0, v2)
		return true
	}
	// match: (Rsh16Ux16 x y)
	// cond: shiftIsBounded(v)
	// result: (SRL (ZeroExt16to64 x) y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRL)
		v0 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v0.AddArg(x)
		v.AddArg2(v0, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRsh16Ux32 lowers an unsigned 16-bit right shift by a
// 32-bit amount; same masking scheme as Rsh16Ux16, with the amount
// zero-extended from 32 bits for the bound test.
func rewriteValueRISCV64_OpRsh16Ux32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh16Ux32 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (AND (SRL <t> (ZeroExt16to64 x) y) (Neg16 <t> (SLTIU <t> [64] (ZeroExt32to64 y))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64AND)
		v0 := b.NewValue0(v.Pos, OpRISCV64SRL, t)
		v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v1.AddArg(x)
		v0.AddArg2(v1, y)
		v2 := b.NewValue0(v.Pos, OpNeg16, t)
		v3 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
		v3.AuxInt = int64ToAuxInt(64)
		v4 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v4.AddArg(y)
		v3.AddArg(v4)
		v2.AddArg(v3)
		v.AddArg2(v0, v2)
		return true
	}
	// match: (Rsh16Ux32 x y)
	// cond: shiftIsBounded(v)
	// result: (SRL (ZeroExt16to64 x) y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRL)
		v0 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v0.AddArg(x)
		v.AddArg2(v0, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRsh16Ux64 lowers an unsigned 16-bit right shift by a
// 64-bit amount; same masking scheme as Rsh16Ux16, but the amount is already
// 64-bit so no zero-extension is needed for the bound test.
func rewriteValueRISCV64_OpRsh16Ux64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh16Ux64 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (AND (SRL <t> (ZeroExt16to64 x) y) (Neg16 <t> (SLTIU <t> [64] y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64AND)
		v0 := b.NewValue0(v.Pos, OpRISCV64SRL, t)
		v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v1.AddArg(x)
		v0.AddArg2(v1, y)
		v2 := b.NewValue0(v.Pos, OpNeg16, t)
		v3 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
		v3.AuxInt = int64ToAuxInt(64)
		v3.AddArg(y)
		v2.AddArg(v3)
		v.AddArg2(v0, v2)
		return true
	}
	// match: (Rsh16Ux64 x y)
	// cond: shiftIsBounded(v)
	// result: (SRL (ZeroExt16to64 x) y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRL)
		v0 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v0.AddArg(x)
		v.AddArg2(v0, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRsh16Ux8 lowers an unsigned 16-bit right shift by an
// 8-bit amount; same masking scheme as Rsh16Ux16, with the amount
// zero-extended from 8 bits for the bound test.
func rewriteValueRISCV64_OpRsh16Ux8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh16Ux8 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (AND (SRL <t> (ZeroExt16to64 x) y) (Neg16 <t> (SLTIU <t> [64] (ZeroExt8to64 y))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64AND)
		v0 := b.NewValue0(v.Pos, OpRISCV64SRL, t)
		v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v1.AddArg(x)
		v0.AddArg2(v1, y)
		v2 := b.NewValue0(v.Pos, OpNeg16, t)
		v3 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
		v3.AuxInt = int64ToAuxInt(64)
		v4 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v4.AddArg(y)
		v3.AddArg(v4)
		v2.AddArg(v3)
		v.AddArg2(v0, v2)
		return true
	}
	// match: (Rsh16Ux8 x y)
	// cond: shiftIsBounded(v)
	// result: (SRL (ZeroExt16to64 x) y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRL)
		v0 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v0.AddArg(x)
		v.AddArg2(v0, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRsh16x16 lowers a signed 16-bit right shift by a
// 16-bit amount. When the amount may exceed the operand width, the amount is
// clamped instead of masked: OR y (ADDI [-1] (SLTIU [64] y')) yields y when
// y' < 64 and all-ones otherwise, so SRA then saturates to the sign bit as
// Go semantics require; when bounded, a bare SRA of the sign-extended
// operand suffices.
func rewriteValueRISCV64_OpRsh16x16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh16x16 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (SRA <t> (SignExt16to64 x) (OR <y.Type> y (ADDI <y.Type> [-1] (SLTIU <y.Type> [64] (ZeroExt16to64 y)))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRA)
		v.Type = t
		v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpRISCV64OR, y.Type)
		v2 := b.NewValue0(v.Pos, OpRISCV64ADDI, y.Type)
		v2.AuxInt = int64ToAuxInt(-1)
		v3 := b.NewValue0(v.Pos, OpRISCV64SLTIU, y.Type)
		v3.AuxInt = int64ToAuxInt(64)
		v4 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v4.AddArg(y)
		v3.AddArg(v4)
		v2.AddArg(v3)
		v1.AddArg2(y, v2)
		v.AddArg2(v0, v1)
		return true
	}
	// match: (Rsh16x16 x y)
	// cond: shiftIsBounded(v)
	// result: (SRA (SignExt16to64 x) y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRA)
		v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
		v0.AddArg(x)
		v.AddArg2(v0, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRsh16x32 lowers a signed 16-bit right shift by a
// 32-bit amount; same clamping scheme as Rsh16x16, with the amount
// zero-extended from 32 bits for the bound test.
func rewriteValueRISCV64_OpRsh16x32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh16x32 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (SRA <t> (SignExt16to64 x) (OR <y.Type> y (ADDI <y.Type> [-1] (SLTIU <y.Type> [64] (ZeroExt32to64 y)))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRA)
		v.Type = t
		v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpRISCV64OR, y.Type)
		v2 := b.NewValue0(v.Pos, OpRISCV64ADDI, y.Type)
		v2.AuxInt = int64ToAuxInt(-1)
		v3 := b.NewValue0(v.Pos, OpRISCV64SLTIU, y.Type)
		v3.AuxInt = int64ToAuxInt(64)
		v4 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v4.AddArg(y)
		v3.AddArg(v4)
		v2.AddArg(v3)
		v1.AddArg2(y, v2)
		v.AddArg2(v0, v1)
		return true
	}
	// match: (Rsh16x32 x y)
	// cond: shiftIsBounded(v)
	// result: (SRA (SignExt16to64 x) y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRA)
		v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
		v0.AddArg(x)
		v.AddArg2(v0, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRsh16x64 lowers a signed 16-bit right shift by a
// 64-bit amount; same clamping scheme as Rsh16x16, but the amount is already
// 64-bit so no zero-extension is needed for the bound test.
func rewriteValueRISCV64_OpRsh16x64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh16x64 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (SRA <t> (SignExt16to64 x) (OR <y.Type> y (ADDI <y.Type> [-1] (SLTIU <y.Type> [64] y))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRA)
		v.Type = t
		v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpRISCV64OR, y.Type)
		v2 := b.NewValue0(v.Pos, OpRISCV64ADDI, y.Type)
		v2.AuxInt = int64ToAuxInt(-1)
		v3 := b.NewValue0(v.Pos, OpRISCV64SLTIU, y.Type)
		v3.AuxInt = int64ToAuxInt(64)
		v3.AddArg(y)
		v2.AddArg(v3)
		v1.AddArg2(y, v2)
		v.AddArg2(v0, v1)
		return true
	}
	// match: (Rsh16x64 x y)
	// cond: shiftIsBounded(v)
	// result: (SRA (SignExt16to64 x) y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRA)
		v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
		v0.AddArg(x)
		v.AddArg2(v0, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRsh16x8 lowers a signed 16-bit right shift by an
// 8-bit amount; same clamping scheme as Rsh16x16, with the amount
// zero-extended from 8 bits for the bound test.
func rewriteValueRISCV64_OpRsh16x8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh16x8 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (SRA <t> (SignExt16to64 x) (OR <y.Type> y (ADDI <y.Type> [-1] (SLTIU <y.Type> [64] (ZeroExt8to64 y)))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRA)
		v.Type = t
		v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpRISCV64OR, y.Type)
		v2 := b.NewValue0(v.Pos, OpRISCV64ADDI, y.Type)
		v2.AuxInt = int64ToAuxInt(-1)
		v3 := b.NewValue0(v.Pos, OpRISCV64SLTIU, y.Type)
		v3.AuxInt = int64ToAuxInt(64)
		v4 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v4.AddArg(y)
		v3.AddArg(v4)
		v2.AddArg(v3)
		v1.AddArg2(y, v2)
		v.AddArg2(v0, v1)
		return true
	}
	// match: (Rsh16x8 x y)
	// cond: shiftIsBounded(v)
	// result: (SRA (SignExt16to64 x) y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRA)
		v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
		v0.AddArg(x)
		v.AddArg2(v0, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRsh32Ux16 lowers an unsigned 32-bit right shift by a
// 16-bit amount using the word-sized SRLW (no operand extension needed).
// When the amount may be >= 32, the result is masked to zero via
// Neg32(SLTIU [32] amount), matching Go's shift semantics.
func rewriteValueRISCV64_OpRsh32Ux16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh32Ux16 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (AND (SRLW <t> x y) (Neg32 <t> (SLTIU <t> [32] (ZeroExt16to64 y))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64AND)
		v0 := b.NewValue0(v.Pos, OpRISCV64SRLW, t)
		v0.AddArg2(x, y)
		v1 := b.NewValue0(v.Pos, OpNeg32, t)
		v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
		v2.AuxInt = int64ToAuxInt(32)
		v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v3.AddArg(y)
		v2.AddArg(v3)
		v1.AddArg(v2)
		v.AddArg2(v0, v1)
		return true
	}
	// match: (Rsh32Ux16 x y)
	// cond: shiftIsBounded(v)
	// result: (SRLW x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRLW)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRsh32Ux32 lowers an unsigned 32-bit right shift by a
// 32-bit amount; same masking scheme as Rsh32Ux16, with the amount
// zero-extended from 32 bits for the bound test.
func rewriteValueRISCV64_OpRsh32Ux32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh32Ux32 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (AND (SRLW <t> x y) (Neg32 <t> (SLTIU <t> [32] (ZeroExt32to64 y))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64AND)
		v0 := b.NewValue0(v.Pos, OpRISCV64SRLW, t)
		v0.AddArg2(x, y)
		v1 := b.NewValue0(v.Pos, OpNeg32, t)
		v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
		v2.AuxInt = int64ToAuxInt(32)
		v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v3.AddArg(y)
		v2.AddArg(v3)
		v1.AddArg(v2)
		v.AddArg2(v0, v1)
		return true
	}
	// match: (Rsh32Ux32 x y)
	// cond: shiftIsBounded(v)
	// result: (SRLW x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRLW)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRsh32Ux64 lowers an unsigned 32-bit right shift by a
// 64-bit amount; same masking scheme as Rsh32Ux16, but the amount is already
// 64-bit so no zero-extension is needed for the bound test.
func rewriteValueRISCV64_OpRsh32Ux64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (Rsh32Ux64 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (AND (SRLW <t> x y) (Neg32 <t> (SLTIU <t> [32] y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64AND)
		v0 := b.NewValue0(v.Pos, OpRISCV64SRLW, t)
		v0.AddArg2(x, y)
		v1 := b.NewValue0(v.Pos, OpNeg32, t)
		v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
		v2.AuxInt = int64ToAuxInt(32)
		v2.AddArg(y)
		v1.AddArg(v2)
		v.AddArg2(v0, v1)
		return true
	}
	// match: (Rsh32Ux64 x y)
	// cond: shiftIsBounded(v)
	// result: (SRLW x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRLW)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRsh32Ux8 lowers an unsigned 32-bit right shift by an
// 8-bit amount; same masking scheme as Rsh32Ux16, with the amount
// zero-extended from 8 bits for the bound test.
func rewriteValueRISCV64_OpRsh32Ux8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh32Ux8 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (AND (SRLW <t> x y) (Neg32 <t> (SLTIU <t> [32] (ZeroExt8to64 y))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64AND)
		v0 := b.NewValue0(v.Pos, OpRISCV64SRLW, t)
		v0.AddArg2(x, y)
		v1 := b.NewValue0(v.Pos, OpNeg32, t)
		v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
		v2.AuxInt = int64ToAuxInt(32)
		v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v3.AddArg(y)
		v2.AddArg(v3)
		v1.AddArg(v2)
		v.AddArg2(v0, v1)
		return true
	}
	// match: (Rsh32Ux8 x y)
	// cond: shiftIsBounded(v)
	// result: (SRLW x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRLW)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRsh32x16 lowers a signed 32-bit right shift by a
// 16-bit amount using the word-sized SRAW. When the amount may be >= 32, it
// is clamped: OR y (ADDI [-1] (SLTIU [32] y')) yields y when y' < 32 and
// all-ones otherwise, so SRAW saturates to the sign bit per Go semantics.
func rewriteValueRISCV64_OpRsh32x16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh32x16 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (SRAW <t> x (OR <y.Type> y (ADDI <y.Type> [-1] (SLTIU <y.Type> [32] (ZeroExt16to64 y)))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRAW)
		v.Type = t
		v0 := b.NewValue0(v.Pos, OpRISCV64OR, y.Type)
		v1 := b.NewValue0(v.Pos, OpRISCV64ADDI, y.Type)
		v1.AuxInt = int64ToAuxInt(-1)
		v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, y.Type)
		v2.AuxInt = int64ToAuxInt(32)
		v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v3.AddArg(y)
		v2.AddArg(v3)
		v1.AddArg(v2)
		v0.AddArg2(y, v1)
		v.AddArg2(x, v0)
		return true
	}
	// match: (Rsh32x16 x y)
	// cond: shiftIsBounded(v)
	// result: (SRAW x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRAW)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRsh32x32 lowers a signed 32-bit right shift by a
// 32-bit amount; same clamping scheme as Rsh32x16, with the amount
// zero-extended from 32 bits for the bound test.
func rewriteValueRISCV64_OpRsh32x32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh32x32 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (SRAW <t> x (OR <y.Type> y (ADDI <y.Type> [-1] (SLTIU <y.Type> [32] (ZeroExt32to64 y)))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRAW)
		v.Type = t
		v0 := b.NewValue0(v.Pos, OpRISCV64OR, y.Type)
		v1 := b.NewValue0(v.Pos, OpRISCV64ADDI, y.Type)
		v1.AuxInt = int64ToAuxInt(-1)
		v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, y.Type)
		v2.AuxInt = int64ToAuxInt(32)
		v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v3.AddArg(y)
		v2.AddArg(v3)
		v1.AddArg(v2)
		v0.AddArg2(y, v1)
		v.AddArg2(x, v0)
		return true
	}
	// match: (Rsh32x32 x y)
	// cond: shiftIsBounded(v)
	// result: (SRAW x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRAW)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRsh32x64 lowers Rsh32x64 (signed right shift, 32-bit
// value, 64-bit shift amount) to SRAW. The 64-bit shift amount needs no
// widening; if the shift is not provably bounded it is saturated to all ones
// for y >= 32 via OR y (ADDI [-1] (SLTIU [32] y)).
func rewriteValueRISCV64_OpRsh32x64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (Rsh32x64 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (SRAW <t> x (OR <y.Type> y (ADDI <y.Type> [-1] (SLTIU <y.Type> [32] y))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRAW)
		v.Type = t
		v0 := b.NewValue0(v.Pos, OpRISCV64OR, y.Type)
		v1 := b.NewValue0(v.Pos, OpRISCV64ADDI, y.Type)
		v1.AuxInt = int64ToAuxInt(-1)
		v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, y.Type)
		v2.AuxInt = int64ToAuxInt(32)
		v2.AddArg(y)
		v1.AddArg(v2)
		v0.AddArg2(y, v1)
		v.AddArg2(x, v0)
		return true
	}
	// match: (Rsh32x64 x y)
	// cond: shiftIsBounded(v)
	// result: (SRAW x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRAW)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRsh32x8 lowers Rsh32x8 (signed right shift, 32-bit
// value, 8-bit shift amount) to SRAW. If the shift is not provably bounded,
// the shift amount is saturated to all ones for y >= 32 via
// OR y (ADDI [-1] (SLTIU [32] (ZeroExt8to64 y))).
func rewriteValueRISCV64_OpRsh32x8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh32x8 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (SRAW <t> x (OR <y.Type> y (ADDI <y.Type> [-1] (SLTIU <y.Type> [32] (ZeroExt8to64 y)))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRAW)
		v.Type = t
		v0 := b.NewValue0(v.Pos, OpRISCV64OR, y.Type)
		v1 := b.NewValue0(v.Pos, OpRISCV64ADDI, y.Type)
		v1.AuxInt = int64ToAuxInt(-1)
		v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, y.Type)
		v2.AuxInt = int64ToAuxInt(32)
		v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v3.AddArg(y)
		v2.AddArg(v3)
		v1.AddArg(v2)
		v0.AddArg2(y, v1)
		v.AddArg2(x, v0)
		return true
	}
	// match: (Rsh32x8 x y)
	// cond: shiftIsBounded(v)
	// result: (SRAW x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRAW)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRsh64Ux16 lowers Rsh64Ux16 (unsigned right shift,
// 64-bit value, 16-bit shift amount) to SRL. If the shift is not provably
// bounded, the result is masked: Neg64 (SLTIU [64] (ZeroExt16to64 y)) is all
// ones when y < 64 and zero otherwise, so out-of-range shifts yield 0.
func rewriteValueRISCV64_OpRsh64Ux16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh64Ux16 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (AND (SRL <t> x y) (Neg64 <t> (SLTIU <t> [64] (ZeroExt16to64 y))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64AND)
		v0 := b.NewValue0(v.Pos, OpRISCV64SRL, t)
		v0.AddArg2(x, y)
		v1 := b.NewValue0(v.Pos, OpNeg64, t)
		v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
		v2.AuxInt = int64ToAuxInt(64)
		v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v3.AddArg(y)
		v2.AddArg(v3)
		v1.AddArg(v2)
		v.AddArg2(v0, v1)
		return true
	}
	// match: (Rsh64Ux16 x y)
	// cond: shiftIsBounded(v)
	// result: (SRL x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRL)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRsh64Ux32 lowers Rsh64Ux32 (unsigned right shift,
// 64-bit value, 32-bit shift amount) to SRL. If the shift is not provably
// bounded, the result is masked to 0 for y >= 64 via
// Neg64 (SLTIU [64] (ZeroExt32to64 y)).
func rewriteValueRISCV64_OpRsh64Ux32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh64Ux32 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (AND (SRL <t> x y) (Neg64 <t> (SLTIU <t> [64] (ZeroExt32to64 y))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64AND)
		v0 := b.NewValue0(v.Pos, OpRISCV64SRL, t)
		v0.AddArg2(x, y)
		v1 := b.NewValue0(v.Pos, OpNeg64, t)
		v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
		v2.AuxInt = int64ToAuxInt(64)
		v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v3.AddArg(y)
		v2.AddArg(v3)
		v1.AddArg(v2)
		v.AddArg2(v0, v1)
		return true
	}
	// match: (Rsh64Ux32 x y)
	// cond: shiftIsBounded(v)
	// result: (SRL x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRL)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRsh64Ux64 lowers Rsh64Ux64 (unsigned right shift,
// 64-bit value, 64-bit shift amount) to SRL. The shift amount needs no
// widening; if the shift is not provably bounded, the result is masked to 0
// for y >= 64 via Neg64 (SLTIU [64] y).
func rewriteValueRISCV64_OpRsh64Ux64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (Rsh64Ux64 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (AND (SRL <t> x y) (Neg64 <t> (SLTIU <t> [64] y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64AND)
		v0 := b.NewValue0(v.Pos, OpRISCV64SRL, t)
		v0.AddArg2(x, y)
		v1 := b.NewValue0(v.Pos, OpNeg64, t)
		v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
		v2.AuxInt = int64ToAuxInt(64)
		v2.AddArg(y)
		v1.AddArg(v2)
		v.AddArg2(v0, v1)
		return true
	}
	// match: (Rsh64Ux64 x y)
	// cond: shiftIsBounded(v)
	// result: (SRL x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRL)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRsh64Ux8 lowers Rsh64Ux8 (unsigned right shift,
// 64-bit value, 8-bit shift amount) to SRL. If the shift is not provably
// bounded, the result is masked to 0 for y >= 64 via
// Neg64 (SLTIU [64] (ZeroExt8to64 y)).
func rewriteValueRISCV64_OpRsh64Ux8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh64Ux8 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (AND (SRL <t> x y) (Neg64 <t> (SLTIU <t> [64] (ZeroExt8to64 y))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64AND)
		v0 := b.NewValue0(v.Pos, OpRISCV64SRL, t)
		v0.AddArg2(x, y)
		v1 := b.NewValue0(v.Pos, OpNeg64, t)
		v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
		v2.AuxInt = int64ToAuxInt(64)
		v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v3.AddArg(y)
		v2.AddArg(v3)
		v1.AddArg(v2)
		v.AddArg2(v0, v1)
		return true
	}
	// match: (Rsh64Ux8 x y)
	// cond: shiftIsBounded(v)
	// result: (SRL x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRL)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRsh64x16 lowers Rsh64x16 (signed right shift, 64-bit
// value, 16-bit shift amount) to SRA. If the shift is not provably bounded,
// the shift amount is saturated to all ones for y >= 64 via
// OR y (ADDI [-1] (SLTIU [64] (ZeroExt16to64 y))).
func rewriteValueRISCV64_OpRsh64x16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh64x16 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (SRA <t> x (OR <y.Type> y (ADDI <y.Type> [-1] (SLTIU <y.Type> [64] (ZeroExt16to64 y)))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRA)
		v.Type = t
		v0 := b.NewValue0(v.Pos, OpRISCV64OR, y.Type)
		v1 := b.NewValue0(v.Pos, OpRISCV64ADDI, y.Type)
		v1.AuxInt = int64ToAuxInt(-1)
		v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, y.Type)
		v2.AuxInt = int64ToAuxInt(64)
		v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v3.AddArg(y)
		v2.AddArg(v3)
		v1.AddArg(v2)
		v0.AddArg2(y, v1)
		v.AddArg2(x, v0)
		return true
	}
	// match: (Rsh64x16 x y)
	// cond: shiftIsBounded(v)
	// result: (SRA x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRA)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRsh64x32 lowers Rsh64x32 (signed right shift, 64-bit
// value, 32-bit shift amount) to SRA. If the shift is not provably bounded,
// the shift amount is saturated to all ones for y >= 64 via
// OR y (ADDI [-1] (SLTIU [64] (ZeroExt32to64 y))).
func rewriteValueRISCV64_OpRsh64x32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh64x32 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (SRA <t> x (OR <y.Type> y (ADDI <y.Type> [-1] (SLTIU <y.Type> [64] (ZeroExt32to64 y)))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRA)
		v.Type = t
		v0 := b.NewValue0(v.Pos, OpRISCV64OR, y.Type)
		v1 := b.NewValue0(v.Pos, OpRISCV64ADDI, y.Type)
		v1.AuxInt = int64ToAuxInt(-1)
		v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, y.Type)
		v2.AuxInt = int64ToAuxInt(64)
		v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v3.AddArg(y)
		v2.AddArg(v3)
		v1.AddArg(v2)
		v0.AddArg2(y, v1)
		v.AddArg2(x, v0)
		return true
	}
	// match: (Rsh64x32 x y)
	// cond: shiftIsBounded(v)
	// result: (SRA x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRA)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRsh64x64 lowers Rsh64x64 (signed right shift, 64-bit
// value, 64-bit shift amount) to SRA. The shift amount needs no widening; if
// the shift is not provably bounded it is saturated to all ones for y >= 64
// via OR y (ADDI [-1] (SLTIU [64] y)).
func rewriteValueRISCV64_OpRsh64x64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	// match: (Rsh64x64 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (SRA <t> x (OR <y.Type> y (ADDI <y.Type> [-1] (SLTIU <y.Type> [64] y))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRA)
		v.Type = t
		v0 := b.NewValue0(v.Pos, OpRISCV64OR, y.Type)
		v1 := b.NewValue0(v.Pos, OpRISCV64ADDI, y.Type)
		v1.AuxInt = int64ToAuxInt(-1)
		v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, y.Type)
		v2.AuxInt = int64ToAuxInt(64)
		v2.AddArg(y)
		v1.AddArg(v2)
		v0.AddArg2(y, v1)
		v.AddArg2(x, v0)
		return true
	}
	// match: (Rsh64x64 x y)
	// cond: shiftIsBounded(v)
	// result: (SRA x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRA)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRsh64x8 lowers Rsh64x8 (signed right shift, 64-bit
// value, 8-bit shift amount) to SRA. If the shift is not provably bounded,
// the shift amount is saturated to all ones for y >= 64 via
// OR y (ADDI [-1] (SLTIU [64] (ZeroExt8to64 y))).
func rewriteValueRISCV64_OpRsh64x8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh64x8 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (SRA <t> x (OR <y.Type> y (ADDI <y.Type> [-1] (SLTIU <y.Type> [64] (ZeroExt8to64 y)))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRA)
		v.Type = t
		v0 := b.NewValue0(v.Pos, OpRISCV64OR, y.Type)
		v1 := b.NewValue0(v.Pos, OpRISCV64ADDI, y.Type)
		v1.AuxInt = int64ToAuxInt(-1)
		v2 := b.NewValue0(v.Pos, OpRISCV64SLTIU, y.Type)
		v2.AuxInt = int64ToAuxInt(64)
		v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v3.AddArg(y)
		v2.AddArg(v3)
		v1.AddArg(v2)
		v0.AddArg2(y, v1)
		v.AddArg2(x, v0)
		return true
	}
	// match: (Rsh64x8 x y)
	// cond: shiftIsBounded(v)
	// result: (SRA x y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRA)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRsh8Ux16 lowers Rsh8Ux16 (unsigned right shift, 8-bit
// value, 16-bit shift amount) to SRL on x zero-extended to 64 bits, so zeros
// are shifted in above bit 7. If the shift is not provably bounded, the result
// is additionally masked to 0 for y >= 64 via Neg8 (SLTIU [64] (ZeroExt16to64 y)).
func rewriteValueRISCV64_OpRsh8Ux16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh8Ux16 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (AND (SRL <t> (ZeroExt8to64 x) y) (Neg8 <t> (SLTIU <t> [64] (ZeroExt16to64 y))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64AND)
		v0 := b.NewValue0(v.Pos, OpRISCV64SRL, t)
		v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v1.AddArg(x)
		v0.AddArg2(v1, y)
		v2 := b.NewValue0(v.Pos, OpNeg8, t)
		v3 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
		v3.AuxInt = int64ToAuxInt(64)
		v4 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v4.AddArg(y)
		v3.AddArg(v4)
		v2.AddArg(v3)
		v.AddArg2(v0, v2)
		return true
	}
	// match: (Rsh8Ux16 x y)
	// cond: shiftIsBounded(v)
	// result: (SRL (ZeroExt8to64 x) y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRL)
		v0 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v0.AddArg(x)
		v.AddArg2(v0, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRsh8Ux32 lowers Rsh8Ux32 (unsigned right shift, 8-bit
// value, 32-bit shift amount) to SRL on x zero-extended to 64 bits. If the
// shift is not provably bounded, the result is masked to 0 for y >= 64 via
// Neg8 (SLTIU [64] (ZeroExt32to64 y)).
func rewriteValueRISCV64_OpRsh8Ux32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh8Ux32 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (AND (SRL <t> (ZeroExt8to64 x) y) (Neg8 <t> (SLTIU <t> [64] (ZeroExt32to64 y))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64AND)
		v0 := b.NewValue0(v.Pos, OpRISCV64SRL, t)
		v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v1.AddArg(x)
		v0.AddArg2(v1, y)
		v2 := b.NewValue0(v.Pos, OpNeg8, t)
		v3 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
		v3.AuxInt = int64ToAuxInt(64)
		v4 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v4.AddArg(y)
		v3.AddArg(v4)
		v2.AddArg(v3)
		v.AddArg2(v0, v2)
		return true
	}
	// match: (Rsh8Ux32 x y)
	// cond: shiftIsBounded(v)
	// result: (SRL (ZeroExt8to64 x) y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRL)
		v0 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v0.AddArg(x)
		v.AddArg2(v0, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRsh8Ux64 lowers Rsh8Ux64 (unsigned right shift, 8-bit
// value, 64-bit shift amount) to SRL on x zero-extended to 64 bits. The shift
// amount needs no widening; if the shift is not provably bounded, the result
// is masked to 0 for y >= 64 via Neg8 (SLTIU [64] y).
func rewriteValueRISCV64_OpRsh8Ux64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh8Ux64 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (AND (SRL <t> (ZeroExt8to64 x) y) (Neg8 <t> (SLTIU <t> [64] y)))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64AND)
		v0 := b.NewValue0(v.Pos, OpRISCV64SRL, t)
		v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v1.AddArg(x)
		v0.AddArg2(v1, y)
		v2 := b.NewValue0(v.Pos, OpNeg8, t)
		v3 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
		v3.AuxInt = int64ToAuxInt(64)
		v3.AddArg(y)
		v2.AddArg(v3)
		v.AddArg2(v0, v2)
		return true
	}
	// match: (Rsh8Ux64 x y)
	// cond: shiftIsBounded(v)
	// result: (SRL (ZeroExt8to64 x) y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRL)
		v0 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v0.AddArg(x)
		v.AddArg2(v0, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRsh8Ux8 lowers Rsh8Ux8 (unsigned right shift, 8-bit
// value, 8-bit shift amount) to SRL on x zero-extended to 64 bits. If the
// shift is not provably bounded, the result is masked to 0 for y >= 64 via
// Neg8 (SLTIU [64] (ZeroExt8to64 y)).
func rewriteValueRISCV64_OpRsh8Ux8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh8Ux8 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (AND (SRL <t> (ZeroExt8to64 x) y) (Neg8 <t> (SLTIU <t> [64] (ZeroExt8to64 y))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64AND)
		v0 := b.NewValue0(v.Pos, OpRISCV64SRL, t)
		v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v1.AddArg(x)
		v0.AddArg2(v1, y)
		v2 := b.NewValue0(v.Pos, OpNeg8, t)
		v3 := b.NewValue0(v.Pos, OpRISCV64SLTIU, t)
		v3.AuxInt = int64ToAuxInt(64)
		v4 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v4.AddArg(y)
		v3.AddArg(v4)
		v2.AddArg(v3)
		v.AddArg2(v0, v2)
		return true
	}
	// match: (Rsh8Ux8 x y)
	// cond: shiftIsBounded(v)
	// result: (SRL (ZeroExt8to64 x) y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRL)
		v0 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v0.AddArg(x)
		v.AddArg2(v0, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRsh8x16 lowers Rsh8x16 (signed right shift, 8-bit
// value, 16-bit shift amount) to SRA on x sign-extended to 64 bits, so the
// 8-bit sign bit is replicated above bit 7. If the shift is not provably
// bounded, the shift amount is saturated to all ones for y >= 64 via
// OR y (ADDI [-1] (SLTIU [64] (ZeroExt16to64 y))).
func rewriteValueRISCV64_OpRsh8x16(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh8x16 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (SRA <t> (SignExt8to64 x) (OR <y.Type> y (ADDI <y.Type> [-1] (SLTIU <y.Type> [64] (ZeroExt16to64 y)))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRA)
		v.Type = t
		v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpRISCV64OR, y.Type)
		v2 := b.NewValue0(v.Pos, OpRISCV64ADDI, y.Type)
		v2.AuxInt = int64ToAuxInt(-1)
		v3 := b.NewValue0(v.Pos, OpRISCV64SLTIU, y.Type)
		v3.AuxInt = int64ToAuxInt(64)
		v4 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v4.AddArg(y)
		v3.AddArg(v4)
		v2.AddArg(v3)
		v1.AddArg2(y, v2)
		v.AddArg2(v0, v1)
		return true
	}
	// match: (Rsh8x16 x y)
	// cond: shiftIsBounded(v)
	// result: (SRA (SignExt8to64 x) y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRA)
		v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
		v0.AddArg(x)
		v.AddArg2(v0, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRsh8x32 lowers Rsh8x32 (signed right shift, 8-bit
// value, 32-bit shift amount) to SRA on x sign-extended to 64 bits. If the
// shift is not provably bounded, the shift amount is saturated to all ones
// for y >= 64 via OR y (ADDI [-1] (SLTIU [64] (ZeroExt32to64 y))).
func rewriteValueRISCV64_OpRsh8x32(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh8x32 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (SRA <t> (SignExt8to64 x) (OR <y.Type> y (ADDI <y.Type> [-1] (SLTIU <y.Type> [64] (ZeroExt32to64 y)))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRA)
		v.Type = t
		v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpRISCV64OR, y.Type)
		v2 := b.NewValue0(v.Pos, OpRISCV64ADDI, y.Type)
		v2.AuxInt = int64ToAuxInt(-1)
		v3 := b.NewValue0(v.Pos, OpRISCV64SLTIU, y.Type)
		v3.AuxInt = int64ToAuxInt(64)
		v4 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
		v4.AddArg(y)
		v3.AddArg(v4)
		v2.AddArg(v3)
		v1.AddArg2(y, v2)
		v.AddArg2(v0, v1)
		return true
	}
	// match: (Rsh8x32 x y)
	// cond: shiftIsBounded(v)
	// result: (SRA (SignExt8to64 x) y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRA)
		v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
		v0.AddArg(x)
		v.AddArg2(v0, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRsh8x64 lowers Rsh8x64 (signed right shift, 8-bit
// value, 64-bit shift amount) to SRA on x sign-extended to 64 bits. The shift
// amount needs no widening; if the shift is not provably bounded it is
// saturated to all ones for y >= 64 via OR y (ADDI [-1] (SLTIU [64] y)).
func rewriteValueRISCV64_OpRsh8x64(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh8x64 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (SRA <t> (SignExt8to64 x) (OR <y.Type> y (ADDI <y.Type> [-1] (SLTIU <y.Type> [64] y))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRA)
		v.Type = t
		v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpRISCV64OR, y.Type)
		v2 := b.NewValue0(v.Pos, OpRISCV64ADDI, y.Type)
		v2.AuxInt = int64ToAuxInt(-1)
		v3 := b.NewValue0(v.Pos, OpRISCV64SLTIU, y.Type)
		v3.AuxInt = int64ToAuxInt(64)
		v3.AddArg(y)
		v2.AddArg(v3)
		v1.AddArg2(y, v2)
		v.AddArg2(v0, v1)
		return true
	}
	// match: (Rsh8x64 x y)
	// cond: shiftIsBounded(v)
	// result: (SRA (SignExt8to64 x) y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRA)
		v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
		v0.AddArg(x)
		v.AddArg2(v0, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpRsh8x8 lowers Rsh8x8 (signed right shift, 8-bit
// value, 8-bit shift amount) to SRA on x sign-extended to 64 bits. If the
// shift is not provably bounded, the shift amount is saturated to all ones
// for y >= 64 via OR y (ADDI [-1] (SLTIU [64] (ZeroExt8to64 y))).
func rewriteValueRISCV64_OpRsh8x8(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Rsh8x8 <t> x y)
	// cond: !shiftIsBounded(v)
	// result: (SRA <t> (SignExt8to64 x) (OR <y.Type> y (ADDI <y.Type> [-1] (SLTIU <y.Type> [64] (ZeroExt8to64 y)))))
	for {
		t := v.Type
		x := v_0
		y := v_1
		if !(!shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRA)
		v.Type = t
		v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpRISCV64OR, y.Type)
		v2 := b.NewValue0(v.Pos, OpRISCV64ADDI, y.Type)
		v2.AuxInt = int64ToAuxInt(-1)
		v3 := b.NewValue0(v.Pos, OpRISCV64SLTIU, y.Type)
		v3.AuxInt = int64ToAuxInt(64)
		v4 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
		v4.AddArg(y)
		v3.AddArg(v4)
		v2.AddArg(v3)
		v1.AddArg2(y, v2)
		v.AddArg2(v0, v1)
		return true
	}
	// match: (Rsh8x8 x y)
	// cond: shiftIsBounded(v)
	// result: (SRA (SignExt8to64 x) y)
	for {
		x := v_0
		y := v_1
		if !(shiftIsBounded(v)) {
			break
		}
		v.reset(OpRISCV64SRA)
		v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
		v0.AddArg(x)
		v.AddArg2(v0, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpSelect0 lowers the first result of multi-result ops:
// the sum (x+y)+c of Add64carry, the difference (x-y)-c of Sub64borrow, and
// the high half (MULHU) of a LoweredMuluhilo whose result pair has only this
// single use.
func rewriteValueRISCV64_OpSelect0(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Select0 (Add64carry x y c))
	// result: (ADD (ADD <typ.UInt64> x y) c)
	for {
		if v_0.Op != OpAdd64carry {
			break
		}
		c := v_0.Args[2]
		x := v_0.Args[0]
		y := v_0.Args[1]
		v.reset(OpRISCV64ADD)
		v0 := b.NewValue0(v.Pos, OpRISCV64ADD, typ.UInt64)
		v0.AddArg2(x, y)
		v.AddArg2(v0, c)
		return true
	}
	// match: (Select0 (Sub64borrow x y c))
	// result: (SUB (SUB <typ.UInt64> x y) c)
	for {
		if v_0.Op != OpSub64borrow {
			break
		}
		c := v_0.Args[2]
		x := v_0.Args[0]
		y := v_0.Args[1]
		v.reset(OpRISCV64SUB)
		v0 := b.NewValue0(v.Pos, OpRISCV64SUB, typ.UInt64)
		v0.AddArg2(x, y)
		v.AddArg2(v0, c)
		return true
	}
	// match: (Select0 m:(LoweredMuluhilo x y))
	// cond: m.Uses == 1
	// result: (MULHU x y)
	for {
		m := v_0
		if m.Op != OpRISCV64LoweredMuluhilo {
			break
		}
		y := m.Args[1]
		x := m.Args[0]
		if !(m.Uses == 1) {
			break
		}
		v.reset(OpRISCV64MULHU)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpSelect1 lowers the second result of multi-result ops:
// the carry-out of Add64carry, computed as (s < x) | (s+c < s) with s = x+y
// (two unsigned compares via SLTU, ORed); the borrow-out of Sub64borrow,
// computed as (x < s) | (s < s-c) with s = x-y; and the low half (MUL) of a
// LoweredMuluhilo whose result pair has only this single use.
func rewriteValueRISCV64_OpSelect1(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Select1 (Add64carry x y c))
	// result: (OR (SLTU <typ.UInt64> s:(ADD <typ.UInt64> x y) x) (SLTU <typ.UInt64> (ADD <typ.UInt64> s c) s))
	for {
		if v_0.Op != OpAdd64carry {
			break
		}
		c := v_0.Args[2]
		x := v_0.Args[0]
		y := v_0.Args[1]
		v.reset(OpRISCV64OR)
		v0 := b.NewValue0(v.Pos, OpRISCV64SLTU, typ.UInt64)
		s := b.NewValue0(v.Pos, OpRISCV64ADD, typ.UInt64)
		s.AddArg2(x, y)
		v0.AddArg2(s, x)
		v2 := b.NewValue0(v.Pos, OpRISCV64SLTU, typ.UInt64)
		v3 := b.NewValue0(v.Pos, OpRISCV64ADD, typ.UInt64)
		v3.AddArg2(s, c)
		v2.AddArg2(v3, s)
		v.AddArg2(v0, v2)
		return true
	}
	// match: (Select1 (Sub64borrow x y c))
	// result: (OR (SLTU <typ.UInt64> x s:(SUB <typ.UInt64> x y)) (SLTU <typ.UInt64> s (SUB <typ.UInt64> s c)))
	for {
		if v_0.Op != OpSub64borrow {
			break
		}
		c := v_0.Args[2]
		x := v_0.Args[0]
		y := v_0.Args[1]
		v.reset(OpRISCV64OR)
		v0 := b.NewValue0(v.Pos, OpRISCV64SLTU, typ.UInt64)
		s := b.NewValue0(v.Pos, OpRISCV64SUB, typ.UInt64)
		s.AddArg2(x, y)
		v0.AddArg2(x, s)
		v2 := b.NewValue0(v.Pos, OpRISCV64SLTU, typ.UInt64)
		v3 := b.NewValue0(v.Pos, OpRISCV64SUB, typ.UInt64)
		v3.AddArg2(s, c)
		v2.AddArg2(s, v3)
		v.AddArg2(v0, v2)
		return true
	}
	// match: (Select1 m:(LoweredMuluhilo x y))
	// cond: m.Uses == 1
	// result: (MUL x y)
	for {
		m := v_0
		if m.Op != OpRISCV64LoweredMuluhilo {
			break
		}
		y := m.Args[1]
		x := m.Args[0]
		if !(m.Uses == 1) {
			break
		}
		v.reset(OpRISCV64MUL)
		v.AddArg2(x, y)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpSlicemask lowers Slicemask <t> x to (SRAI [63] (NEG x)):
// NEG x is negative exactly when x > 0 (x is presumably a non-negative slice
// length — the rule gives no guarantee for negative inputs), so the arithmetic
// shift by 63 yields all ones for x > 0 and zero for x == 0. Always matches.
func rewriteValueRISCV64_OpSlicemask(v *Value) bool {
	v_0 := v.Args[0]
	b := v.Block
	// match: (Slicemask <t> x)
	// result: (SRAI [63] (NEG <t> x))
	for {
		t := v.Type
		x := v_0
		v.reset(OpRISCV64SRAI)
		v.AuxInt = int64ToAuxInt(63)
		v0 := b.NewValue0(v.Pos, OpRISCV64NEG, t)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
}
// rewriteValueRISCV64_OpStore lowers a generic typed Store to the RISCV64
// store of matching width and register class: MOVB/MOVH/MOVW/MOVDstore for
// 1/2/4/8-byte non-float values and FMOVW/FMOVDstore for 4/8-byte floats.
// Returns false for sizes no rule matches, leaving the op unrewritten.
func rewriteValueRISCV64_OpStore(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	// match: (Store {t} ptr val mem)
	// cond: t.Size() == 1
	// result: (MOVBstore ptr val mem)
	for {
		t := auxToType(v.Aux)
		ptr := v_0
		val := v_1
		mem := v_2
		if !(t.Size() == 1) {
			break
		}
		v.reset(OpRISCV64MOVBstore)
		v.AddArg3(ptr, val, mem)
		return true
	}
	// match: (Store {t} ptr val mem)
	// cond: t.Size() == 2
	// result: (MOVHstore ptr val mem)
	for {
		t := auxToType(v.Aux)
		ptr := v_0
		val := v_1
		mem := v_2
		if !(t.Size() == 2) {
			break
		}
		v.reset(OpRISCV64MOVHstore)
		v.AddArg3(ptr, val, mem)
		return true
	}
	// match: (Store {t} ptr val mem)
	// cond: t.Size() == 4 && !t.IsFloat()
	// result: (MOVWstore ptr val mem)
	for {
		t := auxToType(v.Aux)
		ptr := v_0
		val := v_1
		mem := v_2
		if !(t.Size() == 4 && !t.IsFloat()) {
			break
		}
		v.reset(OpRISCV64MOVWstore)
		v.AddArg3(ptr, val, mem)
		return true
	}
	// match: (Store {t} ptr val mem)
	// cond: t.Size() == 8 && !t.IsFloat()
	// result: (MOVDstore ptr val mem)
	for {
		t := auxToType(v.Aux)
		ptr := v_0
		val := v_1
		mem := v_2
		if !(t.Size() == 8 && !t.IsFloat()) {
			break
		}
		v.reset(OpRISCV64MOVDstore)
		v.AddArg3(ptr, val, mem)
		return true
	}
	// match: (Store {t} ptr val mem)
	// cond: t.Size() == 4 && t.IsFloat()
	// result: (FMOVWstore ptr val mem)
	for {
		t := auxToType(v.Aux)
		ptr := v_0
		val := v_1
		mem := v_2
		if !(t.Size() == 4 && t.IsFloat()) {
			break
		}
		v.reset(OpRISCV64FMOVWstore)
		v.AddArg3(ptr, val, mem)
		return true
	}
	// match: (Store {t} ptr val mem)
	// cond: t.Size() == 8 && t.IsFloat()
	// result: (FMOVDstore ptr val mem)
	for {
		t := auxToType(v.Aux)
		ptr := v_0
		val := v_1
		mem := v_2
		if !(t.Size() == 8 && t.IsFloat()) {
			break
		}
		v.reset(OpRISCV64FMOVDstore)
		v.AddArg3(ptr, val, mem)
		return true
	}
	return false
}
// rewriteValueRISCV64_OpZero lowers a generic Zero [size] {type} op into a
// RISCV64-specific store sequence, reporting whether a rewrite fired.
// Rules are tried strictly in order, so rule order encodes priority:
// exact small sizes first, each preferring the widest store the type's
// alignment allows (MOVDstore > MOVWstore > MOVHstore > MOVBstore),
// then Duff's device for 8-byte-aligned multiples of 8 up to 1024 bytes,
// and finally an unconditional generic zeroing loop.
// NOTE(review): this file is machine generated from _gen/RISCV64.rules
// (see the file header); behavioral changes belong in the rules file.
  8424  func rewriteValueRISCV64_OpZero(v *Value) bool {
  8425  	v_1 := v.Args[1]
  8426  	v_0 := v.Args[0]
  8427  	b := v.Block
  8428  	config := b.Func.Config
  8429  	typ := &b.Func.Config.Types
  8430  	// match: (Zero [0] _ mem)
  8431  	// result: mem
  8432  	for {
  8433  		if auxIntToInt64(v.AuxInt) != 0 {
  8434  			break
  8435  		}
  8436  		mem := v_1
  8437  		v.copyOf(mem)
  8438  		return true
  8439  	}
  8440  	// match: (Zero [1] ptr mem)
  8441  	// result: (MOVBstore ptr (MOVDconst [0]) mem)
  8442  	for {
  8443  		if auxIntToInt64(v.AuxInt) != 1 {
  8444  			break
  8445  		}
  8446  		ptr := v_0
  8447  		mem := v_1
  8448  		v.reset(OpRISCV64MOVBstore)
  8449  		v0 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
  8450  		v0.AuxInt = int64ToAuxInt(0)
  8451  		v.AddArg3(ptr, v0, mem)
  8452  		return true
  8453  	}
  8454  	// match: (Zero [2] {t} ptr mem)
  8455  	// cond: t.Alignment()%2 == 0
  8456  	// result: (MOVHstore ptr (MOVDconst [0]) mem)
  8457  	for {
  8458  		if auxIntToInt64(v.AuxInt) != 2 {
  8459  			break
  8460  		}
  8461  		t := auxToType(v.Aux)
  8462  		ptr := v_0
  8463  		mem := v_1
  8464  		if !(t.Alignment()%2 == 0) {
  8465  			break
  8466  		}
  8467  		v.reset(OpRISCV64MOVHstore)
  8468  		v0 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
  8469  		v0.AuxInt = int64ToAuxInt(0)
  8470  		v.AddArg3(ptr, v0, mem)
  8471  		return true
  8472  	}
  8473  	// match: (Zero [2] ptr mem)
  8474  	// result: (MOVBstore [1] ptr (MOVDconst [0]) (MOVBstore ptr (MOVDconst [0]) mem))
  8475  	for {
  8476  		if auxIntToInt64(v.AuxInt) != 2 {
  8477  			break
  8478  		}
  8479  		ptr := v_0
  8480  		mem := v_1
  8481  		v.reset(OpRISCV64MOVBstore)
  8482  		v.AuxInt = int32ToAuxInt(1)
  8483  		v0 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
  8484  		v0.AuxInt = int64ToAuxInt(0)
  8485  		v1 := b.NewValue0(v.Pos, OpRISCV64MOVBstore, types.TypeMem)
  8486  		v1.AddArg3(ptr, v0, mem)
  8487  		v.AddArg3(ptr, v0, v1)
  8488  		return true
  8489  	}
  8490  	// match: (Zero [4] {t} ptr mem)
  8491  	// cond: t.Alignment()%4 == 0
  8492  	// result: (MOVWstore ptr (MOVDconst [0]) mem)
  8493  	for {
  8494  		if auxIntToInt64(v.AuxInt) != 4 {
  8495  			break
  8496  		}
  8497  		t := auxToType(v.Aux)
  8498  		ptr := v_0
  8499  		mem := v_1
  8500  		if !(t.Alignment()%4 == 0) {
  8501  			break
  8502  		}
  8503  		v.reset(OpRISCV64MOVWstore)
  8504  		v0 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
  8505  		v0.AuxInt = int64ToAuxInt(0)
  8506  		v.AddArg3(ptr, v0, mem)
  8507  		return true
  8508  	}
  8509  	// match: (Zero [4] {t} ptr mem)
  8510  	// cond: t.Alignment()%2 == 0
  8511  	// result: (MOVHstore [2] ptr (MOVDconst [0]) (MOVHstore ptr (MOVDconst [0]) mem))
  8512  	for {
  8513  		if auxIntToInt64(v.AuxInt) != 4 {
  8514  			break
  8515  		}
  8516  		t := auxToType(v.Aux)
  8517  		ptr := v_0
  8518  		mem := v_1
  8519  		if !(t.Alignment()%2 == 0) {
  8520  			break
  8521  		}
  8522  		v.reset(OpRISCV64MOVHstore)
  8523  		v.AuxInt = int32ToAuxInt(2)
  8524  		v0 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
  8525  		v0.AuxInt = int64ToAuxInt(0)
  8526  		v1 := b.NewValue0(v.Pos, OpRISCV64MOVHstore, types.TypeMem)
  8527  		v1.AddArg3(ptr, v0, mem)
  8528  		v.AddArg3(ptr, v0, v1)
  8529  		return true
  8530  	}
  8531  	// match: (Zero [4] ptr mem)
  8532  	// result: (MOVBstore [3] ptr (MOVDconst [0]) (MOVBstore [2] ptr (MOVDconst [0]) (MOVBstore [1] ptr (MOVDconst [0]) (MOVBstore ptr (MOVDconst [0]) mem))))
  8533  	for {
  8534  		if auxIntToInt64(v.AuxInt) != 4 {
  8535  			break
  8536  		}
  8537  		ptr := v_0
  8538  		mem := v_1
  8539  		v.reset(OpRISCV64MOVBstore)
  8540  		v.AuxInt = int32ToAuxInt(3)
  8541  		v0 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
  8542  		v0.AuxInt = int64ToAuxInt(0)
  8543  		v1 := b.NewValue0(v.Pos, OpRISCV64MOVBstore, types.TypeMem)
  8544  		v1.AuxInt = int32ToAuxInt(2)
  8545  		v2 := b.NewValue0(v.Pos, OpRISCV64MOVBstore, types.TypeMem)
  8546  		v2.AuxInt = int32ToAuxInt(1)
  8547  		v3 := b.NewValue0(v.Pos, OpRISCV64MOVBstore, types.TypeMem)
  8548  		v3.AddArg3(ptr, v0, mem)
  8549  		v2.AddArg3(ptr, v0, v3)
  8550  		v1.AddArg3(ptr, v0, v2)
  8551  		v.AddArg3(ptr, v0, v1)
  8552  		return true
  8553  	}
  8554  	// match: (Zero [8] {t} ptr mem)
  8555  	// cond: t.Alignment()%8 == 0
  8556  	// result: (MOVDstore ptr (MOVDconst [0]) mem)
  8557  	for {
  8558  		if auxIntToInt64(v.AuxInt) != 8 {
  8559  			break
  8560  		}
  8561  		t := auxToType(v.Aux)
  8562  		ptr := v_0
  8563  		mem := v_1
  8564  		if !(t.Alignment()%8 == 0) {
  8565  			break
  8566  		}
  8567  		v.reset(OpRISCV64MOVDstore)
  8568  		v0 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
  8569  		v0.AuxInt = int64ToAuxInt(0)
  8570  		v.AddArg3(ptr, v0, mem)
  8571  		return true
  8572  	}
  8573  	// match: (Zero [8] {t} ptr mem)
  8574  	// cond: t.Alignment()%4 == 0
  8575  	// result: (MOVWstore [4] ptr (MOVDconst [0]) (MOVWstore ptr (MOVDconst [0]) mem))
  8576  	for {
  8577  		if auxIntToInt64(v.AuxInt) != 8 {
  8578  			break
  8579  		}
  8580  		t := auxToType(v.Aux)
  8581  		ptr := v_0
  8582  		mem := v_1
  8583  		if !(t.Alignment()%4 == 0) {
  8584  			break
  8585  		}
  8586  		v.reset(OpRISCV64MOVWstore)
  8587  		v.AuxInt = int32ToAuxInt(4)
  8588  		v0 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
  8589  		v0.AuxInt = int64ToAuxInt(0)
  8590  		v1 := b.NewValue0(v.Pos, OpRISCV64MOVWstore, types.TypeMem)
  8591  		v1.AddArg3(ptr, v0, mem)
  8592  		v.AddArg3(ptr, v0, v1)
  8593  		return true
  8594  	}
  8595  	// match: (Zero [8] {t} ptr mem)
  8596  	// cond: t.Alignment()%2 == 0
  8597  	// result: (MOVHstore [6] ptr (MOVDconst [0]) (MOVHstore [4] ptr (MOVDconst [0]) (MOVHstore [2] ptr (MOVDconst [0]) (MOVHstore ptr (MOVDconst [0]) mem))))
  8598  	for {
  8599  		if auxIntToInt64(v.AuxInt) != 8 {
  8600  			break
  8601  		}
  8602  		t := auxToType(v.Aux)
  8603  		ptr := v_0
  8604  		mem := v_1
  8605  		if !(t.Alignment()%2 == 0) {
  8606  			break
  8607  		}
  8608  		v.reset(OpRISCV64MOVHstore)
  8609  		v.AuxInt = int32ToAuxInt(6)
  8610  		v0 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
  8611  		v0.AuxInt = int64ToAuxInt(0)
  8612  		v1 := b.NewValue0(v.Pos, OpRISCV64MOVHstore, types.TypeMem)
  8613  		v1.AuxInt = int32ToAuxInt(4)
  8614  		v2 := b.NewValue0(v.Pos, OpRISCV64MOVHstore, types.TypeMem)
  8615  		v2.AuxInt = int32ToAuxInt(2)
  8616  		v3 := b.NewValue0(v.Pos, OpRISCV64MOVHstore, types.TypeMem)
  8617  		v3.AddArg3(ptr, v0, mem)
  8618  		v2.AddArg3(ptr, v0, v3)
  8619  		v1.AddArg3(ptr, v0, v2)
  8620  		v.AddArg3(ptr, v0, v1)
  8621  		return true
  8622  	}
  8623  	// match: (Zero [3] ptr mem)
  8624  	// result: (MOVBstore [2] ptr (MOVDconst [0]) (MOVBstore [1] ptr (MOVDconst [0]) (MOVBstore ptr (MOVDconst [0]) mem)))
  8625  	for {
  8626  		if auxIntToInt64(v.AuxInt) != 3 {
  8627  			break
  8628  		}
  8629  		ptr := v_0
  8630  		mem := v_1
  8631  		v.reset(OpRISCV64MOVBstore)
  8632  		v.AuxInt = int32ToAuxInt(2)
  8633  		v0 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
  8634  		v0.AuxInt = int64ToAuxInt(0)
  8635  		v1 := b.NewValue0(v.Pos, OpRISCV64MOVBstore, types.TypeMem)
  8636  		v1.AuxInt = int32ToAuxInt(1)
  8637  		v2 := b.NewValue0(v.Pos, OpRISCV64MOVBstore, types.TypeMem)
  8638  		v2.AddArg3(ptr, v0, mem)
  8639  		v1.AddArg3(ptr, v0, v2)
  8640  		v.AddArg3(ptr, v0, v1)
  8641  		return true
  8642  	}
  8643  	// match: (Zero [6] {t} ptr mem)
  8644  	// cond: t.Alignment()%2 == 0
  8645  	// result: (MOVHstore [4] ptr (MOVDconst [0]) (MOVHstore [2] ptr (MOVDconst [0]) (MOVHstore ptr (MOVDconst [0]) mem)))
  8646  	for {
  8647  		if auxIntToInt64(v.AuxInt) != 6 {
  8648  			break
  8649  		}
  8650  		t := auxToType(v.Aux)
  8651  		ptr := v_0
  8652  		mem := v_1
  8653  		if !(t.Alignment()%2 == 0) {
  8654  			break
  8655  		}
  8656  		v.reset(OpRISCV64MOVHstore)
  8657  		v.AuxInt = int32ToAuxInt(4)
  8658  		v0 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
  8659  		v0.AuxInt = int64ToAuxInt(0)
  8660  		v1 := b.NewValue0(v.Pos, OpRISCV64MOVHstore, types.TypeMem)
  8661  		v1.AuxInt = int32ToAuxInt(2)
  8662  		v2 := b.NewValue0(v.Pos, OpRISCV64MOVHstore, types.TypeMem)
  8663  		v2.AddArg3(ptr, v0, mem)
  8664  		v1.AddArg3(ptr, v0, v2)
  8665  		v.AddArg3(ptr, v0, v1)
  8666  		return true
  8667  	}
  8668  	// match: (Zero [12] {t} ptr mem)
  8669  	// cond: t.Alignment()%4 == 0
  8670  	// result: (MOVWstore [8] ptr (MOVDconst [0]) (MOVWstore [4] ptr (MOVDconst [0]) (MOVWstore ptr (MOVDconst [0]) mem)))
  8671  	for {
  8672  		if auxIntToInt64(v.AuxInt) != 12 {
  8673  			break
  8674  		}
  8675  		t := auxToType(v.Aux)
  8676  		ptr := v_0
  8677  		mem := v_1
  8678  		if !(t.Alignment()%4 == 0) {
  8679  			break
  8680  		}
  8681  		v.reset(OpRISCV64MOVWstore)
  8682  		v.AuxInt = int32ToAuxInt(8)
  8683  		v0 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
  8684  		v0.AuxInt = int64ToAuxInt(0)
  8685  		v1 := b.NewValue0(v.Pos, OpRISCV64MOVWstore, types.TypeMem)
  8686  		v1.AuxInt = int32ToAuxInt(4)
  8687  		v2 := b.NewValue0(v.Pos, OpRISCV64MOVWstore, types.TypeMem)
  8688  		v2.AddArg3(ptr, v0, mem)
  8689  		v1.AddArg3(ptr, v0, v2)
  8690  		v.AddArg3(ptr, v0, v1)
  8691  		return true
  8692  	}
  8693  	// match: (Zero [16] {t} ptr mem)
  8694  	// cond: t.Alignment()%8 == 0
  8695  	// result: (MOVDstore [8] ptr (MOVDconst [0]) (MOVDstore ptr (MOVDconst [0]) mem))
  8696  	for {
  8697  		if auxIntToInt64(v.AuxInt) != 16 {
  8698  			break
  8699  		}
  8700  		t := auxToType(v.Aux)
  8701  		ptr := v_0
  8702  		mem := v_1
  8703  		if !(t.Alignment()%8 == 0) {
  8704  			break
  8705  		}
  8706  		v.reset(OpRISCV64MOVDstore)
  8707  		v.AuxInt = int32ToAuxInt(8)
  8708  		v0 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
  8709  		v0.AuxInt = int64ToAuxInt(0)
  8710  		v1 := b.NewValue0(v.Pos, OpRISCV64MOVDstore, types.TypeMem)
  8711  		v1.AddArg3(ptr, v0, mem)
  8712  		v.AddArg3(ptr, v0, v1)
  8713  		return true
  8714  	}
  8715  	// match: (Zero [24] {t} ptr mem)
  8716  	// cond: t.Alignment()%8 == 0
  8717  	// result: (MOVDstore [16] ptr (MOVDconst [0]) (MOVDstore [8] ptr (MOVDconst [0]) (MOVDstore ptr (MOVDconst [0]) mem)))
  8718  	for {
  8719  		if auxIntToInt64(v.AuxInt) != 24 {
  8720  			break
  8721  		}
  8722  		t := auxToType(v.Aux)
  8723  		ptr := v_0
  8724  		mem := v_1
  8725  		if !(t.Alignment()%8 == 0) {
  8726  			break
  8727  		}
  8728  		v.reset(OpRISCV64MOVDstore)
  8729  		v.AuxInt = int32ToAuxInt(16)
  8730  		v0 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
  8731  		v0.AuxInt = int64ToAuxInt(0)
  8732  		v1 := b.NewValue0(v.Pos, OpRISCV64MOVDstore, types.TypeMem)
  8733  		v1.AuxInt = int32ToAuxInt(8)
  8734  		v2 := b.NewValue0(v.Pos, OpRISCV64MOVDstore, types.TypeMem)
  8735  		v2.AddArg3(ptr, v0, mem)
  8736  		v1.AddArg3(ptr, v0, v2)
  8737  		v.AddArg3(ptr, v0, v1)
  8738  		return true
  8739  	}
  8740  	// match: (Zero [32] {t} ptr mem)
  8741  	// cond: t.Alignment()%8 == 0
  8742  	// result: (MOVDstore [24] ptr (MOVDconst [0]) (MOVDstore [16] ptr (MOVDconst [0]) (MOVDstore [8] ptr (MOVDconst [0]) (MOVDstore ptr (MOVDconst [0]) mem))))
  8743  	for {
  8744  		if auxIntToInt64(v.AuxInt) != 32 {
  8745  			break
  8746  		}
  8747  		t := auxToType(v.Aux)
  8748  		ptr := v_0
  8749  		mem := v_1
  8750  		if !(t.Alignment()%8 == 0) {
  8751  			break
  8752  		}
  8753  		v.reset(OpRISCV64MOVDstore)
  8754  		v.AuxInt = int32ToAuxInt(24)
  8755  		v0 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
  8756  		v0.AuxInt = int64ToAuxInt(0)
  8757  		v1 := b.NewValue0(v.Pos, OpRISCV64MOVDstore, types.TypeMem)
  8758  		v1.AuxInt = int32ToAuxInt(16)
  8759  		v2 := b.NewValue0(v.Pos, OpRISCV64MOVDstore, types.TypeMem)
  8760  		v2.AuxInt = int32ToAuxInt(8)
  8761  		v3 := b.NewValue0(v.Pos, OpRISCV64MOVDstore, types.TypeMem)
  8762  		v3.AddArg3(ptr, v0, mem)
  8763  		v2.AddArg3(ptr, v0, v3)
  8764  		v1.AddArg3(ptr, v0, v2)
  8765  		v.AddArg3(ptr, v0, v1)
  8766  		return true
  8767  	}
  8768  	// match: (Zero [s] {t} ptr mem)
  8769  	// cond: s%8 == 0 && s <= 8*128 && t.Alignment()%8 == 0 && !config.noDuffDevice
  8770  	// result: (DUFFZERO [8 * (128 - s/8)] ptr mem)
	// Duff's device: 8-byte-aligned sizes that are multiples of 8, up to
	// 8*128 = 1024 bytes. The AuxInt is the byte offset to jump to within
	// the 128-store duffzero routine so that exactly s/8 stores execute.
  8771  	for {
  8772  		s := auxIntToInt64(v.AuxInt)
  8773  		t := auxToType(v.Aux)
  8774  		ptr := v_0
  8775  		mem := v_1
  8776  		if !(s%8 == 0 && s <= 8*128 && t.Alignment()%8 == 0 && !config.noDuffDevice) {
  8777  			break
  8778  		}
  8779  		v.reset(OpRISCV64DUFFZERO)
  8780  		v.AuxInt = int64ToAuxInt(8 * (128 - s/8))
  8781  		v.AddArg2(ptr, mem)
  8782  		return true
  8783  	}
  8784  	// match: (Zero [s] {t} ptr mem)
  8785  	// result: (LoweredZero [t.Alignment()] ptr (ADD <ptr.Type> ptr (MOVDconst [s-moveSize(t.Alignment(), config)])) mem)
	// Unconditional fallback: a generic zeroing loop; the second argument
	// is the address of the last element to be zeroed.
  8786  	for {
  8787  		s := auxIntToInt64(v.AuxInt)
  8788  		t := auxToType(v.Aux)
  8789  		ptr := v_0
  8790  		mem := v_1
  8791  		v.reset(OpRISCV64LoweredZero)
  8792  		v.AuxInt = int64ToAuxInt(t.Alignment())
  8793  		v0 := b.NewValue0(v.Pos, OpRISCV64ADD, ptr.Type)
  8794  		v1 := b.NewValue0(v.Pos, OpRISCV64MOVDconst, typ.UInt64)
  8795  		v1.AuxInt = int64ToAuxInt(s - moveSize(t.Alignment(), config))
  8796  		v0.AddArg2(ptr, v1)
  8797  		v.AddArg3(ptr, v0, mem)
  8798  		return true
  8799  	}
  8800  }
// rewriteBlockRISCV64 rewrites control-flow blocks into cheaper RISCV64
// forms, reporting whether any rewrite applied. The visible cases:
//   - BEQ/BNE/BGE/BLT against a (MOVDconst [0]) control collapse to the
//     single-operand zero branches (BEQZ, BNEZ, BGEZ, BLEZ, BGTZ, BLTZ);
//     note the operand order flips the sense for BGE/BLT with the
//     constant first (0 >= x becomes BLEZ x, 0 < x becomes BGTZ x).
//   - BEQZ/BNEZ absorb the value producing their condition: SEQZ/SNEZ/NEG
//     fold into a (possibly inverted) zero branch; SUB/SLT/SLTU become
//     two-operand compare branches; SLTI/SLTIU rematerialize the
//     immediate as a MOVDconst for a compare branch; FNES/FNED are
//     replaced by the inverted branch on FEQS/FEQD (the commutative
//     inner loop tries both argument orders).
//   - A generic If block becomes (BNEZ (MOVBUreg cond)) on the
//     byte-extended boolean.
// NOTE(review): machine generated from _gen/RISCV64.rules (see the file
// header); behavioral changes belong in the rules file.
  8801  func rewriteBlockRISCV64(b *Block) bool {
  8802  	typ := &b.Func.Config.Types
  8803  	switch b.Kind {
  8804  	case BlockRISCV64BEQ:
  8805  		// match: (BEQ (MOVDconst [0]) cond yes no)
  8806  		// result: (BEQZ cond yes no)
  8807  		for b.Controls[0].Op == OpRISCV64MOVDconst {
  8808  			v_0 := b.Controls[0]
  8809  			if auxIntToInt64(v_0.AuxInt) != 0 {
  8810  				break
  8811  			}
  8812  			cond := b.Controls[1]
  8813  			b.resetWithControl(BlockRISCV64BEQZ, cond)
  8814  			return true
  8815  		}
  8816  		// match: (BEQ cond (MOVDconst [0]) yes no)
  8817  		// result: (BEQZ cond yes no)
  8818  		for b.Controls[1].Op == OpRISCV64MOVDconst {
  8819  			cond := b.Controls[0]
  8820  			v_1 := b.Controls[1]
  8821  			if auxIntToInt64(v_1.AuxInt) != 0 {
  8822  				break
  8823  			}
  8824  			b.resetWithControl(BlockRISCV64BEQZ, cond)
  8825  			return true
  8826  		}
  8827  	case BlockRISCV64BEQZ:
  8828  		// match: (BEQZ (SEQZ x) yes no)
  8829  		// result: (BNEZ x yes no)
  8830  		for b.Controls[0].Op == OpRISCV64SEQZ {
  8831  			v_0 := b.Controls[0]
  8832  			x := v_0.Args[0]
  8833  			b.resetWithControl(BlockRISCV64BNEZ, x)
  8834  			return true
  8835  		}
  8836  		// match: (BEQZ (SNEZ x) yes no)
  8837  		// result: (BEQZ x yes no)
  8838  		for b.Controls[0].Op == OpRISCV64SNEZ {
  8839  			v_0 := b.Controls[0]
  8840  			x := v_0.Args[0]
  8841  			b.resetWithControl(BlockRISCV64BEQZ, x)
  8842  			return true
  8843  		}
  8844  		// match: (BEQZ (NEG x) yes no)
  8845  		// result: (BEQZ x yes no)
  8846  		for b.Controls[0].Op == OpRISCV64NEG {
  8847  			v_0 := b.Controls[0]
  8848  			x := v_0.Args[0]
  8849  			b.resetWithControl(BlockRISCV64BEQZ, x)
  8850  			return true
  8851  		}
  8852  		// match: (BEQZ (FNES <t> x y) yes no)
  8853  		// result: (BNEZ (FEQS <t> x y) yes no)
  8854  		for b.Controls[0].Op == OpRISCV64FNES {
  8855  			v_0 := b.Controls[0]
  8856  			t := v_0.Type
  8857  			_ = v_0.Args[1]
  8858  			v_0_0 := v_0.Args[0]
  8859  			v_0_1 := v_0.Args[1]
  8860  			for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
  8861  				x := v_0_0
  8862  				y := v_0_1
  8863  				v0 := b.NewValue0(v_0.Pos, OpRISCV64FEQS, t)
  8864  				v0.AddArg2(x, y)
  8865  				b.resetWithControl(BlockRISCV64BNEZ, v0)
  8866  				return true
  8867  			}
  8868  		}
  8869  		// match: (BEQZ (FNED <t> x y) yes no)
  8870  		// result: (BNEZ (FEQD <t> x y) yes no)
  8871  		for b.Controls[0].Op == OpRISCV64FNED {
  8872  			v_0 := b.Controls[0]
  8873  			t := v_0.Type
  8874  			_ = v_0.Args[1]
  8875  			v_0_0 := v_0.Args[0]
  8876  			v_0_1 := v_0.Args[1]
  8877  			for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
  8878  				x := v_0_0
  8879  				y := v_0_1
  8880  				v0 := b.NewValue0(v_0.Pos, OpRISCV64FEQD, t)
  8881  				v0.AddArg2(x, y)
  8882  				b.resetWithControl(BlockRISCV64BNEZ, v0)
  8883  				return true
  8884  			}
  8885  		}
  8886  		// match: (BEQZ (SUB x y) yes no)
  8887  		// result: (BEQ x y yes no)
  8888  		for b.Controls[0].Op == OpRISCV64SUB {
  8889  			v_0 := b.Controls[0]
  8890  			y := v_0.Args[1]
  8891  			x := v_0.Args[0]
  8892  			b.resetWithControl2(BlockRISCV64BEQ, x, y)
  8893  			return true
  8894  		}
  8895  		// match: (BEQZ (SLT x y) yes no)
  8896  		// result: (BGE x y yes no)
  8897  		for b.Controls[0].Op == OpRISCV64SLT {
  8898  			v_0 := b.Controls[0]
  8899  			y := v_0.Args[1]
  8900  			x := v_0.Args[0]
  8901  			b.resetWithControl2(BlockRISCV64BGE, x, y)
  8902  			return true
  8903  		}
  8904  		// match: (BEQZ (SLTU x y) yes no)
  8905  		// result: (BGEU x y yes no)
  8906  		for b.Controls[0].Op == OpRISCV64SLTU {
  8907  			v_0 := b.Controls[0]
  8908  			y := v_0.Args[1]
  8909  			x := v_0.Args[0]
  8910  			b.resetWithControl2(BlockRISCV64BGEU, x, y)
  8911  			return true
  8912  		}
  8913  		// match: (BEQZ (SLTI [x] y) yes no)
  8914  		// result: (BGE y (MOVDconst [x]) yes no)
  8915  		for b.Controls[0].Op == OpRISCV64SLTI {
  8916  			v_0 := b.Controls[0]
  8917  			x := auxIntToInt64(v_0.AuxInt)
  8918  			y := v_0.Args[0]
  8919  			v0 := b.NewValue0(b.Pos, OpRISCV64MOVDconst, typ.UInt64)
  8920  			v0.AuxInt = int64ToAuxInt(x)
  8921  			b.resetWithControl2(BlockRISCV64BGE, y, v0)
  8922  			return true
  8923  		}
  8924  		// match: (BEQZ (SLTIU [x] y) yes no)
  8925  		// result: (BGEU y (MOVDconst [x]) yes no)
  8926  		for b.Controls[0].Op == OpRISCV64SLTIU {
  8927  			v_0 := b.Controls[0]
  8928  			x := auxIntToInt64(v_0.AuxInt)
  8929  			y := v_0.Args[0]
  8930  			v0 := b.NewValue0(b.Pos, OpRISCV64MOVDconst, typ.UInt64)
  8931  			v0.AuxInt = int64ToAuxInt(x)
  8932  			b.resetWithControl2(BlockRISCV64BGEU, y, v0)
  8933  			return true
  8934  		}
  8935  	case BlockRISCV64BGE:
  8936  		// match: (BGE (MOVDconst [0]) cond yes no)
  8937  		// result: (BLEZ cond yes no)
  8938  		for b.Controls[0].Op == OpRISCV64MOVDconst {
  8939  			v_0 := b.Controls[0]
  8940  			if auxIntToInt64(v_0.AuxInt) != 0 {
  8941  				break
  8942  			}
  8943  			cond := b.Controls[1]
  8944  			b.resetWithControl(BlockRISCV64BLEZ, cond)
  8945  			return true
  8946  		}
  8947  		// match: (BGE cond (MOVDconst [0]) yes no)
  8948  		// result: (BGEZ cond yes no)
  8949  		for b.Controls[1].Op == OpRISCV64MOVDconst {
  8950  			cond := b.Controls[0]
  8951  			v_1 := b.Controls[1]
  8952  			if auxIntToInt64(v_1.AuxInt) != 0 {
  8953  				break
  8954  			}
  8955  			b.resetWithControl(BlockRISCV64BGEZ, cond)
  8956  			return true
  8957  		}
  8958  	case BlockRISCV64BLT:
  8959  		// match: (BLT (MOVDconst [0]) cond yes no)
  8960  		// result: (BGTZ cond yes no)
  8961  		for b.Controls[0].Op == OpRISCV64MOVDconst {
  8962  			v_0 := b.Controls[0]
  8963  			if auxIntToInt64(v_0.AuxInt) != 0 {
  8964  				break
  8965  			}
  8966  			cond := b.Controls[1]
  8967  			b.resetWithControl(BlockRISCV64BGTZ, cond)
  8968  			return true
  8969  		}
  8970  		// match: (BLT cond (MOVDconst [0]) yes no)
  8971  		// result: (BLTZ cond yes no)
  8972  		for b.Controls[1].Op == OpRISCV64MOVDconst {
  8973  			cond := b.Controls[0]
  8974  			v_1 := b.Controls[1]
  8975  			if auxIntToInt64(v_1.AuxInt) != 0 {
  8976  				break
  8977  			}
  8978  			b.resetWithControl(BlockRISCV64BLTZ, cond)
  8979  			return true
  8980  		}
  8981  	case BlockRISCV64BNE:
  8982  		// match: (BNE (MOVDconst [0]) cond yes no)
  8983  		// result: (BNEZ cond yes no)
  8984  		for b.Controls[0].Op == OpRISCV64MOVDconst {
  8985  			v_0 := b.Controls[0]
  8986  			if auxIntToInt64(v_0.AuxInt) != 0 {
  8987  				break
  8988  			}
  8989  			cond := b.Controls[1]
  8990  			b.resetWithControl(BlockRISCV64BNEZ, cond)
  8991  			return true
  8992  		}
  8993  		// match: (BNE cond (MOVDconst [0]) yes no)
  8994  		// result: (BNEZ cond yes no)
  8995  		for b.Controls[1].Op == OpRISCV64MOVDconst {
  8996  			cond := b.Controls[0]
  8997  			v_1 := b.Controls[1]
  8998  			if auxIntToInt64(v_1.AuxInt) != 0 {
  8999  				break
  9000  			}
  9001  			b.resetWithControl(BlockRISCV64BNEZ, cond)
  9002  			return true
  9003  		}
  9004  	case BlockRISCV64BNEZ:
  9005  		// match: (BNEZ (SEQZ x) yes no)
  9006  		// result: (BEQZ x yes no)
  9007  		for b.Controls[0].Op == OpRISCV64SEQZ {
  9008  			v_0 := b.Controls[0]
  9009  			x := v_0.Args[0]
  9010  			b.resetWithControl(BlockRISCV64BEQZ, x)
  9011  			return true
  9012  		}
  9013  		// match: (BNEZ (SNEZ x) yes no)
  9014  		// result: (BNEZ x yes no)
  9015  		for b.Controls[0].Op == OpRISCV64SNEZ {
  9016  			v_0 := b.Controls[0]
  9017  			x := v_0.Args[0]
  9018  			b.resetWithControl(BlockRISCV64BNEZ, x)
  9019  			return true
  9020  		}
  9021  		// match: (BNEZ (NEG x) yes no)
  9022  		// result: (BNEZ x yes no)
  9023  		for b.Controls[0].Op == OpRISCV64NEG {
  9024  			v_0 := b.Controls[0]
  9025  			x := v_0.Args[0]
  9026  			b.resetWithControl(BlockRISCV64BNEZ, x)
  9027  			return true
  9028  		}
  9029  		// match: (BNEZ (FNES <t> x y) yes no)
  9030  		// result: (BEQZ (FEQS <t> x y) yes no)
  9031  		for b.Controls[0].Op == OpRISCV64FNES {
  9032  			v_0 := b.Controls[0]
  9033  			t := v_0.Type
  9034  			_ = v_0.Args[1]
  9035  			v_0_0 := v_0.Args[0]
  9036  			v_0_1 := v_0.Args[1]
  9037  			for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
  9038  				x := v_0_0
  9039  				y := v_0_1
  9040  				v0 := b.NewValue0(v_0.Pos, OpRISCV64FEQS, t)
  9041  				v0.AddArg2(x, y)
  9042  				b.resetWithControl(BlockRISCV64BEQZ, v0)
  9043  				return true
  9044  			}
  9045  		}
  9046  		// match: (BNEZ (FNED <t> x y) yes no)
  9047  		// result: (BEQZ (FEQD <t> x y) yes no)
  9048  		for b.Controls[0].Op == OpRISCV64FNED {
  9049  			v_0 := b.Controls[0]
  9050  			t := v_0.Type
  9051  			_ = v_0.Args[1]
  9052  			v_0_0 := v_0.Args[0]
  9053  			v_0_1 := v_0.Args[1]
  9054  			for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
  9055  				x := v_0_0
  9056  				y := v_0_1
  9057  				v0 := b.NewValue0(v_0.Pos, OpRISCV64FEQD, t)
  9058  				v0.AddArg2(x, y)
  9059  				b.resetWithControl(BlockRISCV64BEQZ, v0)
  9060  				return true
  9061  			}
  9062  		}
  9063  		// match: (BNEZ (SUB x y) yes no)
  9064  		// result: (BNE x y yes no)
  9065  		for b.Controls[0].Op == OpRISCV64SUB {
  9066  			v_0 := b.Controls[0]
  9067  			y := v_0.Args[1]
  9068  			x := v_0.Args[0]
  9069  			b.resetWithControl2(BlockRISCV64BNE, x, y)
  9070  			return true
  9071  		}
  9072  		// match: (BNEZ (SLT x y) yes no)
  9073  		// result: (BLT x y yes no)
  9074  		for b.Controls[0].Op == OpRISCV64SLT {
  9075  			v_0 := b.Controls[0]
  9076  			y := v_0.Args[1]
  9077  			x := v_0.Args[0]
  9078  			b.resetWithControl2(BlockRISCV64BLT, x, y)
  9079  			return true
  9080  		}
  9081  		// match: (BNEZ (SLTU x y) yes no)
  9082  		// result: (BLTU x y yes no)
  9083  		for b.Controls[0].Op == OpRISCV64SLTU {
  9084  			v_0 := b.Controls[0]
  9085  			y := v_0.Args[1]
  9086  			x := v_0.Args[0]
  9087  			b.resetWithControl2(BlockRISCV64BLTU, x, y)
  9088  			return true
  9089  		}
  9090  		// match: (BNEZ (SLTI [x] y) yes no)
  9091  		// result: (BLT y (MOVDconst [x]) yes no)
  9092  		for b.Controls[0].Op == OpRISCV64SLTI {
  9093  			v_0 := b.Controls[0]
  9094  			x := auxIntToInt64(v_0.AuxInt)
  9095  			y := v_0.Args[0]
  9096  			v0 := b.NewValue0(b.Pos, OpRISCV64MOVDconst, typ.UInt64)
  9097  			v0.AuxInt = int64ToAuxInt(x)
  9098  			b.resetWithControl2(BlockRISCV64BLT, y, v0)
  9099  			return true
  9100  		}
  9101  		// match: (BNEZ (SLTIU [x] y) yes no)
  9102  		// result: (BLTU y (MOVDconst [x]) yes no)
  9103  		for b.Controls[0].Op == OpRISCV64SLTIU {
  9104  			v_0 := b.Controls[0]
  9105  			x := auxIntToInt64(v_0.AuxInt)
  9106  			y := v_0.Args[0]
  9107  			v0 := b.NewValue0(b.Pos, OpRISCV64MOVDconst, typ.UInt64)
  9108  			v0.AuxInt = int64ToAuxInt(x)
  9109  			b.resetWithControl2(BlockRISCV64BLTU, y, v0)
  9110  			return true
  9111  		}
  9112  	case BlockIf:
  9113  		// match: (If cond yes no)
  9114  		// result: (BNEZ (MOVBUreg <typ.UInt64> cond) yes no)
  9115  		for {
  9116  			cond := b.Controls[0]
  9117  			v0 := b.NewValue0(cond.Pos, OpRISCV64MOVBUreg, typ.UInt64)
  9118  			v0.AddArg(cond)
  9119  			b.resetWithControl(BlockRISCV64BNEZ, v0)
  9120  			return true
  9121  		}
  9122  	}
  9123  	return false
  9124  }
  9125  

View as plain text