Source file src/cmd/compile/internal/ssa/rewritegeneric.go

Documentation: cmd/compile/internal/ssa

     1  // Code generated from gen/generic.rules; DO NOT EDIT.
     2  // generated with: cd gen; go run *.go
     3  
     4  package ssa
     5  
     6  import "fmt"
     7  import "math"
     8  import "cmd/internal/obj"
     9  import "cmd/internal/objabi"
    10  import "cmd/compile/internal/types"
    11  
    12  var _ = fmt.Println   // in case not otherwise used
    13  var _ = math.MinInt8  // in case not otherwise used
    14  var _ = obj.ANOP      // in case not otherwise used
    15  var _ = objabi.GOROOT // in case not otherwise used
    16  var _ = types.TypeMem // in case not otherwise used
    17  
// rewriteValuegeneric dispatches v to the architecture-independent rewrite
// helpers generated from gen/generic.rules, selected by v's opcode, and
// reports whether any rewrite fired. The generator splits the rules for one
// opcode into chunks named _0, _10, _20, ...; the short-circuit || chain
// preserves rule order and stops at the first helper that rewrites v.
// NOTE(review): this file is machine-generated ("DO NOT EDIT") — change
// gen/generic.rules and regenerate rather than editing here.
func rewriteValuegeneric(v *Value) bool {
	switch v.Op {
	case OpAdd16:
		return rewriteValuegeneric_OpAdd16_0(v) || rewriteValuegeneric_OpAdd16_10(v) || rewriteValuegeneric_OpAdd16_20(v) || rewriteValuegeneric_OpAdd16_30(v)
	case OpAdd32:
		return rewriteValuegeneric_OpAdd32_0(v) || rewriteValuegeneric_OpAdd32_10(v) || rewriteValuegeneric_OpAdd32_20(v) || rewriteValuegeneric_OpAdd32_30(v)
	case OpAdd32F:
		return rewriteValuegeneric_OpAdd32F_0(v)
	case OpAdd64:
		return rewriteValuegeneric_OpAdd64_0(v) || rewriteValuegeneric_OpAdd64_10(v) || rewriteValuegeneric_OpAdd64_20(v) || rewriteValuegeneric_OpAdd64_30(v)
	case OpAdd64F:
		return rewriteValuegeneric_OpAdd64F_0(v)
	case OpAdd8:
		return rewriteValuegeneric_OpAdd8_0(v) || rewriteValuegeneric_OpAdd8_10(v) || rewriteValuegeneric_OpAdd8_20(v) || rewriteValuegeneric_OpAdd8_30(v)
	case OpAddPtr:
		return rewriteValuegeneric_OpAddPtr_0(v)
	case OpAnd16:
		return rewriteValuegeneric_OpAnd16_0(v) || rewriteValuegeneric_OpAnd16_10(v) || rewriteValuegeneric_OpAnd16_20(v)
	case OpAnd32:
		return rewriteValuegeneric_OpAnd32_0(v) || rewriteValuegeneric_OpAnd32_10(v) || rewriteValuegeneric_OpAnd32_20(v)
	case OpAnd64:
		return rewriteValuegeneric_OpAnd64_0(v) || rewriteValuegeneric_OpAnd64_10(v) || rewriteValuegeneric_OpAnd64_20(v)
	case OpAnd8:
		return rewriteValuegeneric_OpAnd8_0(v) || rewriteValuegeneric_OpAnd8_10(v) || rewriteValuegeneric_OpAnd8_20(v)
	case OpArraySelect:
		return rewriteValuegeneric_OpArraySelect_0(v)
	case OpCom16:
		return rewriteValuegeneric_OpCom16_0(v)
	case OpCom32:
		return rewriteValuegeneric_OpCom32_0(v)
	case OpCom64:
		return rewriteValuegeneric_OpCom64_0(v)
	case OpCom8:
		return rewriteValuegeneric_OpCom8_0(v)
	case OpConstInterface:
		return rewriteValuegeneric_OpConstInterface_0(v)
	case OpConstSlice:
		return rewriteValuegeneric_OpConstSlice_0(v)
	case OpConstString:
		return rewriteValuegeneric_OpConstString_0(v)
	case OpConvert:
		return rewriteValuegeneric_OpConvert_0(v)
	case OpCvt32Fto32:
		return rewriteValuegeneric_OpCvt32Fto32_0(v)
	case OpCvt32Fto64:
		return rewriteValuegeneric_OpCvt32Fto64_0(v)
	case OpCvt32Fto64F:
		return rewriteValuegeneric_OpCvt32Fto64F_0(v)
	case OpCvt32to32F:
		return rewriteValuegeneric_OpCvt32to32F_0(v)
	case OpCvt32to64F:
		return rewriteValuegeneric_OpCvt32to64F_0(v)
	case OpCvt64Fto32:
		return rewriteValuegeneric_OpCvt64Fto32_0(v)
	case OpCvt64Fto32F:
		return rewriteValuegeneric_OpCvt64Fto32F_0(v)
	case OpCvt64Fto64:
		return rewriteValuegeneric_OpCvt64Fto64_0(v)
	case OpCvt64to32F:
		return rewriteValuegeneric_OpCvt64to32F_0(v)
	case OpCvt64to64F:
		return rewriteValuegeneric_OpCvt64to64F_0(v)
	case OpDiv16:
		return rewriteValuegeneric_OpDiv16_0(v)
	case OpDiv16u:
		return rewriteValuegeneric_OpDiv16u_0(v)
	case OpDiv32:
		return rewriteValuegeneric_OpDiv32_0(v)
	case OpDiv32F:
		return rewriteValuegeneric_OpDiv32F_0(v)
	case OpDiv32u:
		return rewriteValuegeneric_OpDiv32u_0(v)
	case OpDiv64:
		return rewriteValuegeneric_OpDiv64_0(v)
	case OpDiv64F:
		return rewriteValuegeneric_OpDiv64F_0(v)
	case OpDiv64u:
		return rewriteValuegeneric_OpDiv64u_0(v)
	case OpDiv8:
		return rewriteValuegeneric_OpDiv8_0(v)
	case OpDiv8u:
		return rewriteValuegeneric_OpDiv8u_0(v)
	case OpEq16:
		return rewriteValuegeneric_OpEq16_0(v) || rewriteValuegeneric_OpEq16_10(v) || rewriteValuegeneric_OpEq16_20(v) || rewriteValuegeneric_OpEq16_30(v) || rewriteValuegeneric_OpEq16_40(v) || rewriteValuegeneric_OpEq16_50(v)
	case OpEq32:
		return rewriteValuegeneric_OpEq32_0(v) || rewriteValuegeneric_OpEq32_10(v) || rewriteValuegeneric_OpEq32_20(v) || rewriteValuegeneric_OpEq32_30(v) || rewriteValuegeneric_OpEq32_40(v) || rewriteValuegeneric_OpEq32_50(v) || rewriteValuegeneric_OpEq32_60(v) || rewriteValuegeneric_OpEq32_70(v) || rewriteValuegeneric_OpEq32_80(v) || rewriteValuegeneric_OpEq32_90(v)
	case OpEq32F:
		return rewriteValuegeneric_OpEq32F_0(v)
	case OpEq64:
		return rewriteValuegeneric_OpEq64_0(v) || rewriteValuegeneric_OpEq64_10(v) || rewriteValuegeneric_OpEq64_20(v) || rewriteValuegeneric_OpEq64_30(v) || rewriteValuegeneric_OpEq64_40(v) || rewriteValuegeneric_OpEq64_50(v) || rewriteValuegeneric_OpEq64_60(v)
	case OpEq64F:
		return rewriteValuegeneric_OpEq64F_0(v)
	case OpEq8:
		return rewriteValuegeneric_OpEq8_0(v) || rewriteValuegeneric_OpEq8_10(v) || rewriteValuegeneric_OpEq8_20(v) || rewriteValuegeneric_OpEq8_30(v)
	case OpEqB:
		return rewriteValuegeneric_OpEqB_0(v)
	case OpEqInter:
		return rewriteValuegeneric_OpEqInter_0(v)
	case OpEqPtr:
		return rewriteValuegeneric_OpEqPtr_0(v) || rewriteValuegeneric_OpEqPtr_10(v) || rewriteValuegeneric_OpEqPtr_20(v)
	case OpEqSlice:
		return rewriteValuegeneric_OpEqSlice_0(v)
	case OpGeq16:
		return rewriteValuegeneric_OpGeq16_0(v)
	case OpGeq16U:
		return rewriteValuegeneric_OpGeq16U_0(v)
	case OpGeq32:
		return rewriteValuegeneric_OpGeq32_0(v)
	case OpGeq32F:
		return rewriteValuegeneric_OpGeq32F_0(v)
	case OpGeq32U:
		return rewriteValuegeneric_OpGeq32U_0(v)
	case OpGeq64:
		return rewriteValuegeneric_OpGeq64_0(v)
	case OpGeq64F:
		return rewriteValuegeneric_OpGeq64F_0(v)
	case OpGeq64U:
		return rewriteValuegeneric_OpGeq64U_0(v)
	case OpGeq8:
		return rewriteValuegeneric_OpGeq8_0(v)
	case OpGeq8U:
		return rewriteValuegeneric_OpGeq8U_0(v)
	case OpGreater16:
		return rewriteValuegeneric_OpGreater16_0(v)
	case OpGreater16U:
		return rewriteValuegeneric_OpGreater16U_0(v)
	case OpGreater32:
		return rewriteValuegeneric_OpGreater32_0(v)
	case OpGreater32F:
		return rewriteValuegeneric_OpGreater32F_0(v)
	case OpGreater32U:
		return rewriteValuegeneric_OpGreater32U_0(v)
	case OpGreater64:
		return rewriteValuegeneric_OpGreater64_0(v)
	case OpGreater64F:
		return rewriteValuegeneric_OpGreater64F_0(v)
	case OpGreater64U:
		return rewriteValuegeneric_OpGreater64U_0(v)
	case OpGreater8:
		return rewriteValuegeneric_OpGreater8_0(v)
	case OpGreater8U:
		return rewriteValuegeneric_OpGreater8U_0(v)
	case OpIMake:
		return rewriteValuegeneric_OpIMake_0(v)
	case OpInterCall:
		return rewriteValuegeneric_OpInterCall_0(v)
	case OpIsInBounds:
		return rewriteValuegeneric_OpIsInBounds_0(v) || rewriteValuegeneric_OpIsInBounds_10(v) || rewriteValuegeneric_OpIsInBounds_20(v) || rewriteValuegeneric_OpIsInBounds_30(v)
	case OpIsNonNil:
		return rewriteValuegeneric_OpIsNonNil_0(v)
	case OpIsSliceInBounds:
		return rewriteValuegeneric_OpIsSliceInBounds_0(v)
	case OpLeq16:
		return rewriteValuegeneric_OpLeq16_0(v)
	case OpLeq16U:
		return rewriteValuegeneric_OpLeq16U_0(v)
	case OpLeq32:
		return rewriteValuegeneric_OpLeq32_0(v)
	case OpLeq32F:
		return rewriteValuegeneric_OpLeq32F_0(v)
	case OpLeq32U:
		return rewriteValuegeneric_OpLeq32U_0(v)
	case OpLeq64:
		return rewriteValuegeneric_OpLeq64_0(v)
	case OpLeq64F:
		return rewriteValuegeneric_OpLeq64F_0(v)
	case OpLeq64U:
		return rewriteValuegeneric_OpLeq64U_0(v)
	case OpLeq8:
		return rewriteValuegeneric_OpLeq8_0(v)
	case OpLeq8U:
		return rewriteValuegeneric_OpLeq8U_0(v)
	case OpLess16:
		return rewriteValuegeneric_OpLess16_0(v)
	case OpLess16U:
		return rewriteValuegeneric_OpLess16U_0(v)
	case OpLess32:
		return rewriteValuegeneric_OpLess32_0(v)
	case OpLess32F:
		return rewriteValuegeneric_OpLess32F_0(v)
	case OpLess32U:
		return rewriteValuegeneric_OpLess32U_0(v)
	case OpLess64:
		return rewriteValuegeneric_OpLess64_0(v)
	case OpLess64F:
		return rewriteValuegeneric_OpLess64F_0(v)
	case OpLess64U:
		return rewriteValuegeneric_OpLess64U_0(v)
	case OpLess8:
		return rewriteValuegeneric_OpLess8_0(v)
	case OpLess8U:
		return rewriteValuegeneric_OpLess8U_0(v)
	case OpLoad:
		return rewriteValuegeneric_OpLoad_0(v) || rewriteValuegeneric_OpLoad_10(v) || rewriteValuegeneric_OpLoad_20(v)
	case OpLsh16x16:
		return rewriteValuegeneric_OpLsh16x16_0(v)
	case OpLsh16x32:
		return rewriteValuegeneric_OpLsh16x32_0(v)
	case OpLsh16x64:
		return rewriteValuegeneric_OpLsh16x64_0(v)
	case OpLsh16x8:
		return rewriteValuegeneric_OpLsh16x8_0(v)
	case OpLsh32x16:
		return rewriteValuegeneric_OpLsh32x16_0(v)
	case OpLsh32x32:
		return rewriteValuegeneric_OpLsh32x32_0(v)
	case OpLsh32x64:
		return rewriteValuegeneric_OpLsh32x64_0(v)
	case OpLsh32x8:
		return rewriteValuegeneric_OpLsh32x8_0(v)
	case OpLsh64x16:
		return rewriteValuegeneric_OpLsh64x16_0(v)
	case OpLsh64x32:
		return rewriteValuegeneric_OpLsh64x32_0(v)
	case OpLsh64x64:
		return rewriteValuegeneric_OpLsh64x64_0(v)
	case OpLsh64x8:
		return rewriteValuegeneric_OpLsh64x8_0(v)
	case OpLsh8x16:
		return rewriteValuegeneric_OpLsh8x16_0(v)
	case OpLsh8x32:
		return rewriteValuegeneric_OpLsh8x32_0(v)
	case OpLsh8x64:
		return rewriteValuegeneric_OpLsh8x64_0(v)
	case OpLsh8x8:
		return rewriteValuegeneric_OpLsh8x8_0(v)
	case OpMod16:
		return rewriteValuegeneric_OpMod16_0(v)
	case OpMod16u:
		return rewriteValuegeneric_OpMod16u_0(v)
	case OpMod32:
		return rewriteValuegeneric_OpMod32_0(v)
	case OpMod32u:
		return rewriteValuegeneric_OpMod32u_0(v)
	case OpMod64:
		return rewriteValuegeneric_OpMod64_0(v)
	case OpMod64u:
		return rewriteValuegeneric_OpMod64u_0(v)
	case OpMod8:
		return rewriteValuegeneric_OpMod8_0(v)
	case OpMod8u:
		return rewriteValuegeneric_OpMod8u_0(v)
	case OpMove:
		return rewriteValuegeneric_OpMove_0(v) || rewriteValuegeneric_OpMove_10(v) || rewriteValuegeneric_OpMove_20(v)
	case OpMul16:
		return rewriteValuegeneric_OpMul16_0(v) || rewriteValuegeneric_OpMul16_10(v)
	case OpMul32:
		return rewriteValuegeneric_OpMul32_0(v) || rewriteValuegeneric_OpMul32_10(v)
	case OpMul32F:
		return rewriteValuegeneric_OpMul32F_0(v)
	case OpMul64:
		return rewriteValuegeneric_OpMul64_0(v) || rewriteValuegeneric_OpMul64_10(v)
	case OpMul64F:
		return rewriteValuegeneric_OpMul64F_0(v)
	case OpMul8:
		return rewriteValuegeneric_OpMul8_0(v) || rewriteValuegeneric_OpMul8_10(v)
	case OpNeg16:
		return rewriteValuegeneric_OpNeg16_0(v)
	case OpNeg32:
		return rewriteValuegeneric_OpNeg32_0(v)
	case OpNeg32F:
		return rewriteValuegeneric_OpNeg32F_0(v)
	case OpNeg64:
		return rewriteValuegeneric_OpNeg64_0(v)
	case OpNeg64F:
		return rewriteValuegeneric_OpNeg64F_0(v)
	case OpNeg8:
		return rewriteValuegeneric_OpNeg8_0(v)
	case OpNeq16:
		return rewriteValuegeneric_OpNeq16_0(v)
	case OpNeq32:
		return rewriteValuegeneric_OpNeq32_0(v)
	case OpNeq32F:
		return rewriteValuegeneric_OpNeq32F_0(v)
	case OpNeq64:
		return rewriteValuegeneric_OpNeq64_0(v)
	case OpNeq64F:
		return rewriteValuegeneric_OpNeq64F_0(v)
	case OpNeq8:
		return rewriteValuegeneric_OpNeq8_0(v)
	case OpNeqB:
		return rewriteValuegeneric_OpNeqB_0(v)
	case OpNeqInter:
		return rewriteValuegeneric_OpNeqInter_0(v)
	case OpNeqPtr:
		return rewriteValuegeneric_OpNeqPtr_0(v) || rewriteValuegeneric_OpNeqPtr_10(v) || rewriteValuegeneric_OpNeqPtr_20(v)
	case OpNeqSlice:
		return rewriteValuegeneric_OpNeqSlice_0(v)
	case OpNilCheck:
		return rewriteValuegeneric_OpNilCheck_0(v)
	case OpNot:
		return rewriteValuegeneric_OpNot_0(v) || rewriteValuegeneric_OpNot_10(v) || rewriteValuegeneric_OpNot_20(v) || rewriteValuegeneric_OpNot_30(v) || rewriteValuegeneric_OpNot_40(v)
	case OpOffPtr:
		return rewriteValuegeneric_OpOffPtr_0(v)
	case OpOr16:
		return rewriteValuegeneric_OpOr16_0(v) || rewriteValuegeneric_OpOr16_10(v) || rewriteValuegeneric_OpOr16_20(v)
	case OpOr32:
		return rewriteValuegeneric_OpOr32_0(v) || rewriteValuegeneric_OpOr32_10(v) || rewriteValuegeneric_OpOr32_20(v)
	case OpOr64:
		return rewriteValuegeneric_OpOr64_0(v) || rewriteValuegeneric_OpOr64_10(v) || rewriteValuegeneric_OpOr64_20(v)
	case OpOr8:
		return rewriteValuegeneric_OpOr8_0(v) || rewriteValuegeneric_OpOr8_10(v) || rewriteValuegeneric_OpOr8_20(v)
	case OpPhi:
		return rewriteValuegeneric_OpPhi_0(v)
	case OpPtrIndex:
		return rewriteValuegeneric_OpPtrIndex_0(v)
	case OpRotateLeft16:
		return rewriteValuegeneric_OpRotateLeft16_0(v)
	case OpRotateLeft32:
		return rewriteValuegeneric_OpRotateLeft32_0(v)
	case OpRotateLeft64:
		return rewriteValuegeneric_OpRotateLeft64_0(v)
	case OpRotateLeft8:
		return rewriteValuegeneric_OpRotateLeft8_0(v)
	case OpRound32F:
		return rewriteValuegeneric_OpRound32F_0(v)
	case OpRound64F:
		return rewriteValuegeneric_OpRound64F_0(v)
	case OpRsh16Ux16:
		return rewriteValuegeneric_OpRsh16Ux16_0(v)
	case OpRsh16Ux32:
		return rewriteValuegeneric_OpRsh16Ux32_0(v)
	case OpRsh16Ux64:
		return rewriteValuegeneric_OpRsh16Ux64_0(v)
	case OpRsh16Ux8:
		return rewriteValuegeneric_OpRsh16Ux8_0(v)
	case OpRsh16x16:
		return rewriteValuegeneric_OpRsh16x16_0(v)
	case OpRsh16x32:
		return rewriteValuegeneric_OpRsh16x32_0(v)
	case OpRsh16x64:
		return rewriteValuegeneric_OpRsh16x64_0(v)
	case OpRsh16x8:
		return rewriteValuegeneric_OpRsh16x8_0(v)
	case OpRsh32Ux16:
		return rewriteValuegeneric_OpRsh32Ux16_0(v)
	case OpRsh32Ux32:
		return rewriteValuegeneric_OpRsh32Ux32_0(v)
	case OpRsh32Ux64:
		return rewriteValuegeneric_OpRsh32Ux64_0(v)
	case OpRsh32Ux8:
		return rewriteValuegeneric_OpRsh32Ux8_0(v)
	case OpRsh32x16:
		return rewriteValuegeneric_OpRsh32x16_0(v)
	case OpRsh32x32:
		return rewriteValuegeneric_OpRsh32x32_0(v)
	case OpRsh32x64:
		return rewriteValuegeneric_OpRsh32x64_0(v)
	case OpRsh32x8:
		return rewriteValuegeneric_OpRsh32x8_0(v)
	case OpRsh64Ux16:
		return rewriteValuegeneric_OpRsh64Ux16_0(v)
	case OpRsh64Ux32:
		return rewriteValuegeneric_OpRsh64Ux32_0(v)
	case OpRsh64Ux64:
		return rewriteValuegeneric_OpRsh64Ux64_0(v)
	case OpRsh64Ux8:
		return rewriteValuegeneric_OpRsh64Ux8_0(v)
	case OpRsh64x16:
		return rewriteValuegeneric_OpRsh64x16_0(v)
	case OpRsh64x32:
		return rewriteValuegeneric_OpRsh64x32_0(v)
	case OpRsh64x64:
		return rewriteValuegeneric_OpRsh64x64_0(v)
	case OpRsh64x8:
		return rewriteValuegeneric_OpRsh64x8_0(v)
	case OpRsh8Ux16:
		return rewriteValuegeneric_OpRsh8Ux16_0(v)
	case OpRsh8Ux32:
		return rewriteValuegeneric_OpRsh8Ux32_0(v)
	case OpRsh8Ux64:
		return rewriteValuegeneric_OpRsh8Ux64_0(v)
	case OpRsh8Ux8:
		return rewriteValuegeneric_OpRsh8Ux8_0(v)
	case OpRsh8x16:
		return rewriteValuegeneric_OpRsh8x16_0(v)
	case OpRsh8x32:
		return rewriteValuegeneric_OpRsh8x32_0(v)
	case OpRsh8x64:
		return rewriteValuegeneric_OpRsh8x64_0(v)
	case OpRsh8x8:
		return rewriteValuegeneric_OpRsh8x8_0(v)
	case OpSelect0:
		return rewriteValuegeneric_OpSelect0_0(v)
	case OpSelect1:
		return rewriteValuegeneric_OpSelect1_0(v)
	case OpSignExt16to32:
		return rewriteValuegeneric_OpSignExt16to32_0(v)
	case OpSignExt16to64:
		return rewriteValuegeneric_OpSignExt16to64_0(v)
	case OpSignExt32to64:
		return rewriteValuegeneric_OpSignExt32to64_0(v)
	case OpSignExt8to16:
		return rewriteValuegeneric_OpSignExt8to16_0(v)
	case OpSignExt8to32:
		return rewriteValuegeneric_OpSignExt8to32_0(v)
	case OpSignExt8to64:
		return rewriteValuegeneric_OpSignExt8to64_0(v)
	case OpSliceCap:
		return rewriteValuegeneric_OpSliceCap_0(v)
	case OpSliceLen:
		return rewriteValuegeneric_OpSliceLen_0(v)
	case OpSlicePtr:
		return rewriteValuegeneric_OpSlicePtr_0(v)
	case OpSlicemask:
		return rewriteValuegeneric_OpSlicemask_0(v)
	case OpSqrt:
		return rewriteValuegeneric_OpSqrt_0(v)
	case OpStaticCall:
		return rewriteValuegeneric_OpStaticCall_0(v)
	case OpStore:
		return rewriteValuegeneric_OpStore_0(v) || rewriteValuegeneric_OpStore_10(v) || rewriteValuegeneric_OpStore_20(v)
	case OpStringLen:
		return rewriteValuegeneric_OpStringLen_0(v)
	case OpStringPtr:
		return rewriteValuegeneric_OpStringPtr_0(v)
	case OpStructSelect:
		return rewriteValuegeneric_OpStructSelect_0(v) || rewriteValuegeneric_OpStructSelect_10(v)
	case OpSub16:
		return rewriteValuegeneric_OpSub16_0(v) || rewriteValuegeneric_OpSub16_10(v)
	case OpSub32:
		return rewriteValuegeneric_OpSub32_0(v) || rewriteValuegeneric_OpSub32_10(v)
	case OpSub32F:
		return rewriteValuegeneric_OpSub32F_0(v)
	case OpSub64:
		return rewriteValuegeneric_OpSub64_0(v) || rewriteValuegeneric_OpSub64_10(v)
	case OpSub64F:
		return rewriteValuegeneric_OpSub64F_0(v)
	case OpSub8:
		return rewriteValuegeneric_OpSub8_0(v) || rewriteValuegeneric_OpSub8_10(v)
	case OpTrunc16to8:
		return rewriteValuegeneric_OpTrunc16to8_0(v)
	case OpTrunc32to16:
		return rewriteValuegeneric_OpTrunc32to16_0(v)
	case OpTrunc32to8:
		return rewriteValuegeneric_OpTrunc32to8_0(v)
	case OpTrunc64to16:
		return rewriteValuegeneric_OpTrunc64to16_0(v)
	case OpTrunc64to32:
		return rewriteValuegeneric_OpTrunc64to32_0(v)
	case OpTrunc64to8:
		return rewriteValuegeneric_OpTrunc64to8_0(v)
	case OpXor16:
		return rewriteValuegeneric_OpXor16_0(v) || rewriteValuegeneric_OpXor16_10(v)
	case OpXor32:
		return rewriteValuegeneric_OpXor32_0(v) || rewriteValuegeneric_OpXor32_10(v)
	case OpXor64:
		return rewriteValuegeneric_OpXor64_0(v) || rewriteValuegeneric_OpXor64_10(v)
	case OpXor8:
		return rewriteValuegeneric_OpXor8_0(v) || rewriteValuegeneric_OpXor8_10(v)
	case OpZero:
		return rewriteValuegeneric_OpZero_0(v)
	case OpZeroExt16to32:
		return rewriteValuegeneric_OpZeroExt16to32_0(v)
	case OpZeroExt16to64:
		return rewriteValuegeneric_OpZeroExt16to64_0(v)
	case OpZeroExt32to64:
		return rewriteValuegeneric_OpZeroExt32to64_0(v)
	case OpZeroExt8to16:
		return rewriteValuegeneric_OpZeroExt8to16_0(v)
	case OpZeroExt8to32:
		return rewriteValuegeneric_OpZeroExt8to32_0(v)
	case OpZeroExt8to64:
		return rewriteValuegeneric_OpZeroExt8to64_0(v)
	}
	// No generic rule matches this opcode; leave v unchanged.
	return false
}
// rewriteValuegeneric_OpAdd16_0 applies the first chunk of generated Add16
// rules: constant folding of (Add16 (Const16 [c]) (Const16 [d])) in both
// argument orders, followed by distributivity x*y + x*z -> x*(y+z) in every
// commutative argument permutation. Each `for { ... }` is a single-pass
// match attempt (exited via break on mismatch), not a real loop. Reports
// whether v was rewritten.
// NOTE(review): machine-generated from gen/generic.rules — do not hand-edit.
func rewriteValuegeneric_OpAdd16_0(v *Value) bool {
	b := v.Block
	// match: (Add16 (Const16 [c]) (Const16 [d]))
	// cond:
	// result: (Const16 [int64(int16(c+d))])
	for {
		// touch Args[1] first so the bounds check happens once up front
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpConst16 {
			break
		}
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst16 {
			break
		}
		d := v_1.AuxInt
		v.reset(OpConst16)
		// fold at 16-bit width, then store sign-extended in the 64-bit AuxInt
		v.AuxInt = int64(int16(c + d))
		return true
	}
	// match: (Add16 (Const16 [d]) (Const16 [c]))
	// cond:
	// result: (Const16 [int64(int16(c+d))])
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpConst16 {
			break
		}
		d := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst16 {
			break
		}
		c := v_1.AuxInt
		v.reset(OpConst16)
		v.AuxInt = int64(int16(c + d))
		return true
	}
	// match: (Add16 <t> (Mul16 x y) (Mul16 x z))
	// cond:
	// result: (Mul16 x (Add16 <t> y z))
	for {
		t := v.Type
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpMul16 {
			break
		}
		y := v_0.Args[1]
		x := v_0.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpMul16 {
			break
		}
		z := v_1.Args[1]
		// both products must share the same x operand
		if x != v_1.Args[0] {
			break
		}
		v.reset(OpMul16)
		v.AddArg(x)
		v0 := b.NewValue0(v.Pos, OpAdd16, t)
		v0.AddArg(y)
		v0.AddArg(z)
		v.AddArg(v0)
		return true
	}
	// match: (Add16 <t> (Mul16 y x) (Mul16 x z))
	// cond:
	// result: (Mul16 x (Add16 <t> y z))
	for {
		t := v.Type
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpMul16 {
			break
		}
		x := v_0.Args[1]
		y := v_0.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpMul16 {
			break
		}
		z := v_1.Args[1]
		if x != v_1.Args[0] {
			break
		}
		v.reset(OpMul16)
		v.AddArg(x)
		v0 := b.NewValue0(v.Pos, OpAdd16, t)
		v0.AddArg(y)
		v0.AddArg(z)
		v.AddArg(v0)
		return true
	}
	// match: (Add16 <t> (Mul16 x y) (Mul16 z x))
	// cond:
	// result: (Mul16 x (Add16 <t> y z))
	for {
		t := v.Type
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpMul16 {
			break
		}
		y := v_0.Args[1]
		x := v_0.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpMul16 {
			break
		}
		_ = v_1.Args[1]
		z := v_1.Args[0]
		if x != v_1.Args[1] {
			break
		}
		v.reset(OpMul16)
		v.AddArg(x)
		v0 := b.NewValue0(v.Pos, OpAdd16, t)
		v0.AddArg(y)
		v0.AddArg(z)
		v.AddArg(v0)
		return true
	}
	// match: (Add16 <t> (Mul16 y x) (Mul16 z x))
	// cond:
	// result: (Mul16 x (Add16 <t> y z))
	for {
		t := v.Type
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpMul16 {
			break
		}
		x := v_0.Args[1]
		y := v_0.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpMul16 {
			break
		}
		_ = v_1.Args[1]
		z := v_1.Args[0]
		if x != v_1.Args[1] {
			break
		}
		v.reset(OpMul16)
		v.AddArg(x)
		v0 := b.NewValue0(v.Pos, OpAdd16, t)
		v0.AddArg(y)
		v0.AddArg(z)
		v.AddArg(v0)
		return true
	}
	// match: (Add16 <t> (Mul16 x z) (Mul16 x y))
	// cond:
	// result: (Mul16 x (Add16 <t> y z))
	for {
		t := v.Type
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpMul16 {
			break
		}
		z := v_0.Args[1]
		x := v_0.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpMul16 {
			break
		}
		y := v_1.Args[1]
		if x != v_1.Args[0] {
			break
		}
		v.reset(OpMul16)
		v.AddArg(x)
		v0 := b.NewValue0(v.Pos, OpAdd16, t)
		v0.AddArg(y)
		v0.AddArg(z)
		v.AddArg(v0)
		return true
	}
	// match: (Add16 <t> (Mul16 z x) (Mul16 x y))
	// cond:
	// result: (Mul16 x (Add16 <t> y z))
	for {
		t := v.Type
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpMul16 {
			break
		}
		x := v_0.Args[1]
		z := v_0.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpMul16 {
			break
		}
		y := v_1.Args[1]
		if x != v_1.Args[0] {
			break
		}
		v.reset(OpMul16)
		v.AddArg(x)
		v0 := b.NewValue0(v.Pos, OpAdd16, t)
		v0.AddArg(y)
		v0.AddArg(z)
		v.AddArg(v0)
		return true
	}
	// match: (Add16 <t> (Mul16 x z) (Mul16 y x))
	// cond:
	// result: (Mul16 x (Add16 <t> y z))
	for {
		t := v.Type
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpMul16 {
			break
		}
		z := v_0.Args[1]
		x := v_0.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpMul16 {
			break
		}
		_ = v_1.Args[1]
		y := v_1.Args[0]
		if x != v_1.Args[1] {
			break
		}
		v.reset(OpMul16)
		v.AddArg(x)
		v0 := b.NewValue0(v.Pos, OpAdd16, t)
		v0.AddArg(y)
		v0.AddArg(z)
		v.AddArg(v0)
		return true
	}
	// match: (Add16 <t> (Mul16 z x) (Mul16 y x))
	// cond:
	// result: (Mul16 x (Add16 <t> y z))
	for {
		t := v.Type
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpMul16 {
			break
		}
		x := v_0.Args[1]
		z := v_0.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpMul16 {
			break
		}
		_ = v_1.Args[1]
		y := v_1.Args[0]
		if x != v_1.Args[1] {
			break
		}
		v.reset(OpMul16)
		v.AddArg(x)
		v0 := b.NewValue0(v.Pos, OpAdd16, t)
		v0.AddArg(y)
		v0.AddArg(z)
		v.AddArg(v0)
		return true
	}
	// no rule in this chunk matched; the caller tries the next chunk
	return false
}
   755  func rewriteValuegeneric_OpAdd16_10(v *Value) bool {
   756  	b := v.Block
   757  	// match: (Add16 (Const16 [0]) x)
   758  	// cond:
   759  	// result: x
   760  	for {
   761  		x := v.Args[1]
   762  		v_0 := v.Args[0]
   763  		if v_0.Op != OpConst16 {
   764  			break
   765  		}
   766  		if v_0.AuxInt != 0 {
   767  			break
   768  		}
   769  		v.reset(OpCopy)
   770  		v.Type = x.Type
   771  		v.AddArg(x)
   772  		return true
   773  	}
   774  	// match: (Add16 x (Const16 [0]))
   775  	// cond:
   776  	// result: x
   777  	for {
   778  		_ = v.Args[1]
   779  		x := v.Args[0]
   780  		v_1 := v.Args[1]
   781  		if v_1.Op != OpConst16 {
   782  			break
   783  		}
   784  		if v_1.AuxInt != 0 {
   785  			break
   786  		}
   787  		v.reset(OpCopy)
   788  		v.Type = x.Type
   789  		v.AddArg(x)
   790  		return true
   791  	}
   792  	// match: (Add16 (Const16 [1]) (Com16 x))
   793  	// cond:
   794  	// result: (Neg16 x)
   795  	for {
   796  		_ = v.Args[1]
   797  		v_0 := v.Args[0]
   798  		if v_0.Op != OpConst16 {
   799  			break
   800  		}
   801  		if v_0.AuxInt != 1 {
   802  			break
   803  		}
   804  		v_1 := v.Args[1]
   805  		if v_1.Op != OpCom16 {
   806  			break
   807  		}
   808  		x := v_1.Args[0]
   809  		v.reset(OpNeg16)
   810  		v.AddArg(x)
   811  		return true
   812  	}
   813  	// match: (Add16 (Com16 x) (Const16 [1]))
   814  	// cond:
   815  	// result: (Neg16 x)
   816  	for {
   817  		_ = v.Args[1]
   818  		v_0 := v.Args[0]
   819  		if v_0.Op != OpCom16 {
   820  			break
   821  		}
   822  		x := v_0.Args[0]
   823  		v_1 := v.Args[1]
   824  		if v_1.Op != OpConst16 {
   825  			break
   826  		}
   827  		if v_1.AuxInt != 1 {
   828  			break
   829  		}
   830  		v.reset(OpNeg16)
   831  		v.AddArg(x)
   832  		return true
   833  	}
   834  	// match: (Add16 (Add16 i:(Const16 <t>) z) x)
   835  	// cond: (z.Op != OpConst16 && x.Op != OpConst16)
   836  	// result: (Add16 i (Add16 <t> z x))
   837  	for {
   838  		x := v.Args[1]
   839  		v_0 := v.Args[0]
   840  		if v_0.Op != OpAdd16 {
   841  			break
   842  		}
   843  		z := v_0.Args[1]
   844  		i := v_0.Args[0]
   845  		if i.Op != OpConst16 {
   846  			break
   847  		}
   848  		t := i.Type
   849  		if !(z.Op != OpConst16 && x.Op != OpConst16) {
   850  			break
   851  		}
   852  		v.reset(OpAdd16)
   853  		v.AddArg(i)
   854  		v0 := b.NewValue0(v.Pos, OpAdd16, t)
   855  		v0.AddArg(z)
   856  		v0.AddArg(x)
   857  		v.AddArg(v0)
   858  		return true
   859  	}
   860  	// match: (Add16 (Add16 z i:(Const16 <t>)) x)
   861  	// cond: (z.Op != OpConst16 && x.Op != OpConst16)
   862  	// result: (Add16 i (Add16 <t> z x))
   863  	for {
   864  		x := v.Args[1]
   865  		v_0 := v.Args[0]
   866  		if v_0.Op != OpAdd16 {
   867  			break
   868  		}
   869  		_ = v_0.Args[1]
   870  		z := v_0.Args[0]
   871  		i := v_0.Args[1]
   872  		if i.Op != OpConst16 {
   873  			break
   874  		}
   875  		t := i.Type
   876  		if !(z.Op != OpConst16 && x.Op != OpConst16) {
   877  			break
   878  		}
   879  		v.reset(OpAdd16)
   880  		v.AddArg(i)
   881  		v0 := b.NewValue0(v.Pos, OpAdd16, t)
   882  		v0.AddArg(z)
   883  		v0.AddArg(x)
   884  		v.AddArg(v0)
   885  		return true
   886  	}
   887  	// match: (Add16 x (Add16 i:(Const16 <t>) z))
   888  	// cond: (z.Op != OpConst16 && x.Op != OpConst16)
   889  	// result: (Add16 i (Add16 <t> z x))
   890  	for {
   891  		_ = v.Args[1]
   892  		x := v.Args[0]
   893  		v_1 := v.Args[1]
   894  		if v_1.Op != OpAdd16 {
   895  			break
   896  		}
   897  		z := v_1.Args[1]
   898  		i := v_1.Args[0]
   899  		if i.Op != OpConst16 {
   900  			break
   901  		}
   902  		t := i.Type
   903  		if !(z.Op != OpConst16 && x.Op != OpConst16) {
   904  			break
   905  		}
   906  		v.reset(OpAdd16)
   907  		v.AddArg(i)
   908  		v0 := b.NewValue0(v.Pos, OpAdd16, t)
   909  		v0.AddArg(z)
   910  		v0.AddArg(x)
   911  		v.AddArg(v0)
   912  		return true
   913  	}
   914  	// match: (Add16 x (Add16 z i:(Const16 <t>)))
   915  	// cond: (z.Op != OpConst16 && x.Op != OpConst16)
   916  	// result: (Add16 i (Add16 <t> z x))
   917  	for {
   918  		_ = v.Args[1]
   919  		x := v.Args[0]
   920  		v_1 := v.Args[1]
   921  		if v_1.Op != OpAdd16 {
   922  			break
   923  		}
   924  		_ = v_1.Args[1]
   925  		z := v_1.Args[0]
   926  		i := v_1.Args[1]
   927  		if i.Op != OpConst16 {
   928  			break
   929  		}
   930  		t := i.Type
   931  		if !(z.Op != OpConst16 && x.Op != OpConst16) {
   932  			break
   933  		}
   934  		v.reset(OpAdd16)
   935  		v.AddArg(i)
   936  		v0 := b.NewValue0(v.Pos, OpAdd16, t)
   937  		v0.AddArg(z)
   938  		v0.AddArg(x)
   939  		v.AddArg(v0)
   940  		return true
   941  	}
   942  	// match: (Add16 (Sub16 i:(Const16 <t>) z) x)
   943  	// cond: (z.Op != OpConst16 && x.Op != OpConst16)
   944  	// result: (Add16 i (Sub16 <t> x z))
   945  	for {
   946  		x := v.Args[1]
   947  		v_0 := v.Args[0]
   948  		if v_0.Op != OpSub16 {
   949  			break
   950  		}
   951  		z := v_0.Args[1]
   952  		i := v_0.Args[0]
   953  		if i.Op != OpConst16 {
   954  			break
   955  		}
   956  		t := i.Type
   957  		if !(z.Op != OpConst16 && x.Op != OpConst16) {
   958  			break
   959  		}
   960  		v.reset(OpAdd16)
   961  		v.AddArg(i)
   962  		v0 := b.NewValue0(v.Pos, OpSub16, t)
   963  		v0.AddArg(x)
   964  		v0.AddArg(z)
   965  		v.AddArg(v0)
   966  		return true
   967  	}
   968  	// match: (Add16 x (Sub16 i:(Const16 <t>) z))
   969  	// cond: (z.Op != OpConst16 && x.Op != OpConst16)
   970  	// result: (Add16 i (Sub16 <t> x z))
   971  	for {
   972  		_ = v.Args[1]
   973  		x := v.Args[0]
   974  		v_1 := v.Args[1]
   975  		if v_1.Op != OpSub16 {
   976  			break
   977  		}
   978  		z := v_1.Args[1]
   979  		i := v_1.Args[0]
   980  		if i.Op != OpConst16 {
   981  			break
   982  		}
   983  		t := i.Type
   984  		if !(z.Op != OpConst16 && x.Op != OpConst16) {
   985  			break
   986  		}
   987  		v.reset(OpAdd16)
   988  		v.AddArg(i)
   989  		v0 := b.NewValue0(v.Pos, OpSub16, t)
   990  		v0.AddArg(x)
   991  		v0.AddArg(z)
   992  		v.AddArg(v0)
   993  		return true
   994  	}
   995  	return false
   996  }
// rewriteValuegeneric_OpAdd16_20 applies the second batch of ten generic
// rewrite rules for OpAdd16 (the dispatcher chains _0/_10/_20/_30 with ||).
// These rules reassociate Add16/Sub16 trees so a Const16 operand is hoisted
// outward, and fold two constants in nested Add16 expressions into one.
// It returns true iff v was rewritten in place.
//
// NOTE(review): this file is generated from gen/generic.rules — do not edit
// by hand; change the .rules file and regenerate. Each `for { ... break }`
// below is a one-shot attempt block: any failed match breaks out and the
// next rule is tried.
func rewriteValuegeneric_OpAdd16_20(v *Value) bool {
	b := v.Block
	// match: (Add16 x (Sub16 i:(Const16 <t>) z))
	// cond: (z.Op != OpConst16 && x.Op != OpConst16)
	// result: (Add16 i (Sub16 <t> x z))
	for {
		_ = v.Args[1] // assert v has 2 args before indexed access
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpSub16 {
			break
		}
		z := v_1.Args[1]
		i := v_1.Args[0]
		if i.Op != OpConst16 {
			break
		}
		t := i.Type
		// cond guards against infinite reassociation loops: only fire
		// when neither remaining operand is itself a constant.
		if !(z.Op != OpConst16 && x.Op != OpConst16) {
			break
		}
		v.reset(OpAdd16)
		v.AddArg(i)
		v0 := b.NewValue0(v.Pos, OpSub16, t)
		v0.AddArg(x)
		v0.AddArg(z)
		v.AddArg(v0)
		return true
	}
	// match: (Add16 (Sub16 i:(Const16 <t>) z) x)
	// cond: (z.Op != OpConst16 && x.Op != OpConst16)
	// result: (Add16 i (Sub16 <t> x z))
	for {
		x := v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpSub16 {
			break
		}
		z := v_0.Args[1]
		i := v_0.Args[0]
		if i.Op != OpConst16 {
			break
		}
		t := i.Type
		if !(z.Op != OpConst16 && x.Op != OpConst16) {
			break
		}
		v.reset(OpAdd16)
		v.AddArg(i)
		v0 := b.NewValue0(v.Pos, OpSub16, t)
		v0.AddArg(x)
		v0.AddArg(z)
		v.AddArg(v0)
		return true
	}
	// match: (Add16 (Sub16 z i:(Const16 <t>)) x)
	// cond: (z.Op != OpConst16 && x.Op != OpConst16)
	// result: (Sub16 (Add16 <t> x z) i)
	for {
		x := v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpSub16 {
			break
		}
		_ = v_0.Args[1]
		z := v_0.Args[0]
		i := v_0.Args[1]
		if i.Op != OpConst16 {
			break
		}
		t := i.Type
		if !(z.Op != OpConst16 && x.Op != OpConst16) {
			break
		}
		v.reset(OpSub16)
		v0 := b.NewValue0(v.Pos, OpAdd16, t)
		v0.AddArg(x)
		v0.AddArg(z)
		v.AddArg(v0)
		v.AddArg(i)
		return true
	}
	// match: (Add16 x (Sub16 z i:(Const16 <t>)))
	// cond: (z.Op != OpConst16 && x.Op != OpConst16)
	// result: (Sub16 (Add16 <t> x z) i)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpSub16 {
			break
		}
		_ = v_1.Args[1]
		z := v_1.Args[0]
		i := v_1.Args[1]
		if i.Op != OpConst16 {
			break
		}
		t := i.Type
		if !(z.Op != OpConst16 && x.Op != OpConst16) {
			break
		}
		v.reset(OpSub16)
		v0 := b.NewValue0(v.Pos, OpAdd16, t)
		v0.AddArg(x)
		v0.AddArg(z)
		v.AddArg(v0)
		v.AddArg(i)
		return true
	}
	// match: (Add16 x (Sub16 z i:(Const16 <t>)))
	// cond: (z.Op != OpConst16 && x.Op != OpConst16)
	// result: (Sub16 (Add16 <t> x z) i)
	// NOTE(review): byte-identical to the previous rule — a redundant but
	// harmless artifact of the generator's commute expansion.
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpSub16 {
			break
		}
		_ = v_1.Args[1]
		z := v_1.Args[0]
		i := v_1.Args[1]
		if i.Op != OpConst16 {
			break
		}
		t := i.Type
		if !(z.Op != OpConst16 && x.Op != OpConst16) {
			break
		}
		v.reset(OpSub16)
		v0 := b.NewValue0(v.Pos, OpAdd16, t)
		v0.AddArg(x)
		v0.AddArg(z)
		v.AddArg(v0)
		v.AddArg(i)
		return true
	}
	// match: (Add16 (Sub16 z i:(Const16 <t>)) x)
	// cond: (z.Op != OpConst16 && x.Op != OpConst16)
	// result: (Sub16 (Add16 <t> x z) i)
	// NOTE(review): duplicate of an earlier rule in this function; see note above.
	for {
		x := v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpSub16 {
			break
		}
		_ = v_0.Args[1]
		z := v_0.Args[0]
		i := v_0.Args[1]
		if i.Op != OpConst16 {
			break
		}
		t := i.Type
		if !(z.Op != OpConst16 && x.Op != OpConst16) {
			break
		}
		v.reset(OpSub16)
		v0 := b.NewValue0(v.Pos, OpAdd16, t)
		v0.AddArg(x)
		v0.AddArg(z)
		v.AddArg(v0)
		v.AddArg(i)
		return true
	}
	// match: (Add16 (Const16 <t> [c]) (Add16 (Const16 <t> [d]) x))
	// cond:
	// result: (Add16 (Const16 <t> [int64(int16(c+d))]) x)
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpConst16 {
			break
		}
		t := v_0.Type
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpAdd16 {
			break
		}
		x := v_1.Args[1]
		v_1_0 := v_1.Args[0]
		if v_1_0.Op != OpConst16 {
			break
		}
		if v_1_0.Type != t {
			break
		}
		d := v_1_0.AuxInt
		v.reset(OpAdd16)
		v0 := b.NewValue0(v.Pos, OpConst16, t)
		// fold both constants; int16 truncation keeps 16-bit wraparound semantics
		v0.AuxInt = int64(int16(c + d))
		v.AddArg(v0)
		v.AddArg(x)
		return true
	}
	// match: (Add16 (Const16 <t> [c]) (Add16 x (Const16 <t> [d])))
	// cond:
	// result: (Add16 (Const16 <t> [int64(int16(c+d))]) x)
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpConst16 {
			break
		}
		t := v_0.Type
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpAdd16 {
			break
		}
		_ = v_1.Args[1]
		x := v_1.Args[0]
		v_1_1 := v_1.Args[1]
		if v_1_1.Op != OpConst16 {
			break
		}
		if v_1_1.Type != t {
			break
		}
		d := v_1_1.AuxInt
		v.reset(OpAdd16)
		v0 := b.NewValue0(v.Pos, OpConst16, t)
		v0.AuxInt = int64(int16(c + d))
		v.AddArg(v0)
		v.AddArg(x)
		return true
	}
	// match: (Add16 (Add16 (Const16 <t> [d]) x) (Const16 <t> [c]))
	// cond:
	// result: (Add16 (Const16 <t> [int64(int16(c+d))]) x)
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpAdd16 {
			break
		}
		x := v_0.Args[1]
		v_0_0 := v_0.Args[0]
		if v_0_0.Op != OpConst16 {
			break
		}
		t := v_0_0.Type
		d := v_0_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst16 {
			break
		}
		if v_1.Type != t {
			break
		}
		c := v_1.AuxInt
		v.reset(OpAdd16)
		v0 := b.NewValue0(v.Pos, OpConst16, t)
		v0.AuxInt = int64(int16(c + d))
		v.AddArg(v0)
		v.AddArg(x)
		return true
	}
	// match: (Add16 (Add16 x (Const16 <t> [d])) (Const16 <t> [c]))
	// cond:
	// result: (Add16 (Const16 <t> [int64(int16(c+d))]) x)
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpAdd16 {
			break
		}
		_ = v_0.Args[1]
		x := v_0.Args[0]
		v_0_1 := v_0.Args[1]
		if v_0_1.Op != OpConst16 {
			break
		}
		t := v_0_1.Type
		d := v_0_1.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst16 {
			break
		}
		if v_1.Type != t {
			break
		}
		c := v_1.AuxInt
		v.reset(OpAdd16)
		v0 := b.NewValue0(v.Pos, OpConst16, t)
		v0.AuxInt = int64(int16(c + d))
		v.AddArg(v0)
		v.AddArg(x)
		return true
	}
	// no rule in this batch matched
	return false
}
// rewriteValuegeneric_OpAdd16_30 applies the final batch of generic rewrite
// rules for OpAdd16: folding a Const16 into a neighboring Sub16, i.e.
// c + (d - x) => (c+d) - x and c + (x - d) => (c-d) + x, in both commuted
// argument orders. All arithmetic on the constants is truncated to int16 to
// preserve 16-bit wraparound semantics. Returns true iff v was rewritten.
//
// NOTE(review): generated from gen/generic.rules — do not edit by hand.
func rewriteValuegeneric_OpAdd16_30(v *Value) bool {
	b := v.Block
	// match: (Add16 (Const16 <t> [c]) (Sub16 (Const16 <t> [d]) x))
	// cond:
	// result: (Sub16 (Const16 <t> [int64(int16(c+d))]) x)
	for {
		_ = v.Args[1] // assert v has 2 args before indexed access
		v_0 := v.Args[0]
		if v_0.Op != OpConst16 {
			break
		}
		t := v_0.Type
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpSub16 {
			break
		}
		x := v_1.Args[1]
		v_1_0 := v_1.Args[0]
		if v_1_0.Op != OpConst16 {
			break
		}
		if v_1_0.Type != t {
			break
		}
		d := v_1_0.AuxInt
		v.reset(OpSub16)
		v0 := b.NewValue0(v.Pos, OpConst16, t)
		v0.AuxInt = int64(int16(c + d))
		v.AddArg(v0)
		v.AddArg(x)
		return true
	}
	// match: (Add16 (Sub16 (Const16 <t> [d]) x) (Const16 <t> [c]))
	// cond:
	// result: (Sub16 (Const16 <t> [int64(int16(c+d))]) x)
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpSub16 {
			break
		}
		x := v_0.Args[1]
		v_0_0 := v_0.Args[0]
		if v_0_0.Op != OpConst16 {
			break
		}
		t := v_0_0.Type
		d := v_0_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst16 {
			break
		}
		if v_1.Type != t {
			break
		}
		c := v_1.AuxInt
		v.reset(OpSub16)
		v0 := b.NewValue0(v.Pos, OpConst16, t)
		v0.AuxInt = int64(int16(c + d))
		v.AddArg(v0)
		v.AddArg(x)
		return true
	}
	// match: (Add16 (Const16 <t> [c]) (Sub16 x (Const16 <t> [d])))
	// cond:
	// result: (Add16 (Const16 <t> [int64(int16(c-d))]) x)
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpConst16 {
			break
		}
		t := v_0.Type
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpSub16 {
			break
		}
		_ = v_1.Args[1]
		x := v_1.Args[0]
		v_1_1 := v_1.Args[1]
		if v_1_1.Op != OpConst16 {
			break
		}
		if v_1_1.Type != t {
			break
		}
		d := v_1_1.AuxInt
		v.reset(OpAdd16)
		v0 := b.NewValue0(v.Pos, OpConst16, t)
		// c + (x - d) => (c-d) + x
		v0.AuxInt = int64(int16(c - d))
		v.AddArg(v0)
		v.AddArg(x)
		return true
	}
	// match: (Add16 (Sub16 x (Const16 <t> [d])) (Const16 <t> [c]))
	// cond:
	// result: (Add16 (Const16 <t> [int64(int16(c-d))]) x)
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpSub16 {
			break
		}
		_ = v_0.Args[1]
		x := v_0.Args[0]
		v_0_1 := v_0.Args[1]
		if v_0_1.Op != OpConst16 {
			break
		}
		t := v_0_1.Type
		d := v_0_1.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst16 {
			break
		}
		if v_1.Type != t {
			break
		}
		c := v_1.AuxInt
		v.reset(OpAdd16)
		v0 := b.NewValue0(v.Pos, OpConst16, t)
		v0.AuxInt = int64(int16(c - d))
		v.AddArg(v0)
		v.AddArg(x)
		return true
	}
	// no rule in this batch matched
	return false
}
// rewriteValuegeneric_OpAdd32_0 applies the first batch of generic rewrite
// rules for OpAdd32: constant folding (c + d => c+d, truncated to int32) in
// both argument orders, followed by eight commuted variants of the
// distributive rule x*y + x*z => x*(y+z), which saves one multiply.
// Returns true iff v was rewritten in place.
//
// NOTE(review): generated from gen/generic.rules — do not edit by hand.
// Each `for { ... break }` is a one-shot attempt block.
func rewriteValuegeneric_OpAdd32_0(v *Value) bool {
	b := v.Block
	// match: (Add32 (Const32 [c]) (Const32 [d]))
	// cond:
	// result: (Const32 [int64(int32(c+d))])
	for {
		_ = v.Args[1] // assert v has 2 args before indexed access
		v_0 := v.Args[0]
		if v_0.Op != OpConst32 {
			break
		}
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst32 {
			break
		}
		d := v_1.AuxInt
		v.reset(OpConst32)
		// int32 truncation keeps 32-bit wraparound semantics in the int64 AuxInt
		v.AuxInt = int64(int32(c + d))
		return true
	}
	// match: (Add32 (Const32 [d]) (Const32 [c]))
	// cond:
	// result: (Const32 [int64(int32(c+d))])
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpConst32 {
			break
		}
		d := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst32 {
			break
		}
		c := v_1.AuxInt
		v.reset(OpConst32)
		v.AuxInt = int64(int32(c + d))
		return true
	}
	// match: (Add32 <t> (Mul32 x y) (Mul32 x z))
	// cond:
	// result: (Mul32 x (Add32 <t> y z))
	for {
		t := v.Type
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpMul32 {
			break
		}
		y := v_0.Args[1]
		x := v_0.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpMul32 {
			break
		}
		z := v_1.Args[1]
		if x != v_1.Args[0] { // both products must share the factor x
			break
		}
		v.reset(OpMul32)
		v.AddArg(x)
		v0 := b.NewValue0(v.Pos, OpAdd32, t)
		v0.AddArg(y)
		v0.AddArg(z)
		v.AddArg(v0)
		return true
	}
	// match: (Add32 <t> (Mul32 y x) (Mul32 x z))
	// cond:
	// result: (Mul32 x (Add32 <t> y z))
	for {
		t := v.Type
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpMul32 {
			break
		}
		x := v_0.Args[1]
		y := v_0.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpMul32 {
			break
		}
		z := v_1.Args[1]
		if x != v_1.Args[0] {
			break
		}
		v.reset(OpMul32)
		v.AddArg(x)
		v0 := b.NewValue0(v.Pos, OpAdd32, t)
		v0.AddArg(y)
		v0.AddArg(z)
		v.AddArg(v0)
		return true
	}
	// match: (Add32 <t> (Mul32 x y) (Mul32 z x))
	// cond:
	// result: (Mul32 x (Add32 <t> y z))
	for {
		t := v.Type
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpMul32 {
			break
		}
		y := v_0.Args[1]
		x := v_0.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpMul32 {
			break
		}
		_ = v_1.Args[1]
		z := v_1.Args[0]
		if x != v_1.Args[1] {
			break
		}
		v.reset(OpMul32)
		v.AddArg(x)
		v0 := b.NewValue0(v.Pos, OpAdd32, t)
		v0.AddArg(y)
		v0.AddArg(z)
		v.AddArg(v0)
		return true
	}
	// match: (Add32 <t> (Mul32 y x) (Mul32 z x))
	// cond:
	// result: (Mul32 x (Add32 <t> y z))
	for {
		t := v.Type
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpMul32 {
			break
		}
		x := v_0.Args[1]
		y := v_0.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpMul32 {
			break
		}
		_ = v_1.Args[1]
		z := v_1.Args[0]
		if x != v_1.Args[1] {
			break
		}
		v.reset(OpMul32)
		v.AddArg(x)
		v0 := b.NewValue0(v.Pos, OpAdd32, t)
		v0.AddArg(y)
		v0.AddArg(z)
		v.AddArg(v0)
		return true
	}
	// match: (Add32 <t> (Mul32 x z) (Mul32 x y))
	// cond:
	// result: (Mul32 x (Add32 <t> y z))
	for {
		t := v.Type
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpMul32 {
			break
		}
		z := v_0.Args[1]
		x := v_0.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpMul32 {
			break
		}
		y := v_1.Args[1]
		if x != v_1.Args[0] {
			break
		}
		v.reset(OpMul32)
		v.AddArg(x)
		v0 := b.NewValue0(v.Pos, OpAdd32, t)
		v0.AddArg(y)
		v0.AddArg(z)
		v.AddArg(v0)
		return true
	}
	// match: (Add32 <t> (Mul32 z x) (Mul32 x y))
	// cond:
	// result: (Mul32 x (Add32 <t> y z))
	for {
		t := v.Type
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpMul32 {
			break
		}
		x := v_0.Args[1]
		z := v_0.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpMul32 {
			break
		}
		y := v_1.Args[1]
		if x != v_1.Args[0] {
			break
		}
		v.reset(OpMul32)
		v.AddArg(x)
		v0 := b.NewValue0(v.Pos, OpAdd32, t)
		v0.AddArg(y)
		v0.AddArg(z)
		v.AddArg(v0)
		return true
	}
	// match: (Add32 <t> (Mul32 x z) (Mul32 y x))
	// cond:
	// result: (Mul32 x (Add32 <t> y z))
	for {
		t := v.Type
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpMul32 {
			break
		}
		z := v_0.Args[1]
		x := v_0.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpMul32 {
			break
		}
		_ = v_1.Args[1]
		y := v_1.Args[0]
		if x != v_1.Args[1] {
			break
		}
		v.reset(OpMul32)
		v.AddArg(x)
		v0 := b.NewValue0(v.Pos, OpAdd32, t)
		v0.AddArg(y)
		v0.AddArg(z)
		v.AddArg(v0)
		return true
	}
	// match: (Add32 <t> (Mul32 z x) (Mul32 y x))
	// cond:
	// result: (Mul32 x (Add32 <t> y z))
	for {
		t := v.Type
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpMul32 {
			break
		}
		x := v_0.Args[1]
		z := v_0.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpMul32 {
			break
		}
		_ = v_1.Args[1]
		y := v_1.Args[0]
		if x != v_1.Args[1] {
			break
		}
		v.reset(OpMul32)
		v.AddArg(x)
		v0 := b.NewValue0(v.Pos, OpAdd32, t)
		v0.AddArg(y)
		v0.AddArg(z)
		v.AddArg(v0)
		return true
	}
	// no rule in this batch matched
	return false
}
// rewriteValuegeneric_OpAdd32_10 applies the second batch of generic rewrite
// rules for OpAdd32: additive identity (x + 0 => x, both orders), the
// two's-complement identity 1 + ^x => -x (both orders), and reassociation
// of nested Add32/Sub32 expressions so a Const32 operand is hoisted outward.
// Returns true iff v was rewritten in place.
//
// NOTE(review): generated from gen/generic.rules — do not edit by hand.
// Each `for { ... break }` is a one-shot attempt block.
func rewriteValuegeneric_OpAdd32_10(v *Value) bool {
	b := v.Block
	// match: (Add32 (Const32 [0]) x)
	// cond:
	// result: x
	for {
		x := v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpConst32 {
			break
		}
		if v_0.AuxInt != 0 {
			break
		}
		// x + 0 == x: replace v with a copy of x
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	// match: (Add32 x (Const32 [0]))
	// cond:
	// result: x
	for {
		_ = v.Args[1] // assert v has 2 args before indexed access
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpConst32 {
			break
		}
		if v_1.AuxInt != 0 {
			break
		}
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	// match: (Add32 (Const32 [1]) (Com32 x))
	// cond:
	// result: (Neg32 x)
	// two's complement: ^x + 1 == -x
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpConst32 {
			break
		}
		if v_0.AuxInt != 1 {
			break
		}
		v_1 := v.Args[1]
		if v_1.Op != OpCom32 {
			break
		}
		x := v_1.Args[0]
		v.reset(OpNeg32)
		v.AddArg(x)
		return true
	}
	// match: (Add32 (Com32 x) (Const32 [1]))
	// cond:
	// result: (Neg32 x)
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpCom32 {
			break
		}
		x := v_0.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpConst32 {
			break
		}
		if v_1.AuxInt != 1 {
			break
		}
		v.reset(OpNeg32)
		v.AddArg(x)
		return true
	}
	// match: (Add32 (Add32 i:(Const32 <t>) z) x)
	// cond: (z.Op != OpConst32 && x.Op != OpConst32)
	// result: (Add32 i (Add32 <t> z x))
	for {
		x := v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpAdd32 {
			break
		}
		z := v_0.Args[1]
		i := v_0.Args[0]
		if i.Op != OpConst32 {
			break
		}
		t := i.Type
		// cond guards against infinite reassociation loops: only fire
		// when neither remaining operand is itself a constant.
		if !(z.Op != OpConst32 && x.Op != OpConst32) {
			break
		}
		v.reset(OpAdd32)
		v.AddArg(i)
		v0 := b.NewValue0(v.Pos, OpAdd32, t)
		v0.AddArg(z)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (Add32 (Add32 z i:(Const32 <t>)) x)
	// cond: (z.Op != OpConst32 && x.Op != OpConst32)
	// result: (Add32 i (Add32 <t> z x))
	for {
		x := v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpAdd32 {
			break
		}
		_ = v_0.Args[1]
		z := v_0.Args[0]
		i := v_0.Args[1]
		if i.Op != OpConst32 {
			break
		}
		t := i.Type
		if !(z.Op != OpConst32 && x.Op != OpConst32) {
			break
		}
		v.reset(OpAdd32)
		v.AddArg(i)
		v0 := b.NewValue0(v.Pos, OpAdd32, t)
		v0.AddArg(z)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (Add32 x (Add32 i:(Const32 <t>) z))
	// cond: (z.Op != OpConst32 && x.Op != OpConst32)
	// result: (Add32 i (Add32 <t> z x))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpAdd32 {
			break
		}
		z := v_1.Args[1]
		i := v_1.Args[0]
		if i.Op != OpConst32 {
			break
		}
		t := i.Type
		if !(z.Op != OpConst32 && x.Op != OpConst32) {
			break
		}
		v.reset(OpAdd32)
		v.AddArg(i)
		v0 := b.NewValue0(v.Pos, OpAdd32, t)
		v0.AddArg(z)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (Add32 x (Add32 z i:(Const32 <t>)))
	// cond: (z.Op != OpConst32 && x.Op != OpConst32)
	// result: (Add32 i (Add32 <t> z x))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpAdd32 {
			break
		}
		_ = v_1.Args[1]
		z := v_1.Args[0]
		i := v_1.Args[1]
		if i.Op != OpConst32 {
			break
		}
		t := i.Type
		if !(z.Op != OpConst32 && x.Op != OpConst32) {
			break
		}
		v.reset(OpAdd32)
		v.AddArg(i)
		v0 := b.NewValue0(v.Pos, OpAdd32, t)
		v0.AddArg(z)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (Add32 (Sub32 i:(Const32 <t>) z) x)
	// cond: (z.Op != OpConst32 && x.Op != OpConst32)
	// result: (Add32 i (Sub32 <t> x z))
	for {
		x := v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpSub32 {
			break
		}
		z := v_0.Args[1]
		i := v_0.Args[0]
		if i.Op != OpConst32 {
			break
		}
		t := i.Type
		if !(z.Op != OpConst32 && x.Op != OpConst32) {
			break
		}
		v.reset(OpAdd32)
		v.AddArg(i)
		v0 := b.NewValue0(v.Pos, OpSub32, t)
		v0.AddArg(x)
		v0.AddArg(z)
		v.AddArg(v0)
		return true
	}
	// match: (Add32 x (Sub32 i:(Const32 <t>) z))
	// cond: (z.Op != OpConst32 && x.Op != OpConst32)
	// result: (Add32 i (Sub32 <t> x z))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpSub32 {
			break
		}
		z := v_1.Args[1]
		i := v_1.Args[0]
		if i.Op != OpConst32 {
			break
		}
		t := i.Type
		if !(z.Op != OpConst32 && x.Op != OpConst32) {
			break
		}
		v.reset(OpAdd32)
		v.AddArg(i)
		v0 := b.NewValue0(v.Pos, OpSub32, t)
		v0.AddArg(x)
		v0.AddArg(z)
		v.AddArg(v0)
		return true
	}
	// no rule in this batch matched
	return false
}
  1932  func rewriteValuegeneric_OpAdd32_20(v *Value) bool {
  1933  	b := v.Block
  1934  	// match: (Add32 x (Sub32 i:(Const32 <t>) z))
  1935  	// cond: (z.Op != OpConst32 && x.Op != OpConst32)
  1936  	// result: (Add32 i (Sub32 <t> x z))
  1937  	for {
  1938  		_ = v.Args[1]
  1939  		x := v.Args[0]
  1940  		v_1 := v.Args[1]
  1941  		if v_1.Op != OpSub32 {
  1942  			break
  1943  		}
  1944  		z := v_1.Args[1]
  1945  		i := v_1.Args[0]
  1946  		if i.Op != OpConst32 {
  1947  			break
  1948  		}
  1949  		t := i.Type
  1950  		if !(z.Op != OpConst32 && x.Op != OpConst32) {
  1951  			break
  1952  		}
  1953  		v.reset(OpAdd32)
  1954  		v.AddArg(i)
  1955  		v0 := b.NewValue0(v.Pos, OpSub32, t)
  1956  		v0.AddArg(x)
  1957  		v0.AddArg(z)
  1958  		v.AddArg(v0)
  1959  		return true
  1960  	}
  1961  	// match: (Add32 (Sub32 i:(Const32 <t>) z) x)
  1962  	// cond: (z.Op != OpConst32 && x.Op != OpConst32)
  1963  	// result: (Add32 i (Sub32 <t> x z))
  1964  	for {
  1965  		x := v.Args[1]
  1966  		v_0 := v.Args[0]
  1967  		if v_0.Op != OpSub32 {
  1968  			break
  1969  		}
  1970  		z := v_0.Args[1]
  1971  		i := v_0.Args[0]
  1972  		if i.Op != OpConst32 {
  1973  			break
  1974  		}
  1975  		t := i.Type
  1976  		if !(z.Op != OpConst32 && x.Op != OpConst32) {
  1977  			break
  1978  		}
  1979  		v.reset(OpAdd32)
  1980  		v.AddArg(i)
  1981  		v0 := b.NewValue0(v.Pos, OpSub32, t)
  1982  		v0.AddArg(x)
  1983  		v0.AddArg(z)
  1984  		v.AddArg(v0)
  1985  		return true
  1986  	}
  1987  	// match: (Add32 (Sub32 z i:(Const32 <t>)) x)
  1988  	// cond: (z.Op != OpConst32 && x.Op != OpConst32)
  1989  	// result: (Sub32 (Add32 <t> x z) i)
  1990  	for {
  1991  		x := v.Args[1]
  1992  		v_0 := v.Args[0]
  1993  		if v_0.Op != OpSub32 {
  1994  			break
  1995  		}
  1996  		_ = v_0.Args[1]
  1997  		z := v_0.Args[0]
  1998  		i := v_0.Args[1]
  1999  		if i.Op != OpConst32 {
  2000  			break
  2001  		}
  2002  		t := i.Type
  2003  		if !(z.Op != OpConst32 && x.Op != OpConst32) {
  2004  			break
  2005  		}
  2006  		v.reset(OpSub32)
  2007  		v0 := b.NewValue0(v.Pos, OpAdd32, t)
  2008  		v0.AddArg(x)
  2009  		v0.AddArg(z)
  2010  		v.AddArg(v0)
  2011  		v.AddArg(i)
  2012  		return true
  2013  	}
  2014  	// match: (Add32 x (Sub32 z i:(Const32 <t>)))
  2015  	// cond: (z.Op != OpConst32 && x.Op != OpConst32)
  2016  	// result: (Sub32 (Add32 <t> x z) i)
  2017  	for {
  2018  		_ = v.Args[1]
  2019  		x := v.Args[0]
  2020  		v_1 := v.Args[1]
  2021  		if v_1.Op != OpSub32 {
  2022  			break
  2023  		}
  2024  		_ = v_1.Args[1]
  2025  		z := v_1.Args[0]
  2026  		i := v_1.Args[1]
  2027  		if i.Op != OpConst32 {
  2028  			break
  2029  		}
  2030  		t := i.Type
  2031  		if !(z.Op != OpConst32 && x.Op != OpConst32) {
  2032  			break
  2033  		}
  2034  		v.reset(OpSub32)
  2035  		v0 := b.NewValue0(v.Pos, OpAdd32, t)
  2036  		v0.AddArg(x)
  2037  		v0.AddArg(z)
  2038  		v.AddArg(v0)
  2039  		v.AddArg(i)
  2040  		return true
  2041  	}
  2042  	// match: (Add32 x (Sub32 z i:(Const32 <t>)))
  2043  	// cond: (z.Op != OpConst32 && x.Op != OpConst32)
  2044  	// result: (Sub32 (Add32 <t> x z) i)
  2045  	for {
  2046  		_ = v.Args[1]
  2047  		x := v.Args[0]
  2048  		v_1 := v.Args[1]
  2049  		if v_1.Op != OpSub32 {
  2050  			break
  2051  		}
  2052  		_ = v_1.Args[1]
  2053  		z := v_1.Args[0]
  2054  		i := v_1.Args[1]
  2055  		if i.Op != OpConst32 {
  2056  			break
  2057  		}
  2058  		t := i.Type
  2059  		if !(z.Op != OpConst32 && x.Op != OpConst32) {
  2060  			break
  2061  		}
  2062  		v.reset(OpSub32)
  2063  		v0 := b.NewValue0(v.Pos, OpAdd32, t)
  2064  		v0.AddArg(x)
  2065  		v0.AddArg(z)
  2066  		v.AddArg(v0)
  2067  		v.AddArg(i)
  2068  		return true
  2069  	}
  2070  	// match: (Add32 (Sub32 z i:(Const32 <t>)) x)
  2071  	// cond: (z.Op != OpConst32 && x.Op != OpConst32)
  2072  	// result: (Sub32 (Add32 <t> x z) i)
  2073  	for {
  2074  		x := v.Args[1]
  2075  		v_0 := v.Args[0]
  2076  		if v_0.Op != OpSub32 {
  2077  			break
  2078  		}
  2079  		_ = v_0.Args[1]
  2080  		z := v_0.Args[0]
  2081  		i := v_0.Args[1]
  2082  		if i.Op != OpConst32 {
  2083  			break
  2084  		}
  2085  		t := i.Type
  2086  		if !(z.Op != OpConst32 && x.Op != OpConst32) {
  2087  			break
  2088  		}
  2089  		v.reset(OpSub32)
  2090  		v0 := b.NewValue0(v.Pos, OpAdd32, t)
  2091  		v0.AddArg(x)
  2092  		v0.AddArg(z)
  2093  		v.AddArg(v0)
  2094  		v.AddArg(i)
  2095  		return true
  2096  	}
  2097  	// match: (Add32 (Const32 <t> [c]) (Add32 (Const32 <t> [d]) x))
  2098  	// cond:
  2099  	// result: (Add32 (Const32 <t> [int64(int32(c+d))]) x)
  2100  	for {
  2101  		_ = v.Args[1]
  2102  		v_0 := v.Args[0]
  2103  		if v_0.Op != OpConst32 {
  2104  			break
  2105  		}
  2106  		t := v_0.Type
  2107  		c := v_0.AuxInt
  2108  		v_1 := v.Args[1]
  2109  		if v_1.Op != OpAdd32 {
  2110  			break
  2111  		}
  2112  		x := v_1.Args[1]
  2113  		v_1_0 := v_1.Args[0]
  2114  		if v_1_0.Op != OpConst32 {
  2115  			break
  2116  		}
  2117  		if v_1_0.Type != t {
  2118  			break
  2119  		}
  2120  		d := v_1_0.AuxInt
  2121  		v.reset(OpAdd32)
  2122  		v0 := b.NewValue0(v.Pos, OpConst32, t)
  2123  		v0.AuxInt = int64(int32(c + d))
  2124  		v.AddArg(v0)
  2125  		v.AddArg(x)
  2126  		return true
  2127  	}
  2128  	// match: (Add32 (Const32 <t> [c]) (Add32 x (Const32 <t> [d])))
  2129  	// cond:
  2130  	// result: (Add32 (Const32 <t> [int64(int32(c+d))]) x)
  2131  	for {
  2132  		_ = v.Args[1]
  2133  		v_0 := v.Args[0]
  2134  		if v_0.Op != OpConst32 {
  2135  			break
  2136  		}
  2137  		t := v_0.Type
  2138  		c := v_0.AuxInt
  2139  		v_1 := v.Args[1]
  2140  		if v_1.Op != OpAdd32 {
  2141  			break
  2142  		}
  2143  		_ = v_1.Args[1]
  2144  		x := v_1.Args[0]
  2145  		v_1_1 := v_1.Args[1]
  2146  		if v_1_1.Op != OpConst32 {
  2147  			break
  2148  		}
  2149  		if v_1_1.Type != t {
  2150  			break
  2151  		}
  2152  		d := v_1_1.AuxInt
  2153  		v.reset(OpAdd32)
  2154  		v0 := b.NewValue0(v.Pos, OpConst32, t)
  2155  		v0.AuxInt = int64(int32(c + d))
  2156  		v.AddArg(v0)
  2157  		v.AddArg(x)
  2158  		return true
  2159  	}
  2160  	// match: (Add32 (Add32 (Const32 <t> [d]) x) (Const32 <t> [c]))
  2161  	// cond:
  2162  	// result: (Add32 (Const32 <t> [int64(int32(c+d))]) x)
  2163  	for {
  2164  		_ = v.Args[1]
  2165  		v_0 := v.Args[0]
  2166  		if v_0.Op != OpAdd32 {
  2167  			break
  2168  		}
  2169  		x := v_0.Args[1]
  2170  		v_0_0 := v_0.Args[0]
  2171  		if v_0_0.Op != OpConst32 {
  2172  			break
  2173  		}
  2174  		t := v_0_0.Type
  2175  		d := v_0_0.AuxInt
  2176  		v_1 := v.Args[1]
  2177  		if v_1.Op != OpConst32 {
  2178  			break
  2179  		}
  2180  		if v_1.Type != t {
  2181  			break
  2182  		}
  2183  		c := v_1.AuxInt
  2184  		v.reset(OpAdd32)
  2185  		v0 := b.NewValue0(v.Pos, OpConst32, t)
  2186  		v0.AuxInt = int64(int32(c + d))
  2187  		v.AddArg(v0)
  2188  		v.AddArg(x)
  2189  		return true
  2190  	}
  2191  	// match: (Add32 (Add32 x (Const32 <t> [d])) (Const32 <t> [c]))
  2192  	// cond:
  2193  	// result: (Add32 (Const32 <t> [int64(int32(c+d))]) x)
  2194  	for {
  2195  		_ = v.Args[1]
  2196  		v_0 := v.Args[0]
  2197  		if v_0.Op != OpAdd32 {
  2198  			break
  2199  		}
  2200  		_ = v_0.Args[1]
  2201  		x := v_0.Args[0]
  2202  		v_0_1 := v_0.Args[1]
  2203  		if v_0_1.Op != OpConst32 {
  2204  			break
  2205  		}
  2206  		t := v_0_1.Type
  2207  		d := v_0_1.AuxInt
  2208  		v_1 := v.Args[1]
  2209  		if v_1.Op != OpConst32 {
  2210  			break
  2211  		}
  2212  		if v_1.Type != t {
  2213  			break
  2214  		}
  2215  		c := v_1.AuxInt
  2216  		v.reset(OpAdd32)
  2217  		v0 := b.NewValue0(v.Pos, OpConst32, t)
  2218  		v0.AuxInt = int64(int32(c + d))
  2219  		v.AddArg(v0)
  2220  		v.AddArg(x)
  2221  		return true
  2222  	}
  2223  	return false
  2224  }
// rewriteValuegeneric_OpAdd32_30 applies the Add32/Sub32 constant-folding
// rules: (c + (d - x)) -> ((c+d) - x) and ((x - d) + c) -> ((c-d) + x),
// with the folded constant truncated to 32 bits via int64(int32(...)).
// It reports whether v was rewritten. Generated code — do not edit by hand.
func rewriteValuegeneric_OpAdd32_30(v *Value) bool {
	b := v.Block
	// match: (Add32 (Const32 <t> [c]) (Sub32 (Const32 <t> [d]) x))
	// cond:
	// result: (Sub32 (Const32 <t> [int64(int32(c+d))]) x)
	for {
		_ = v.Args[1] // anchor the len(v.Args) >= 2 bounds check for the loads below
		v_0 := v.Args[0]
		if v_0.Op != OpConst32 {
			break
		}
		t := v_0.Type
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpSub32 {
			break
		}
		x := v_1.Args[1]
		v_1_0 := v_1.Args[0]
		if v_1_0.Op != OpConst32 {
			break
		}
		if v_1_0.Type != t {
			break
		}
		d := v_1_0.AuxInt
		v.reset(OpSub32)
		v0 := b.NewValue0(v.Pos, OpConst32, t)
		v0.AuxInt = int64(int32(c + d))
		v.AddArg(v0)
		v.AddArg(x)
		return true
	}
	// match: (Add32 (Sub32 (Const32 <t> [d]) x) (Const32 <t> [c]))
	// cond:
	// result: (Sub32 (Const32 <t> [int64(int32(c+d))]) x)
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpSub32 {
			break
		}
		x := v_0.Args[1]
		v_0_0 := v_0.Args[0]
		if v_0_0.Op != OpConst32 {
			break
		}
		t := v_0_0.Type
		d := v_0_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst32 {
			break
		}
		if v_1.Type != t {
			break
		}
		c := v_1.AuxInt
		v.reset(OpSub32)
		v0 := b.NewValue0(v.Pos, OpConst32, t)
		v0.AuxInt = int64(int32(c + d))
		v.AddArg(v0)
		v.AddArg(x)
		return true
	}
	// match: (Add32 (Const32 <t> [c]) (Sub32 x (Const32 <t> [d])))
	// cond:
	// result: (Add32 (Const32 <t> [int64(int32(c-d))]) x)
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpConst32 {
			break
		}
		t := v_0.Type
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpSub32 {
			break
		}
		_ = v_1.Args[1]
		x := v_1.Args[0]
		v_1_1 := v_1.Args[1]
		if v_1_1.Op != OpConst32 {
			break
		}
		if v_1_1.Type != t {
			break
		}
		d := v_1_1.AuxInt
		v.reset(OpAdd32)
		v0 := b.NewValue0(v.Pos, OpConst32, t)
		v0.AuxInt = int64(int32(c - d))
		v.AddArg(v0)
		v.AddArg(x)
		return true
	}
	// match: (Add32 (Sub32 x (Const32 <t> [d])) (Const32 <t> [c]))
	// cond:
	// result: (Add32 (Const32 <t> [int64(int32(c-d))]) x)
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpSub32 {
			break
		}
		_ = v_0.Args[1]
		x := v_0.Args[0]
		v_0_1 := v_0.Args[1]
		if v_0_1.Op != OpConst32 {
			break
		}
		t := v_0_1.Type
		d := v_0_1.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst32 {
			break
		}
		if v_1.Type != t {
			break
		}
		c := v_1.AuxInt
		v.reset(OpAdd32)
		v0 := b.NewValue0(v.Pos, OpConst32, t)
		v0.AuxInt = int64(int32(c - d))
		v.AddArg(v0)
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValuegeneric_OpAdd32F_0 folds the addition of two float32
// constants at compile time. The AuxInt bit patterns are converted to
// float32 via auxTo32F, added in float32 precision, and re-encoded with
// auxFrom32F, so the result matches runtime float32 arithmetic.
// Both operand orders are matched (commute expansion).
// It reports whether v was rewritten. Generated code — do not edit by hand.
func rewriteValuegeneric_OpAdd32F_0(v *Value) bool {
	// match: (Add32F (Const32F [c]) (Const32F [d]))
	// cond:
	// result: (Const32F [auxFrom32F(auxTo32F(c) + auxTo32F(d))])
	for {
		_ = v.Args[1] // anchor the len(v.Args) >= 2 bounds check for the loads below
		v_0 := v.Args[0]
		if v_0.Op != OpConst32F {
			break
		}
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst32F {
			break
		}
		d := v_1.AuxInt
		v.reset(OpConst32F)
		v.AuxInt = auxFrom32F(auxTo32F(c) + auxTo32F(d))
		return true
	}
	// match: (Add32F (Const32F [d]) (Const32F [c]))
	// cond:
	// result: (Const32F [auxFrom32F(auxTo32F(c) + auxTo32F(d))])
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpConst32F {
			break
		}
		d := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst32F {
			break
		}
		c := v_1.AuxInt
		v.reset(OpConst32F)
		v.AuxInt = auxFrom32F(auxTo32F(c) + auxTo32F(d))
		return true
	}
	return false
}
// rewriteValuegeneric_OpAdd64_0 applies the first group of Add64 rules:
// folding the sum of two Const64 operands, and factoring a common
// multiplicand out of a sum of products, (x*y) + (x*z) -> x*(y+z).
// The eight Mul64 cases below are the commute expansion of that single
// rule (x may appear in either argument slot of either multiply).
// It reports whether v was rewritten. Generated code — do not edit by hand.
func rewriteValuegeneric_OpAdd64_0(v *Value) bool {
	b := v.Block
	// match: (Add64 (Const64 [c]) (Const64 [d]))
	// cond:
	// result: (Const64 [c+d])
	for {
		_ = v.Args[1] // anchor the len(v.Args) >= 2 bounds check for the loads below
		v_0 := v.Args[0]
		if v_0.Op != OpConst64 {
			break
		}
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst64 {
			break
		}
		d := v_1.AuxInt
		v.reset(OpConst64)
		v.AuxInt = c + d
		return true
	}
	// match: (Add64 (Const64 [d]) (Const64 [c]))
	// cond:
	// result: (Const64 [c+d])
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpConst64 {
			break
		}
		d := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst64 {
			break
		}
		c := v_1.AuxInt
		v.reset(OpConst64)
		v.AuxInt = c + d
		return true
	}
	// match: (Add64 <t> (Mul64 x y) (Mul64 x z))
	// cond:
	// result: (Mul64 x (Add64 <t> y z))
	for {
		t := v.Type
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpMul64 {
			break
		}
		y := v_0.Args[1]
		x := v_0.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpMul64 {
			break
		}
		z := v_1.Args[1]
		if x != v_1.Args[0] {
			break
		}
		v.reset(OpMul64)
		v.AddArg(x)
		v0 := b.NewValue0(v.Pos, OpAdd64, t)
		v0.AddArg(y)
		v0.AddArg(z)
		v.AddArg(v0)
		return true
	}
	// match: (Add64 <t> (Mul64 y x) (Mul64 x z))
	// cond:
	// result: (Mul64 x (Add64 <t> y z))
	for {
		t := v.Type
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpMul64 {
			break
		}
		x := v_0.Args[1]
		y := v_0.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpMul64 {
			break
		}
		z := v_1.Args[1]
		if x != v_1.Args[0] {
			break
		}
		v.reset(OpMul64)
		v.AddArg(x)
		v0 := b.NewValue0(v.Pos, OpAdd64, t)
		v0.AddArg(y)
		v0.AddArg(z)
		v.AddArg(v0)
		return true
	}
	// match: (Add64 <t> (Mul64 x y) (Mul64 z x))
	// cond:
	// result: (Mul64 x (Add64 <t> y z))
	for {
		t := v.Type
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpMul64 {
			break
		}
		y := v_0.Args[1]
		x := v_0.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpMul64 {
			break
		}
		_ = v_1.Args[1]
		z := v_1.Args[0]
		if x != v_1.Args[1] {
			break
		}
		v.reset(OpMul64)
		v.AddArg(x)
		v0 := b.NewValue0(v.Pos, OpAdd64, t)
		v0.AddArg(y)
		v0.AddArg(z)
		v.AddArg(v0)
		return true
	}
	// match: (Add64 <t> (Mul64 y x) (Mul64 z x))
	// cond:
	// result: (Mul64 x (Add64 <t> y z))
	for {
		t := v.Type
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpMul64 {
			break
		}
		x := v_0.Args[1]
		y := v_0.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpMul64 {
			break
		}
		_ = v_1.Args[1]
		z := v_1.Args[0]
		if x != v_1.Args[1] {
			break
		}
		v.reset(OpMul64)
		v.AddArg(x)
		v0 := b.NewValue0(v.Pos, OpAdd64, t)
		v0.AddArg(y)
		v0.AddArg(z)
		v.AddArg(v0)
		return true
	}
	// match: (Add64 <t> (Mul64 x z) (Mul64 x y))
	// cond:
	// result: (Mul64 x (Add64 <t> y z))
	for {
		t := v.Type
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpMul64 {
			break
		}
		z := v_0.Args[1]
		x := v_0.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpMul64 {
			break
		}
		y := v_1.Args[1]
		if x != v_1.Args[0] {
			break
		}
		v.reset(OpMul64)
		v.AddArg(x)
		v0 := b.NewValue0(v.Pos, OpAdd64, t)
		v0.AddArg(y)
		v0.AddArg(z)
		v.AddArg(v0)
		return true
	}
	// match: (Add64 <t> (Mul64 z x) (Mul64 x y))
	// cond:
	// result: (Mul64 x (Add64 <t> y z))
	for {
		t := v.Type
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpMul64 {
			break
		}
		x := v_0.Args[1]
		z := v_0.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpMul64 {
			break
		}
		y := v_1.Args[1]
		if x != v_1.Args[0] {
			break
		}
		v.reset(OpMul64)
		v.AddArg(x)
		v0 := b.NewValue0(v.Pos, OpAdd64, t)
		v0.AddArg(y)
		v0.AddArg(z)
		v.AddArg(v0)
		return true
	}
	// match: (Add64 <t> (Mul64 x z) (Mul64 y x))
	// cond:
	// result: (Mul64 x (Add64 <t> y z))
	for {
		t := v.Type
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpMul64 {
			break
		}
		z := v_0.Args[1]
		x := v_0.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpMul64 {
			break
		}
		_ = v_1.Args[1]
		y := v_1.Args[0]
		if x != v_1.Args[1] {
			break
		}
		v.reset(OpMul64)
		v.AddArg(x)
		v0 := b.NewValue0(v.Pos, OpAdd64, t)
		v0.AddArg(y)
		v0.AddArg(z)
		v.AddArg(v0)
		return true
	}
	// match: (Add64 <t> (Mul64 z x) (Mul64 y x))
	// cond:
	// result: (Mul64 x (Add64 <t> y z))
	for {
		t := v.Type
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpMul64 {
			break
		}
		x := v_0.Args[1]
		z := v_0.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpMul64 {
			break
		}
		_ = v_1.Args[1]
		y := v_1.Args[0]
		if x != v_1.Args[1] {
			break
		}
		v.reset(OpMul64)
		v.AddArg(x)
		v0 := b.NewValue0(v.Pos, OpAdd64, t)
		v0.AddArg(y)
		v0.AddArg(z)
		v.AddArg(v0)
		return true
	}
	return false
}
// rewriteValuegeneric_OpAdd64_10 applies the second group of Add64 rules:
// identity (x + 0 -> x), two's-complement negation (^x + 1 -> -x), and
// reassociation that hoists a constant out of a nested Add64/Sub64 so it
// can combine with other constants (e.g. ((i + z) + x) -> (i + (z + x))
// when i is the only constant). The conds require z and x to be
// non-constant so these rules cannot loop against the folding rules.
// It reports whether v was rewritten. Generated code — do not edit by hand.
func rewriteValuegeneric_OpAdd64_10(v *Value) bool {
	b := v.Block
	// match: (Add64 (Const64 [0]) x)
	// cond:
	// result: x
	for {
		x := v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpConst64 {
			break
		}
		if v_0.AuxInt != 0 {
			break
		}
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	// match: (Add64 x (Const64 [0]))
	// cond:
	// result: x
	for {
		_ = v.Args[1] // anchor the len(v.Args) >= 2 bounds check for the loads below
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpConst64 {
			break
		}
		if v_1.AuxInt != 0 {
			break
		}
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	// match: (Add64 (Const64 [1]) (Com64 x))
	// cond:
	// result: (Neg64 x)
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpConst64 {
			break
		}
		if v_0.AuxInt != 1 {
			break
		}
		v_1 := v.Args[1]
		if v_1.Op != OpCom64 {
			break
		}
		x := v_1.Args[0]
		v.reset(OpNeg64)
		v.AddArg(x)
		return true
	}
	// match: (Add64 (Com64 x) (Const64 [1]))
	// cond:
	// result: (Neg64 x)
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpCom64 {
			break
		}
		x := v_0.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpConst64 {
			break
		}
		if v_1.AuxInt != 1 {
			break
		}
		v.reset(OpNeg64)
		v.AddArg(x)
		return true
	}
	// match: (Add64 (Add64 i:(Const64 <t>) z) x)
	// cond: (z.Op != OpConst64 && x.Op != OpConst64)
	// result: (Add64 i (Add64 <t> z x))
	for {
		x := v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpAdd64 {
			break
		}
		z := v_0.Args[1]
		i := v_0.Args[0]
		if i.Op != OpConst64 {
			break
		}
		t := i.Type
		if !(z.Op != OpConst64 && x.Op != OpConst64) {
			break
		}
		v.reset(OpAdd64)
		v.AddArg(i)
		v0 := b.NewValue0(v.Pos, OpAdd64, t)
		v0.AddArg(z)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (Add64 (Add64 z i:(Const64 <t>)) x)
	// cond: (z.Op != OpConst64 && x.Op != OpConst64)
	// result: (Add64 i (Add64 <t> z x))
	for {
		x := v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpAdd64 {
			break
		}
		_ = v_0.Args[1]
		z := v_0.Args[0]
		i := v_0.Args[1]
		if i.Op != OpConst64 {
			break
		}
		t := i.Type
		if !(z.Op != OpConst64 && x.Op != OpConst64) {
			break
		}
		v.reset(OpAdd64)
		v.AddArg(i)
		v0 := b.NewValue0(v.Pos, OpAdd64, t)
		v0.AddArg(z)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (Add64 x (Add64 i:(Const64 <t>) z))
	// cond: (z.Op != OpConst64 && x.Op != OpConst64)
	// result: (Add64 i (Add64 <t> z x))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpAdd64 {
			break
		}
		z := v_1.Args[1]
		i := v_1.Args[0]
		if i.Op != OpConst64 {
			break
		}
		t := i.Type
		if !(z.Op != OpConst64 && x.Op != OpConst64) {
			break
		}
		v.reset(OpAdd64)
		v.AddArg(i)
		v0 := b.NewValue0(v.Pos, OpAdd64, t)
		v0.AddArg(z)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (Add64 x (Add64 z i:(Const64 <t>)))
	// cond: (z.Op != OpConst64 && x.Op != OpConst64)
	// result: (Add64 i (Add64 <t> z x))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpAdd64 {
			break
		}
		_ = v_1.Args[1]
		z := v_1.Args[0]
		i := v_1.Args[1]
		if i.Op != OpConst64 {
			break
		}
		t := i.Type
		if !(z.Op != OpConst64 && x.Op != OpConst64) {
			break
		}
		v.reset(OpAdd64)
		v.AddArg(i)
		v0 := b.NewValue0(v.Pos, OpAdd64, t)
		v0.AddArg(z)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (Add64 (Sub64 i:(Const64 <t>) z) x)
	// cond: (z.Op != OpConst64 && x.Op != OpConst64)
	// result: (Add64 i (Sub64 <t> x z))
	for {
		x := v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpSub64 {
			break
		}
		z := v_0.Args[1]
		i := v_0.Args[0]
		if i.Op != OpConst64 {
			break
		}
		t := i.Type
		if !(z.Op != OpConst64 && x.Op != OpConst64) {
			break
		}
		v.reset(OpAdd64)
		v.AddArg(i)
		v0 := b.NewValue0(v.Pos, OpSub64, t)
		v0.AddArg(x)
		v0.AddArg(z)
		v.AddArg(v0)
		return true
	}
	// match: (Add64 x (Sub64 i:(Const64 <t>) z))
	// cond: (z.Op != OpConst64 && x.Op != OpConst64)
	// result: (Add64 i (Sub64 <t> x z))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpSub64 {
			break
		}
		z := v_1.Args[1]
		i := v_1.Args[0]
		if i.Op != OpConst64 {
			break
		}
		t := i.Type
		if !(z.Op != OpConst64 && x.Op != OpConst64) {
			break
		}
		v.reset(OpAdd64)
		v.AddArg(i)
		v0 := b.NewValue0(v.Pos, OpSub64, t)
		v0.AddArg(x)
		v0.AddArg(z)
		v.AddArg(v0)
		return true
	}
	return false
}
// rewriteValuegeneric_OpAdd64_20 applies the third group of Add64 rules:
// more Add64/Sub64 reassociation hoisting a constant outward, and folding
// of a constant into a nested Add64 that already holds a constant of the
// same type ((c + (d + x)) -> ((c+d) + x) and its commuted forms).
// NOTE(review): several adjacent rules here are byte-identical duplicates
// (e.g. the two "(Add64 x (Sub64 z i:(Const64 <t>)))" cases) — presumably
// an artifact of the generator's commute expansion; the second copy is
// unreachable but harmless. Do not remove by hand: this file is generated.
// It reports whether v was rewritten.
func rewriteValuegeneric_OpAdd64_20(v *Value) bool {
	b := v.Block
	// match: (Add64 x (Sub64 i:(Const64 <t>) z))
	// cond: (z.Op != OpConst64 && x.Op != OpConst64)
	// result: (Add64 i (Sub64 <t> x z))
	for {
		_ = v.Args[1] // anchor the len(v.Args) >= 2 bounds check for the loads below
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpSub64 {
			break
		}
		z := v_1.Args[1]
		i := v_1.Args[0]
		if i.Op != OpConst64 {
			break
		}
		t := i.Type
		if !(z.Op != OpConst64 && x.Op != OpConst64) {
			break
		}
		v.reset(OpAdd64)
		v.AddArg(i)
		v0 := b.NewValue0(v.Pos, OpSub64, t)
		v0.AddArg(x)
		v0.AddArg(z)
		v.AddArg(v0)
		return true
	}
	// match: (Add64 (Sub64 i:(Const64 <t>) z) x)
	// cond: (z.Op != OpConst64 && x.Op != OpConst64)
	// result: (Add64 i (Sub64 <t> x z))
	for {
		x := v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpSub64 {
			break
		}
		z := v_0.Args[1]
		i := v_0.Args[0]
		if i.Op != OpConst64 {
			break
		}
		t := i.Type
		if !(z.Op != OpConst64 && x.Op != OpConst64) {
			break
		}
		v.reset(OpAdd64)
		v.AddArg(i)
		v0 := b.NewValue0(v.Pos, OpSub64, t)
		v0.AddArg(x)
		v0.AddArg(z)
		v.AddArg(v0)
		return true
	}
	// match: (Add64 (Sub64 z i:(Const64 <t>)) x)
	// cond: (z.Op != OpConst64 && x.Op != OpConst64)
	// result: (Sub64 (Add64 <t> x z) i)
	for {
		x := v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpSub64 {
			break
		}
		_ = v_0.Args[1]
		z := v_0.Args[0]
		i := v_0.Args[1]
		if i.Op != OpConst64 {
			break
		}
		t := i.Type
		if !(z.Op != OpConst64 && x.Op != OpConst64) {
			break
		}
		v.reset(OpSub64)
		v0 := b.NewValue0(v.Pos, OpAdd64, t)
		v0.AddArg(x)
		v0.AddArg(z)
		v.AddArg(v0)
		v.AddArg(i)
		return true
	}
	// match: (Add64 x (Sub64 z i:(Const64 <t>)))
	// cond: (z.Op != OpConst64 && x.Op != OpConst64)
	// result: (Sub64 (Add64 <t> x z) i)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpSub64 {
			break
		}
		_ = v_1.Args[1]
		z := v_1.Args[0]
		i := v_1.Args[1]
		if i.Op != OpConst64 {
			break
		}
		t := i.Type
		if !(z.Op != OpConst64 && x.Op != OpConst64) {
			break
		}
		v.reset(OpSub64)
		v0 := b.NewValue0(v.Pos, OpAdd64, t)
		v0.AddArg(x)
		v0.AddArg(z)
		v.AddArg(v0)
		v.AddArg(i)
		return true
	}
	// match: (Add64 x (Sub64 z i:(Const64 <t>)))
	// cond: (z.Op != OpConst64 && x.Op != OpConst64)
	// result: (Sub64 (Add64 <t> x z) i)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpSub64 {
			break
		}
		_ = v_1.Args[1]
		z := v_1.Args[0]
		i := v_1.Args[1]
		if i.Op != OpConst64 {
			break
		}
		t := i.Type
		if !(z.Op != OpConst64 && x.Op != OpConst64) {
			break
		}
		v.reset(OpSub64)
		v0 := b.NewValue0(v.Pos, OpAdd64, t)
		v0.AddArg(x)
		v0.AddArg(z)
		v.AddArg(v0)
		v.AddArg(i)
		return true
	}
	// match: (Add64 (Sub64 z i:(Const64 <t>)) x)
	// cond: (z.Op != OpConst64 && x.Op != OpConst64)
	// result: (Sub64 (Add64 <t> x z) i)
	for {
		x := v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpSub64 {
			break
		}
		_ = v_0.Args[1]
		z := v_0.Args[0]
		i := v_0.Args[1]
		if i.Op != OpConst64 {
			break
		}
		t := i.Type
		if !(z.Op != OpConst64 && x.Op != OpConst64) {
			break
		}
		v.reset(OpSub64)
		v0 := b.NewValue0(v.Pos, OpAdd64, t)
		v0.AddArg(x)
		v0.AddArg(z)
		v.AddArg(v0)
		v.AddArg(i)
		return true
	}
	// match: (Add64 (Const64 <t> [c]) (Add64 (Const64 <t> [d]) x))
	// cond:
	// result: (Add64 (Const64 <t> [c+d]) x)
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpConst64 {
			break
		}
		t := v_0.Type
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpAdd64 {
			break
		}
		x := v_1.Args[1]
		v_1_0 := v_1.Args[0]
		if v_1_0.Op != OpConst64 {
			break
		}
		if v_1_0.Type != t {
			break
		}
		d := v_1_0.AuxInt
		v.reset(OpAdd64)
		v0 := b.NewValue0(v.Pos, OpConst64, t)
		v0.AuxInt = c + d
		v.AddArg(v0)
		v.AddArg(x)
		return true
	}
	// match: (Add64 (Const64 <t> [c]) (Add64 x (Const64 <t> [d])))
	// cond:
	// result: (Add64 (Const64 <t> [c+d]) x)
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpConst64 {
			break
		}
		t := v_0.Type
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpAdd64 {
			break
		}
		_ = v_1.Args[1]
		x := v_1.Args[0]
		v_1_1 := v_1.Args[1]
		if v_1_1.Op != OpConst64 {
			break
		}
		if v_1_1.Type != t {
			break
		}
		d := v_1_1.AuxInt
		v.reset(OpAdd64)
		v0 := b.NewValue0(v.Pos, OpConst64, t)
		v0.AuxInt = c + d
		v.AddArg(v0)
		v.AddArg(x)
		return true
	}
	// match: (Add64 (Add64 (Const64 <t> [d]) x) (Const64 <t> [c]))
	// cond:
	// result: (Add64 (Const64 <t> [c+d]) x)
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpAdd64 {
			break
		}
		x := v_0.Args[1]
		v_0_0 := v_0.Args[0]
		if v_0_0.Op != OpConst64 {
			break
		}
		t := v_0_0.Type
		d := v_0_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst64 {
			break
		}
		if v_1.Type != t {
			break
		}
		c := v_1.AuxInt
		v.reset(OpAdd64)
		v0 := b.NewValue0(v.Pos, OpConst64, t)
		v0.AuxInt = c + d
		v.AddArg(v0)
		v.AddArg(x)
		return true
	}
	// match: (Add64 (Add64 x (Const64 <t> [d])) (Const64 <t> [c]))
	// cond:
	// result: (Add64 (Const64 <t> [c+d]) x)
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpAdd64 {
			break
		}
		_ = v_0.Args[1]
		x := v_0.Args[0]
		v_0_1 := v_0.Args[1]
		if v_0_1.Op != OpConst64 {
			break
		}
		t := v_0_1.Type
		d := v_0_1.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst64 {
			break
		}
		if v_1.Type != t {
			break
		}
		c := v_1.AuxInt
		v.reset(OpAdd64)
		v0 := b.NewValue0(v.Pos, OpConst64, t)
		v0.AuxInt = c + d
		v.AddArg(v0)
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValuegeneric_OpAdd64_30 applies the Add64/Sub64 constant-folding
// rules: (c + (d - x)) -> ((c+d) - x) and ((x - d) + c) -> ((c-d) + x).
// Unlike the 32-bit variant, no truncation is needed: AuxInt already
// holds the full 64-bit value (wrap-around on overflow matches runtime
// int64 arithmetic). It reports whether v was rewritten.
// Generated code — do not edit by hand.
func rewriteValuegeneric_OpAdd64_30(v *Value) bool {
	b := v.Block
	// match: (Add64 (Const64 <t> [c]) (Sub64 (Const64 <t> [d]) x))
	// cond:
	// result: (Sub64 (Const64 <t> [c+d]) x)
	for {
		_ = v.Args[1] // anchor the len(v.Args) >= 2 bounds check for the loads below
		v_0 := v.Args[0]
		if v_0.Op != OpConst64 {
			break
		}
		t := v_0.Type
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpSub64 {
			break
		}
		x := v_1.Args[1]
		v_1_0 := v_1.Args[0]
		if v_1_0.Op != OpConst64 {
			break
		}
		if v_1_0.Type != t {
			break
		}
		d := v_1_0.AuxInt
		v.reset(OpSub64)
		v0 := b.NewValue0(v.Pos, OpConst64, t)
		v0.AuxInt = c + d
		v.AddArg(v0)
		v.AddArg(x)
		return true
	}
	// match: (Add64 (Sub64 (Const64 <t> [d]) x) (Const64 <t> [c]))
	// cond:
	// result: (Sub64 (Const64 <t> [c+d]) x)
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpSub64 {
			break
		}
		x := v_0.Args[1]
		v_0_0 := v_0.Args[0]
		if v_0_0.Op != OpConst64 {
			break
		}
		t := v_0_0.Type
		d := v_0_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst64 {
			break
		}
		if v_1.Type != t {
			break
		}
		c := v_1.AuxInt
		v.reset(OpSub64)
		v0 := b.NewValue0(v.Pos, OpConst64, t)
		v0.AuxInt = c + d
		v.AddArg(v0)
		v.AddArg(x)
		return true
	}
	// match: (Add64 (Const64 <t> [c]) (Sub64 x (Const64 <t> [d])))
	// cond:
	// result: (Add64 (Const64 <t> [c-d]) x)
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpConst64 {
			break
		}
		t := v_0.Type
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpSub64 {
			break
		}
		_ = v_1.Args[1]
		x := v_1.Args[0]
		v_1_1 := v_1.Args[1]
		if v_1_1.Op != OpConst64 {
			break
		}
		if v_1_1.Type != t {
			break
		}
		d := v_1_1.AuxInt
		v.reset(OpAdd64)
		v0 := b.NewValue0(v.Pos, OpConst64, t)
		v0.AuxInt = c - d
		v.AddArg(v0)
		v.AddArg(x)
		return true
	}
	// match: (Add64 (Sub64 x (Const64 <t> [d])) (Const64 <t> [c]))
	// cond:
	// result: (Add64 (Const64 <t> [c-d]) x)
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpSub64 {
			break
		}
		_ = v_0.Args[1]
		x := v_0.Args[0]
		v_0_1 := v_0.Args[1]
		if v_0_1.Op != OpConst64 {
			break
		}
		t := v_0_1.Type
		d := v_0_1.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst64 {
			break
		}
		if v_1.Type != t {
			break
		}
		c := v_1.AuxInt
		v.reset(OpAdd64)
		v0 := b.NewValue0(v.Pos, OpConst64, t)
		v0.AuxInt = c - d
		v.AddArg(v0)
		v.AddArg(x)
		return true
	}
	return false
}
  3331  func rewriteValuegeneric_OpAdd64F_0(v *Value) bool {
  3332  	// match: (Add64F (Const64F [c]) (Const64F [d]))
  3333  	// cond:
  3334  	// result: (Const64F [auxFrom64F(auxTo64F(c) + auxTo64F(d))])
  3335  	for {
  3336  		_ = v.Args[1]
  3337  		v_0 := v.Args[0]
  3338  		if v_0.Op != OpConst64F {
  3339  			break
  3340  		}
  3341  		c := v_0.AuxInt
  3342  		v_1 := v.Args[1]
  3343  		if v_1.Op != OpConst64F {
  3344  			break
  3345  		}
  3346  		d := v_1.AuxInt
  3347  		v.reset(OpConst64F)
  3348  		v.AuxInt = auxFrom64F(auxTo64F(c) + auxTo64F(d))
  3349  		return true
  3350  	}
  3351  	// match: (Add64F (Const64F [d]) (Const64F [c]))
  3352  	// cond:
  3353  	// result: (Const64F [auxFrom64F(auxTo64F(c) + auxTo64F(d))])
  3354  	for {
  3355  		_ = v.Args[1]
  3356  		v_0 := v.Args[0]
  3357  		if v_0.Op != OpConst64F {
  3358  			break
  3359  		}
  3360  		d := v_0.AuxInt
  3361  		v_1 := v.Args[1]
  3362  		if v_1.Op != OpConst64F {
  3363  			break
  3364  		}
  3365  		c := v_1.AuxInt
  3366  		v.reset(OpConst64F)
  3367  		v.AuxInt = auxFrom64F(auxTo64F(c) + auxTo64F(d))
  3368  		return true
  3369  	}
  3370  	return false
  3371  }
// rewriteValuegeneric_OpAdd8_0 applies the first group of generic Add8
// rewrite rules: constant folding of (Const8 + Const8) in both operand
// orders, and distributivity factoring (Mul8 x y) + (Mul8 x z) ->
// Mul8 x (Add8 y z) for every commutation of the shared factor x.
// Each rule is a one-iteration for-loop: "break" means the rule did not
// match and the next rule is tried. It reports whether v was rewritten.
func rewriteValuegeneric_OpAdd8_0(v *Value) bool {
	b := v.Block // block in which replacement values are allocated
	// match: (Add8 (Const8 [c]) (Const8 [d]))
	// cond:
	// result: (Const8 [int64(int8(c+d))])
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpConst8 {
			break
		}
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst8 {
			break
		}
		d := v_1.AuxInt
		v.reset(OpConst8)
		// Truncate the sum to 8 bits, then sign-extend back into AuxInt.
		v.AuxInt = int64(int8(c + d))
		return true
	}
	// match: (Add8 (Const8 [d]) (Const8 [c]))
	// cond:
	// result: (Const8 [int64(int8(c+d))])
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpConst8 {
			break
		}
		d := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst8 {
			break
		}
		c := v_1.AuxInt
		v.reset(OpConst8)
		v.AuxInt = int64(int8(c + d))
		return true
	}
	// match: (Add8 <t> (Mul8 x y) (Mul8 x z))
	// cond:
	// result: (Mul8 x (Add8 <t> y z))
	for {
		t := v.Type
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpMul8 {
			break
		}
		y := v_0.Args[1]
		x := v_0.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpMul8 {
			break
		}
		z := v_1.Args[1]
		if x != v_1.Args[0] {
			break
		}
		v.reset(OpMul8)
		v.AddArg(x)
		v0 := b.NewValue0(v.Pos, OpAdd8, t)
		v0.AddArg(y)
		v0.AddArg(z)
		v.AddArg(v0)
		return true
	}
	// match: (Add8 <t> (Mul8 y x) (Mul8 x z))
	// cond:
	// result: (Mul8 x (Add8 <t> y z))
	for {
		t := v.Type
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpMul8 {
			break
		}
		x := v_0.Args[1]
		y := v_0.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpMul8 {
			break
		}
		z := v_1.Args[1]
		if x != v_1.Args[0] {
			break
		}
		v.reset(OpMul8)
		v.AddArg(x)
		v0 := b.NewValue0(v.Pos, OpAdd8, t)
		v0.AddArg(y)
		v0.AddArg(z)
		v.AddArg(v0)
		return true
	}
	// match: (Add8 <t> (Mul8 x y) (Mul8 z x))
	// cond:
	// result: (Mul8 x (Add8 <t> y z))
	for {
		t := v.Type
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpMul8 {
			break
		}
		y := v_0.Args[1]
		x := v_0.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpMul8 {
			break
		}
		_ = v_1.Args[1]
		z := v_1.Args[0]
		if x != v_1.Args[1] {
			break
		}
		v.reset(OpMul8)
		v.AddArg(x)
		v0 := b.NewValue0(v.Pos, OpAdd8, t)
		v0.AddArg(y)
		v0.AddArg(z)
		v.AddArg(v0)
		return true
	}
	// match: (Add8 <t> (Mul8 y x) (Mul8 z x))
	// cond:
	// result: (Mul8 x (Add8 <t> y z))
	for {
		t := v.Type
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpMul8 {
			break
		}
		x := v_0.Args[1]
		y := v_0.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpMul8 {
			break
		}
		_ = v_1.Args[1]
		z := v_1.Args[0]
		if x != v_1.Args[1] {
			break
		}
		v.reset(OpMul8)
		v.AddArg(x)
		v0 := b.NewValue0(v.Pos, OpAdd8, t)
		v0.AddArg(y)
		v0.AddArg(z)
		v.AddArg(v0)
		return true
	}
	// match: (Add8 <t> (Mul8 x z) (Mul8 x y))
	// cond:
	// result: (Mul8 x (Add8 <t> y z))
	for {
		t := v.Type
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpMul8 {
			break
		}
		z := v_0.Args[1]
		x := v_0.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpMul8 {
			break
		}
		y := v_1.Args[1]
		if x != v_1.Args[0] {
			break
		}
		v.reset(OpMul8)
		v.AddArg(x)
		v0 := b.NewValue0(v.Pos, OpAdd8, t)
		v0.AddArg(y)
		v0.AddArg(z)
		v.AddArg(v0)
		return true
	}
	// match: (Add8 <t> (Mul8 z x) (Mul8 x y))
	// cond:
	// result: (Mul8 x (Add8 <t> y z))
	for {
		t := v.Type
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpMul8 {
			break
		}
		x := v_0.Args[1]
		z := v_0.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpMul8 {
			break
		}
		y := v_1.Args[1]
		if x != v_1.Args[0] {
			break
		}
		v.reset(OpMul8)
		v.AddArg(x)
		v0 := b.NewValue0(v.Pos, OpAdd8, t)
		v0.AddArg(y)
		v0.AddArg(z)
		v.AddArg(v0)
		return true
	}
	// match: (Add8 <t> (Mul8 x z) (Mul8 y x))
	// cond:
	// result: (Mul8 x (Add8 <t> y z))
	for {
		t := v.Type
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpMul8 {
			break
		}
		z := v_0.Args[1]
		x := v_0.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpMul8 {
			break
		}
		_ = v_1.Args[1]
		y := v_1.Args[0]
		if x != v_1.Args[1] {
			break
		}
		v.reset(OpMul8)
		v.AddArg(x)
		v0 := b.NewValue0(v.Pos, OpAdd8, t)
		v0.AddArg(y)
		v0.AddArg(z)
		v.AddArg(v0)
		return true
	}
	// match: (Add8 <t> (Mul8 z x) (Mul8 y x))
	// cond:
	// result: (Mul8 x (Add8 <t> y z))
	for {
		t := v.Type
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpMul8 {
			break
		}
		x := v_0.Args[1]
		z := v_0.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpMul8 {
			break
		}
		_ = v_1.Args[1]
		y := v_1.Args[0]
		if x != v_1.Args[1] {
			break
		}
		v.reset(OpMul8)
		v.AddArg(x)
		v0 := b.NewValue0(v.Pos, OpAdd8, t)
		v0.AddArg(y)
		v0.AddArg(z)
		v.AddArg(v0)
		return true
	}
	return false
}
// rewriteValuegeneric_OpAdd8_10 applies the second group of generic Add8
// rewrite rules: the additive identity (x + 0 -> x, both operand orders),
// the complement identity (Com8 x) + 1 -> Neg8 x (both orders), and
// reassociation rules that float a Const8 out of a nested Add8 or Sub8
// so that later rules can fold constants together. It reports whether v
// was rewritten.
func rewriteValuegeneric_OpAdd8_10(v *Value) bool {
	b := v.Block // block in which replacement values are allocated
	// match: (Add8 (Const8 [0]) x)
	// cond:
	// result: x
	for {
		x := v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpConst8 {
			break
		}
		if v_0.AuxInt != 0 {
			break
		}
		// Replace with a Copy of x; a later pass eliminates the Copy.
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	// match: (Add8 x (Const8 [0]))
	// cond:
	// result: x
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpConst8 {
			break
		}
		if v_1.AuxInt != 0 {
			break
		}
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	// match: (Add8 (Const8 [1]) (Com8 x))
	// cond:
	// result: (Neg8 x)
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpConst8 {
			break
		}
		if v_0.AuxInt != 1 {
			break
		}
		v_1 := v.Args[1]
		if v_1.Op != OpCom8 {
			break
		}
		x := v_1.Args[0]
		v.reset(OpNeg8)
		v.AddArg(x)
		return true
	}
	// match: (Add8 (Com8 x) (Const8 [1]))
	// cond:
	// result: (Neg8 x)
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpCom8 {
			break
		}
		x := v_0.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpConst8 {
			break
		}
		if v_1.AuxInt != 1 {
			break
		}
		v.reset(OpNeg8)
		v.AddArg(x)
		return true
	}
	// match: (Add8 (Add8 i:(Const8 <t>) z) x)
	// cond: (z.Op != OpConst8 && x.Op != OpConst8)
	// result: (Add8 i (Add8 <t> z x))
	for {
		x := v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpAdd8 {
			break
		}
		z := v_0.Args[1]
		i := v_0.Args[0]
		if i.Op != OpConst8 {
			break
		}
		t := i.Type
		// Only reassociate when neither other operand is a constant,
		// so this rule cannot loop against the constant-folding rules.
		if !(z.Op != OpConst8 && x.Op != OpConst8) {
			break
		}
		v.reset(OpAdd8)
		v.AddArg(i)
		v0 := b.NewValue0(v.Pos, OpAdd8, t)
		v0.AddArg(z)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (Add8 (Add8 z i:(Const8 <t>)) x)
	// cond: (z.Op != OpConst8 && x.Op != OpConst8)
	// result: (Add8 i (Add8 <t> z x))
	for {
		x := v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpAdd8 {
			break
		}
		_ = v_0.Args[1]
		z := v_0.Args[0]
		i := v_0.Args[1]
		if i.Op != OpConst8 {
			break
		}
		t := i.Type
		if !(z.Op != OpConst8 && x.Op != OpConst8) {
			break
		}
		v.reset(OpAdd8)
		v.AddArg(i)
		v0 := b.NewValue0(v.Pos, OpAdd8, t)
		v0.AddArg(z)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (Add8 x (Add8 i:(Const8 <t>) z))
	// cond: (z.Op != OpConst8 && x.Op != OpConst8)
	// result: (Add8 i (Add8 <t> z x))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpAdd8 {
			break
		}
		z := v_1.Args[1]
		i := v_1.Args[0]
		if i.Op != OpConst8 {
			break
		}
		t := i.Type
		if !(z.Op != OpConst8 && x.Op != OpConst8) {
			break
		}
		v.reset(OpAdd8)
		v.AddArg(i)
		v0 := b.NewValue0(v.Pos, OpAdd8, t)
		v0.AddArg(z)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (Add8 x (Add8 z i:(Const8 <t>)))
	// cond: (z.Op != OpConst8 && x.Op != OpConst8)
	// result: (Add8 i (Add8 <t> z x))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpAdd8 {
			break
		}
		_ = v_1.Args[1]
		z := v_1.Args[0]
		i := v_1.Args[1]
		if i.Op != OpConst8 {
			break
		}
		t := i.Type
		if !(z.Op != OpConst8 && x.Op != OpConst8) {
			break
		}
		v.reset(OpAdd8)
		v.AddArg(i)
		v0 := b.NewValue0(v.Pos, OpAdd8, t)
		v0.AddArg(z)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (Add8 (Sub8 i:(Const8 <t>) z) x)
	// cond: (z.Op != OpConst8 && x.Op != OpConst8)
	// result: (Add8 i (Sub8 <t> x z))
	for {
		x := v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpSub8 {
			break
		}
		z := v_0.Args[1]
		i := v_0.Args[0]
		if i.Op != OpConst8 {
			break
		}
		t := i.Type
		if !(z.Op != OpConst8 && x.Op != OpConst8) {
			break
		}
		v.reset(OpAdd8)
		v.AddArg(i)
		v0 := b.NewValue0(v.Pos, OpSub8, t)
		v0.AddArg(x)
		v0.AddArg(z)
		v.AddArg(v0)
		return true
	}
	// match: (Add8 x (Sub8 i:(Const8 <t>) z))
	// cond: (z.Op != OpConst8 && x.Op != OpConst8)
	// result: (Add8 i (Sub8 <t> x z))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpSub8 {
			break
		}
		z := v_1.Args[1]
		i := v_1.Args[0]
		if i.Op != OpConst8 {
			break
		}
		t := i.Type
		if !(z.Op != OpConst8 && x.Op != OpConst8) {
			break
		}
		v.reset(OpAdd8)
		v.AddArg(i)
		v0 := b.NewValue0(v.Pos, OpSub8, t)
		v0.AddArg(x)
		v0.AddArg(z)
		v.AddArg(v0)
		return true
	}
	return false
}
// rewriteValuegeneric_OpAdd8_20 applies the third group of generic Add8
// rewrite rules: reassociation that floats a Const8 out of a Sub8
// operand, and constant folding of a Const8 against a nested
// Add8/Sub8-with-constant, in every operand order. It reports whether v
// was rewritten.
//
// NOTE(review): some rules here textually repeat rules in the adjacent
// rule groups (the generated commutation expansion appears to produce
// duplicates across the 10-rule function split); the duplicates are
// redundant but harmless since each rule is tried in order.
func rewriteValuegeneric_OpAdd8_20(v *Value) bool {
	b := v.Block // block in which replacement values are allocated
	// match: (Add8 x (Sub8 i:(Const8 <t>) z))
	// cond: (z.Op != OpConst8 && x.Op != OpConst8)
	// result: (Add8 i (Sub8 <t> x z))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpSub8 {
			break
		}
		z := v_1.Args[1]
		i := v_1.Args[0]
		if i.Op != OpConst8 {
			break
		}
		t := i.Type
		if !(z.Op != OpConst8 && x.Op != OpConst8) {
			break
		}
		v.reset(OpAdd8)
		v.AddArg(i)
		v0 := b.NewValue0(v.Pos, OpSub8, t)
		v0.AddArg(x)
		v0.AddArg(z)
		v.AddArg(v0)
		return true
	}
	// match: (Add8 (Sub8 i:(Const8 <t>) z) x)
	// cond: (z.Op != OpConst8 && x.Op != OpConst8)
	// result: (Add8 i (Sub8 <t> x z))
	for {
		x := v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpSub8 {
			break
		}
		z := v_0.Args[1]
		i := v_0.Args[0]
		if i.Op != OpConst8 {
			break
		}
		t := i.Type
		if !(z.Op != OpConst8 && x.Op != OpConst8) {
			break
		}
		v.reset(OpAdd8)
		v.AddArg(i)
		v0 := b.NewValue0(v.Pos, OpSub8, t)
		v0.AddArg(x)
		v0.AddArg(z)
		v.AddArg(v0)
		return true
	}
	// match: (Add8 (Sub8 z i:(Const8 <t>)) x)
	// cond: (z.Op != OpConst8 && x.Op != OpConst8)
	// result: (Sub8 (Add8 <t> x z) i)
	for {
		x := v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpSub8 {
			break
		}
		_ = v_0.Args[1]
		z := v_0.Args[0]
		i := v_0.Args[1]
		if i.Op != OpConst8 {
			break
		}
		t := i.Type
		if !(z.Op != OpConst8 && x.Op != OpConst8) {
			break
		}
		v.reset(OpSub8)
		v0 := b.NewValue0(v.Pos, OpAdd8, t)
		v0.AddArg(x)
		v0.AddArg(z)
		v.AddArg(v0)
		v.AddArg(i)
		return true
	}
	// match: (Add8 x (Sub8 z i:(Const8 <t>)))
	// cond: (z.Op != OpConst8 && x.Op != OpConst8)
	// result: (Sub8 (Add8 <t> x z) i)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpSub8 {
			break
		}
		_ = v_1.Args[1]
		z := v_1.Args[0]
		i := v_1.Args[1]
		if i.Op != OpConst8 {
			break
		}
		t := i.Type
		if !(z.Op != OpConst8 && x.Op != OpConst8) {
			break
		}
		v.reset(OpSub8)
		v0 := b.NewValue0(v.Pos, OpAdd8, t)
		v0.AddArg(x)
		v0.AddArg(z)
		v.AddArg(v0)
		v.AddArg(i)
		return true
	}
	// match: (Add8 x (Sub8 z i:(Const8 <t>)))
	// cond: (z.Op != OpConst8 && x.Op != OpConst8)
	// result: (Sub8 (Add8 <t> x z) i)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpSub8 {
			break
		}
		_ = v_1.Args[1]
		z := v_1.Args[0]
		i := v_1.Args[1]
		if i.Op != OpConst8 {
			break
		}
		t := i.Type
		if !(z.Op != OpConst8 && x.Op != OpConst8) {
			break
		}
		v.reset(OpSub8)
		v0 := b.NewValue0(v.Pos, OpAdd8, t)
		v0.AddArg(x)
		v0.AddArg(z)
		v.AddArg(v0)
		v.AddArg(i)
		return true
	}
	// match: (Add8 (Sub8 z i:(Const8 <t>)) x)
	// cond: (z.Op != OpConst8 && x.Op != OpConst8)
	// result: (Sub8 (Add8 <t> x z) i)
	for {
		x := v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpSub8 {
			break
		}
		_ = v_0.Args[1]
		z := v_0.Args[0]
		i := v_0.Args[1]
		if i.Op != OpConst8 {
			break
		}
		t := i.Type
		if !(z.Op != OpConst8 && x.Op != OpConst8) {
			break
		}
		v.reset(OpSub8)
		v0 := b.NewValue0(v.Pos, OpAdd8, t)
		v0.AddArg(x)
		v0.AddArg(z)
		v.AddArg(v0)
		v.AddArg(i)
		return true
	}
	// match: (Add8 (Const8 <t> [c]) (Add8 (Const8 <t> [d]) x))
	// cond:
	// result: (Add8 (Const8 <t> [int64(int8(c+d))]) x)
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpConst8 {
			break
		}
		t := v_0.Type
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpAdd8 {
			break
		}
		x := v_1.Args[1]
		v_1_0 := v_1.Args[0]
		if v_1_0.Op != OpConst8 {
			break
		}
		if v_1_0.Type != t {
			break
		}
		d := v_1_0.AuxInt
		v.reset(OpAdd8)
		v0 := b.NewValue0(v.Pos, OpConst8, t)
		v0.AuxInt = int64(int8(c + d))
		v.AddArg(v0)
		v.AddArg(x)
		return true
	}
	// match: (Add8 (Const8 <t> [c]) (Add8 x (Const8 <t> [d])))
	// cond:
	// result: (Add8 (Const8 <t> [int64(int8(c+d))]) x)
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpConst8 {
			break
		}
		t := v_0.Type
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpAdd8 {
			break
		}
		_ = v_1.Args[1]
		x := v_1.Args[0]
		v_1_1 := v_1.Args[1]
		if v_1_1.Op != OpConst8 {
			break
		}
		if v_1_1.Type != t {
			break
		}
		d := v_1_1.AuxInt
		v.reset(OpAdd8)
		v0 := b.NewValue0(v.Pos, OpConst8, t)
		v0.AuxInt = int64(int8(c + d))
		v.AddArg(v0)
		v.AddArg(x)
		return true
	}
	// match: (Add8 (Add8 (Const8 <t> [d]) x) (Const8 <t> [c]))
	// cond:
	// result: (Add8 (Const8 <t> [int64(int8(c+d))]) x)
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpAdd8 {
			break
		}
		x := v_0.Args[1]
		v_0_0 := v_0.Args[0]
		if v_0_0.Op != OpConst8 {
			break
		}
		t := v_0_0.Type
		d := v_0_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst8 {
			break
		}
		if v_1.Type != t {
			break
		}
		c := v_1.AuxInt
		v.reset(OpAdd8)
		v0 := b.NewValue0(v.Pos, OpConst8, t)
		v0.AuxInt = int64(int8(c + d))
		v.AddArg(v0)
		v.AddArg(x)
		return true
	}
	// match: (Add8 (Add8 x (Const8 <t> [d])) (Const8 <t> [c]))
	// cond:
	// result: (Add8 (Const8 <t> [int64(int8(c+d))]) x)
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpAdd8 {
			break
		}
		_ = v_0.Args[1]
		x := v_0.Args[0]
		v_0_1 := v_0.Args[1]
		if v_0_1.Op != OpConst8 {
			break
		}
		t := v_0_1.Type
		d := v_0_1.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst8 {
			break
		}
		if v_1.Type != t {
			break
		}
		c := v_1.AuxInt
		v.reset(OpAdd8)
		v0 := b.NewValue0(v.Pos, OpConst8, t)
		v0.AuxInt = int64(int8(c + d))
		v.AddArg(v0)
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValuegeneric_OpAdd8_30 applies the final group of generic Add8
// rewrite rules, folding a Const8 into a Sub8 operand:
//	c + (d - x) -> (c+d) - x
//	c + (x - d) -> (c-d) + x
// each in both operand orders, with arithmetic truncated to 8 bits.
// It reports whether v was rewritten.
func rewriteValuegeneric_OpAdd8_30(v *Value) bool {
	b := v.Block // block in which replacement values are allocated
	// match: (Add8 (Const8 <t> [c]) (Sub8 (Const8 <t> [d]) x))
	// cond:
	// result: (Sub8 (Const8 <t> [int64(int8(c+d))]) x)
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpConst8 {
			break
		}
		t := v_0.Type
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpSub8 {
			break
		}
		x := v_1.Args[1]
		v_1_0 := v_1.Args[0]
		if v_1_0.Op != OpConst8 {
			break
		}
		if v_1_0.Type != t {
			break
		}
		d := v_1_0.AuxInt
		v.reset(OpSub8)
		v0 := b.NewValue0(v.Pos, OpConst8, t)
		v0.AuxInt = int64(int8(c + d))
		v.AddArg(v0)
		v.AddArg(x)
		return true
	}
	// match: (Add8 (Sub8 (Const8 <t> [d]) x) (Const8 <t> [c]))
	// cond:
	// result: (Sub8 (Const8 <t> [int64(int8(c+d))]) x)
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpSub8 {
			break
		}
		x := v_0.Args[1]
		v_0_0 := v_0.Args[0]
		if v_0_0.Op != OpConst8 {
			break
		}
		t := v_0_0.Type
		d := v_0_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst8 {
			break
		}
		if v_1.Type != t {
			break
		}
		c := v_1.AuxInt
		v.reset(OpSub8)
		v0 := b.NewValue0(v.Pos, OpConst8, t)
		v0.AuxInt = int64(int8(c + d))
		v.AddArg(v0)
		v.AddArg(x)
		return true
	}
	// match: (Add8 (Const8 <t> [c]) (Sub8 x (Const8 <t> [d])))
	// cond:
	// result: (Add8 (Const8 <t> [int64(int8(c-d))]) x)
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpConst8 {
			break
		}
		t := v_0.Type
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpSub8 {
			break
		}
		_ = v_1.Args[1]
		x := v_1.Args[0]
		v_1_1 := v_1.Args[1]
		if v_1_1.Op != OpConst8 {
			break
		}
		if v_1_1.Type != t {
			break
		}
		d := v_1_1.AuxInt
		v.reset(OpAdd8)
		v0 := b.NewValue0(v.Pos, OpConst8, t)
		v0.AuxInt = int64(int8(c - d))
		v.AddArg(v0)
		v.AddArg(x)
		return true
	}
	// match: (Add8 (Sub8 x (Const8 <t> [d])) (Const8 <t> [c]))
	// cond:
	// result: (Add8 (Const8 <t> [int64(int8(c-d))]) x)
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpSub8 {
			break
		}
		_ = v_0.Args[1]
		x := v_0.Args[0]
		v_0_1 := v_0.Args[1]
		if v_0_1.Op != OpConst8 {
			break
		}
		t := v_0_1.Type
		d := v_0_1.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst8 {
			break
		}
		if v_1.Type != t {
			break
		}
		c := v_1.AuxInt
		v.reset(OpAdd8)
		v0 := b.NewValue0(v.Pos, OpConst8, t)
		v0.AuxInt = int64(int8(c - d))
		v.AddArg(v0)
		v.AddArg(x)
		return true
	}
	return false
}
  4307  func rewriteValuegeneric_OpAddPtr_0(v *Value) bool {
  4308  	// match: (AddPtr <t> x (Const64 [c]))
  4309  	// cond:
  4310  	// result: (OffPtr <t> x [c])
  4311  	for {
  4312  		t := v.Type
  4313  		_ = v.Args[1]
  4314  		x := v.Args[0]
  4315  		v_1 := v.Args[1]
  4316  		if v_1.Op != OpConst64 {
  4317  			break
  4318  		}
  4319  		c := v_1.AuxInt
  4320  		v.reset(OpOffPtr)
  4321  		v.Type = t
  4322  		v.AuxInt = c
  4323  		v.AddArg(x)
  4324  		return true
  4325  	}
  4326  	// match: (AddPtr <t> x (Const32 [c]))
  4327  	// cond:
  4328  	// result: (OffPtr <t> x [c])
  4329  	for {
  4330  		t := v.Type
  4331  		_ = v.Args[1]
  4332  		x := v.Args[0]
  4333  		v_1 := v.Args[1]
  4334  		if v_1.Op != OpConst32 {
  4335  			break
  4336  		}
  4337  		c := v_1.AuxInt
  4338  		v.reset(OpOffPtr)
  4339  		v.Type = t
  4340  		v.AuxInt = c
  4341  		v.AddArg(x)
  4342  		return true
  4343  	}
  4344  	return false
  4345  }
// rewriteValuegeneric_OpAnd16_0 applies the first group of generic And16
// rewrite rules: constant folding of (Const16 & Const16) in both operand
// orders, folding a masked shift to zero when the shift count clears
// every significant bit of the mask (c >= 64-ntz(m) for unsigned right
// shifts, c >= 64-nlz(m) for left shifts), idempotence (x & x -> x),
// the identity mask -1, and the annihilating mask 0. It reports whether
// v was rewritten.
func rewriteValuegeneric_OpAnd16_0(v *Value) bool {
	// match: (And16 (Const16 [c]) (Const16 [d]))
	// cond:
	// result: (Const16 [int64(int16(c&d))])
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpConst16 {
			break
		}
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst16 {
			break
		}
		d := v_1.AuxInt
		v.reset(OpConst16)
		// Truncate to 16 bits, then sign-extend back into AuxInt.
		v.AuxInt = int64(int16(c & d))
		return true
	}
	// match: (And16 (Const16 [d]) (Const16 [c]))
	// cond:
	// result: (Const16 [int64(int16(c&d))])
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpConst16 {
			break
		}
		d := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst16 {
			break
		}
		c := v_1.AuxInt
		v.reset(OpConst16)
		v.AuxInt = int64(int16(c & d))
		return true
	}
	// match: (And16 (Const16 [m]) (Rsh16Ux64 _ (Const64 [c])))
	// cond: c >= 64-ntz(m)
	// result: (Const16 [0])
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpConst16 {
			break
		}
		m := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpRsh16Ux64 {
			break
		}
		_ = v_1.Args[1]
		v_1_1 := v_1.Args[1]
		if v_1_1.Op != OpConst64 {
			break
		}
		c := v_1_1.AuxInt
		// The shift moved every bit of the mask out of range, so the
		// AND is identically zero.
		if !(c >= 64-ntz(m)) {
			break
		}
		v.reset(OpConst16)
		v.AuxInt = 0
		return true
	}
	// match: (And16 (Rsh16Ux64 _ (Const64 [c])) (Const16 [m]))
	// cond: c >= 64-ntz(m)
	// result: (Const16 [0])
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpRsh16Ux64 {
			break
		}
		_ = v_0.Args[1]
		v_0_1 := v_0.Args[1]
		if v_0_1.Op != OpConst64 {
			break
		}
		c := v_0_1.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst16 {
			break
		}
		m := v_1.AuxInt
		if !(c >= 64-ntz(m)) {
			break
		}
		v.reset(OpConst16)
		v.AuxInt = 0
		return true
	}
	// match: (And16 (Const16 [m]) (Lsh16x64 _ (Const64 [c])))
	// cond: c >= 64-nlz(m)
	// result: (Const16 [0])
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpConst16 {
			break
		}
		m := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpLsh16x64 {
			break
		}
		_ = v_1.Args[1]
		v_1_1 := v_1.Args[1]
		if v_1_1.Op != OpConst64 {
			break
		}
		c := v_1_1.AuxInt
		if !(c >= 64-nlz(m)) {
			break
		}
		v.reset(OpConst16)
		v.AuxInt = 0
		return true
	}
	// match: (And16 (Lsh16x64 _ (Const64 [c])) (Const16 [m]))
	// cond: c >= 64-nlz(m)
	// result: (Const16 [0])
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpLsh16x64 {
			break
		}
		_ = v_0.Args[1]
		v_0_1 := v_0.Args[1]
		if v_0_1.Op != OpConst64 {
			break
		}
		c := v_0_1.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst16 {
			break
		}
		m := v_1.AuxInt
		if !(c >= 64-nlz(m)) {
			break
		}
		v.reset(OpConst16)
		v.AuxInt = 0
		return true
	}
	// match: (And16 x x)
	// cond:
	// result: x
	for {
		x := v.Args[1]
		if x != v.Args[0] {
			break
		}
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	// match: (And16 (Const16 [-1]) x)
	// cond:
	// result: x
	for {
		x := v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpConst16 {
			break
		}
		if v_0.AuxInt != -1 {
			break
		}
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	// match: (And16 x (Const16 [-1]))
	// cond:
	// result: x
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpConst16 {
			break
		}
		if v_1.AuxInt != -1 {
			break
		}
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	// match: (And16 (Const16 [0]) _)
	// cond:
	// result: (Const16 [0])
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpConst16 {
			break
		}
		if v_0.AuxInt != 0 {
			break
		}
		v.reset(OpConst16)
		v.AuxInt = 0
		return true
	}
	return false
}
// rewriteValuegeneric_OpAnd16_10 applies the second generated group of
// And16 rewrite rules to v: the x&0=0 identity, the four commuted
// absorption forms (And16 x (And16 x y)) => (And16 x y), the four
// reassociation forms that float a Const16 operand outward so constants
// can later be folded together, and the first constant-merging form.
// Each single-iteration "for" below attempts one rule: "break" abandons
// that rule, while a successful match rewrites v in place and returns
// true. Returns false if no rule in this group fires.
// NOTE: generated from gen/generic.rules; change the rules, not this code.
func rewriteValuegeneric_OpAnd16_10(v *Value) bool {
	b := v.Block
	// match: (And16 _ (Const16 [0]))
	// cond:
	// result: (Const16 [0])
	for {
		// Indexing v.Args[1] up front asserts v has both operands
		// (and hints the bounds check) before matching begins.
		_ = v.Args[1]
		v_1 := v.Args[1]
		if v_1.Op != OpConst16 {
			break
		}
		if v_1.AuxInt != 0 {
			break
		}
		v.reset(OpConst16)
		v.AuxInt = 0
		return true
	}
	// match: (And16 x (And16 x y))
	// cond:
	// result: (And16 x y)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpAnd16 {
			break
		}
		y := v_1.Args[1]
		if x != v_1.Args[0] {
			break
		}
		v.reset(OpAnd16)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
	// match: (And16 x (And16 y x))
	// cond:
	// result: (And16 x y)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpAnd16 {
			break
		}
		_ = v_1.Args[1]
		y := v_1.Args[0]
		if x != v_1.Args[1] {
			break
		}
		v.reset(OpAnd16)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
	// match: (And16 (And16 x y) x)
	// cond:
	// result: (And16 x y)
	for {
		x := v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpAnd16 {
			break
		}
		y := v_0.Args[1]
		if x != v_0.Args[0] {
			break
		}
		v.reset(OpAnd16)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
	// match: (And16 (And16 y x) x)
	// cond:
	// result: (And16 x y)
	for {
		x := v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpAnd16 {
			break
		}
		_ = v_0.Args[1]
		y := v_0.Args[0]
		if x != v_0.Args[1] {
			break
		}
		v.reset(OpAnd16)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
	// match: (And16 (And16 i:(Const16 <t>) z) x)
	// cond: (z.Op != OpConst16 && x.Op != OpConst16)
	// result: (And16 i (And16 <t> z x))
	for {
		x := v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpAnd16 {
			break
		}
		z := v_0.Args[1]
		i := v_0.Args[0]
		if i.Op != OpConst16 {
			break
		}
		t := i.Type
		// Only reassociate when the non-constant operands really are
		// non-constant, so this rule cannot loop with the folding rules.
		if !(z.Op != OpConst16 && x.Op != OpConst16) {
			break
		}
		v.reset(OpAnd16)
		v.AddArg(i)
		v0 := b.NewValue0(v.Pos, OpAnd16, t)
		v0.AddArg(z)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (And16 (And16 z i:(Const16 <t>)) x)
	// cond: (z.Op != OpConst16 && x.Op != OpConst16)
	// result: (And16 i (And16 <t> z x))
	for {
		x := v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpAnd16 {
			break
		}
		_ = v_0.Args[1]
		z := v_0.Args[0]
		i := v_0.Args[1]
		if i.Op != OpConst16 {
			break
		}
		t := i.Type
		if !(z.Op != OpConst16 && x.Op != OpConst16) {
			break
		}
		v.reset(OpAnd16)
		v.AddArg(i)
		v0 := b.NewValue0(v.Pos, OpAnd16, t)
		v0.AddArg(z)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (And16 x (And16 i:(Const16 <t>) z))
	// cond: (z.Op != OpConst16 && x.Op != OpConst16)
	// result: (And16 i (And16 <t> z x))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpAnd16 {
			break
		}
		z := v_1.Args[1]
		i := v_1.Args[0]
		if i.Op != OpConst16 {
			break
		}
		t := i.Type
		if !(z.Op != OpConst16 && x.Op != OpConst16) {
			break
		}
		v.reset(OpAnd16)
		v.AddArg(i)
		v0 := b.NewValue0(v.Pos, OpAnd16, t)
		v0.AddArg(z)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (And16 x (And16 z i:(Const16 <t>)))
	// cond: (z.Op != OpConst16 && x.Op != OpConst16)
	// result: (And16 i (And16 <t> z x))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpAnd16 {
			break
		}
		_ = v_1.Args[1]
		z := v_1.Args[0]
		i := v_1.Args[1]
		if i.Op != OpConst16 {
			break
		}
		t := i.Type
		if !(z.Op != OpConst16 && x.Op != OpConst16) {
			break
		}
		v.reset(OpAnd16)
		v.AddArg(i)
		v0 := b.NewValue0(v.Pos, OpAnd16, t)
		v0.AddArg(z)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (And16 (Const16 <t> [c]) (And16 (Const16 <t> [d]) x))
	// cond:
	// result: (And16 (Const16 <t> [int64(int16(c&d))]) x)
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpConst16 {
			break
		}
		t := v_0.Type
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpAnd16 {
			break
		}
		x := v_1.Args[1]
		v_1_0 := v_1.Args[0]
		if v_1_0.Op != OpConst16 {
			break
		}
		if v_1_0.Type != t {
			break
		}
		d := v_1_0.AuxInt
		v.reset(OpAnd16)
		v0 := b.NewValue0(v.Pos, OpConst16, t)
		// Truncate to 16 bits, then sign-extend back into the AuxInt.
		v0.AuxInt = int64(int16(c & d))
		v.AddArg(v0)
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValuegeneric_OpAnd16_20 applies the final generated group of
// And16 rewrite rules to v: the three remaining commuted forms that merge
// two same-typed Const16 operands of a nested And16 into one constant,
// (And16 (Const16 [c]) (And16 x (Const16 [d]))) => (And16 (Const16 [c&d]) x)
// and its mirror images. Each single-iteration "for" attempts one rule;
// a match rewrites v in place and returns true, otherwise false.
// NOTE: generated from gen/generic.rules; change the rules, not this code.
func rewriteValuegeneric_OpAnd16_20(v *Value) bool {
	b := v.Block
	// match: (And16 (Const16 <t> [c]) (And16 x (Const16 <t> [d])))
	// cond:
	// result: (And16 (Const16 <t> [int64(int16(c&d))]) x)
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpConst16 {
			break
		}
		t := v_0.Type
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpAnd16 {
			break
		}
		_ = v_1.Args[1]
		x := v_1.Args[0]
		v_1_1 := v_1.Args[1]
		if v_1_1.Op != OpConst16 {
			break
		}
		// Both constants must have the same type for the merge to be valid.
		if v_1_1.Type != t {
			break
		}
		d := v_1_1.AuxInt
		v.reset(OpAnd16)
		v0 := b.NewValue0(v.Pos, OpConst16, t)
		v0.AuxInt = int64(int16(c & d))
		v.AddArg(v0)
		v.AddArg(x)
		return true
	}
	// match: (And16 (And16 (Const16 <t> [d]) x) (Const16 <t> [c]))
	// cond:
	// result: (And16 (Const16 <t> [int64(int16(c&d))]) x)
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpAnd16 {
			break
		}
		x := v_0.Args[1]
		v_0_0 := v_0.Args[0]
		if v_0_0.Op != OpConst16 {
			break
		}
		t := v_0_0.Type
		d := v_0_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst16 {
			break
		}
		if v_1.Type != t {
			break
		}
		c := v_1.AuxInt
		v.reset(OpAnd16)
		v0 := b.NewValue0(v.Pos, OpConst16, t)
		v0.AuxInt = int64(int16(c & d))
		v.AddArg(v0)
		v.AddArg(x)
		return true
	}
	// match: (And16 (And16 x (Const16 <t> [d])) (Const16 <t> [c]))
	// cond:
	// result: (And16 (Const16 <t> [int64(int16(c&d))]) x)
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpAnd16 {
			break
		}
		_ = v_0.Args[1]
		x := v_0.Args[0]
		v_0_1 := v_0.Args[1]
		if v_0_1.Op != OpConst16 {
			break
		}
		t := v_0_1.Type
		d := v_0_1.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst16 {
			break
		}
		if v_1.Type != t {
			break
		}
		c := v_1.AuxInt
		v.reset(OpAnd16)
		v0 := b.NewValue0(v.Pos, OpConst16, t)
		v0.AuxInt = int64(int16(c & d))
		v.AddArg(v0)
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValuegeneric_OpAnd32_0 applies the first generated group of
// And32 rewrite rules to v: folding two Const32 operands, zeroing an AND
// whose mask is entirely shifted away (conditions use the ntz/nlz helpers,
// which count trailing/leading zero bits of the 64-bit AuxInt mask — see
// the rewrite helper definitions), the x&x=x idempotence rule, and the
// -1 and 0 mask identities. Each single-iteration "for" attempts one
// rule; a match rewrites v in place and returns true, otherwise false.
// NOTE: generated from gen/generic.rules; change the rules, not this code.
func rewriteValuegeneric_OpAnd32_0(v *Value) bool {
	// match: (And32 (Const32 [c]) (Const32 [d]))
	// cond:
	// result: (Const32 [int64(int32(c&d))])
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpConst32 {
			break
		}
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst32 {
			break
		}
		d := v_1.AuxInt
		v.reset(OpConst32)
		// Truncate to 32 bits, then sign-extend back into the AuxInt.
		v.AuxInt = int64(int32(c & d))
		return true
	}
	// match: (And32 (Const32 [d]) (Const32 [c]))
	// cond:
	// result: (Const32 [int64(int32(c&d))])
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpConst32 {
			break
		}
		d := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst32 {
			break
		}
		c := v_1.AuxInt
		v.reset(OpConst32)
		v.AuxInt = int64(int32(c & d))
		return true
	}
	// match: (And32 (Const32 [m]) (Rsh32Ux64 _ (Const64 [c])))
	// cond: c >= 64-ntz(m)
	// result: (Const32 [0])
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpConst32 {
			break
		}
		m := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpRsh32Ux64 {
			break
		}
		_ = v_1.Args[1]
		v_1_1 := v_1.Args[1]
		if v_1_1.Op != OpConst64 {
			break
		}
		c := v_1_1.AuxInt
		// Shift moved every set bit of m out of range: result is 0.
		if !(c >= 64-ntz(m)) {
			break
		}
		v.reset(OpConst32)
		v.AuxInt = 0
		return true
	}
	// match: (And32 (Rsh32Ux64 _ (Const64 [c])) (Const32 [m]))
	// cond: c >= 64-ntz(m)
	// result: (Const32 [0])
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpRsh32Ux64 {
			break
		}
		_ = v_0.Args[1]
		v_0_1 := v_0.Args[1]
		if v_0_1.Op != OpConst64 {
			break
		}
		c := v_0_1.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst32 {
			break
		}
		m := v_1.AuxInt
		if !(c >= 64-ntz(m)) {
			break
		}
		v.reset(OpConst32)
		v.AuxInt = 0
		return true
	}
	// match: (And32 (Const32 [m]) (Lsh32x64 _ (Const64 [c])))
	// cond: c >= 64-nlz(m)
	// result: (Const32 [0])
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpConst32 {
			break
		}
		m := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpLsh32x64 {
			break
		}
		_ = v_1.Args[1]
		v_1_1 := v_1.Args[1]
		if v_1_1.Op != OpConst64 {
			break
		}
		c := v_1_1.AuxInt
		if !(c >= 64-nlz(m)) {
			break
		}
		v.reset(OpConst32)
		v.AuxInt = 0
		return true
	}
	// match: (And32 (Lsh32x64 _ (Const64 [c])) (Const32 [m]))
	// cond: c >= 64-nlz(m)
	// result: (Const32 [0])
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpLsh32x64 {
			break
		}
		_ = v_0.Args[1]
		v_0_1 := v_0.Args[1]
		if v_0_1.Op != OpConst64 {
			break
		}
		c := v_0_1.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst32 {
			break
		}
		m := v_1.AuxInt
		if !(c >= 64-nlz(m)) {
			break
		}
		v.reset(OpConst32)
		v.AuxInt = 0
		return true
	}
	// match: (And32 x x)
	// cond:
	// result: x
	for {
		x := v.Args[1]
		if x != v.Args[0] {
			break
		}
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	// match: (And32 (Const32 [-1]) x)
	// cond:
	// result: x
	for {
		x := v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpConst32 {
			break
		}
		if v_0.AuxInt != -1 {
			break
		}
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	// match: (And32 x (Const32 [-1]))
	// cond:
	// result: x
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpConst32 {
			break
		}
		if v_1.AuxInt != -1 {
			break
		}
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	// match: (And32 (Const32 [0]) _)
	// cond:
	// result: (Const32 [0])
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpConst32 {
			break
		}
		if v_0.AuxInt != 0 {
			break
		}
		v.reset(OpConst32)
		v.AuxInt = 0
		return true
	}
	return false
}
// rewriteValuegeneric_OpAnd32_10 applies the second generated group of
// And32 rewrite rules to v: the x&0=0 identity, the four commuted
// absorption forms (And32 x (And32 x y)) => (And32 x y), the four
// reassociation forms that float a Const32 operand outward for later
// constant folding, and the first constant-merging form. Each
// single-iteration "for" attempts one rule; a match rewrites v in place
// and returns true, otherwise false.
// NOTE: generated from gen/generic.rules; change the rules, not this code.
func rewriteValuegeneric_OpAnd32_10(v *Value) bool {
	b := v.Block
	// match: (And32 _ (Const32 [0]))
	// cond:
	// result: (Const32 [0])
	for {
		_ = v.Args[1]
		v_1 := v.Args[1]
		if v_1.Op != OpConst32 {
			break
		}
		if v_1.AuxInt != 0 {
			break
		}
		v.reset(OpConst32)
		v.AuxInt = 0
		return true
	}
	// match: (And32 x (And32 x y))
	// cond:
	// result: (And32 x y)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpAnd32 {
			break
		}
		y := v_1.Args[1]
		if x != v_1.Args[0] {
			break
		}
		v.reset(OpAnd32)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
	// match: (And32 x (And32 y x))
	// cond:
	// result: (And32 x y)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpAnd32 {
			break
		}
		_ = v_1.Args[1]
		y := v_1.Args[0]
		if x != v_1.Args[1] {
			break
		}
		v.reset(OpAnd32)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
	// match: (And32 (And32 x y) x)
	// cond:
	// result: (And32 x y)
	for {
		x := v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpAnd32 {
			break
		}
		y := v_0.Args[1]
		if x != v_0.Args[0] {
			break
		}
		v.reset(OpAnd32)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
	// match: (And32 (And32 y x) x)
	// cond:
	// result: (And32 x y)
	for {
		x := v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpAnd32 {
			break
		}
		_ = v_0.Args[1]
		y := v_0.Args[0]
		if x != v_0.Args[1] {
			break
		}
		v.reset(OpAnd32)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
	// match: (And32 (And32 i:(Const32 <t>) z) x)
	// cond: (z.Op != OpConst32 && x.Op != OpConst32)
	// result: (And32 i (And32 <t> z x))
	for {
		x := v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpAnd32 {
			break
		}
		z := v_0.Args[1]
		i := v_0.Args[0]
		if i.Op != OpConst32 {
			break
		}
		t := i.Type
		// Only reassociate when the non-constant operands really are
		// non-constant, so this rule cannot loop with the folding rules.
		if !(z.Op != OpConst32 && x.Op != OpConst32) {
			break
		}
		v.reset(OpAnd32)
		v.AddArg(i)
		v0 := b.NewValue0(v.Pos, OpAnd32, t)
		v0.AddArg(z)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (And32 (And32 z i:(Const32 <t>)) x)
	// cond: (z.Op != OpConst32 && x.Op != OpConst32)
	// result: (And32 i (And32 <t> z x))
	for {
		x := v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpAnd32 {
			break
		}
		_ = v_0.Args[1]
		z := v_0.Args[0]
		i := v_0.Args[1]
		if i.Op != OpConst32 {
			break
		}
		t := i.Type
		if !(z.Op != OpConst32 && x.Op != OpConst32) {
			break
		}
		v.reset(OpAnd32)
		v.AddArg(i)
		v0 := b.NewValue0(v.Pos, OpAnd32, t)
		v0.AddArg(z)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (And32 x (And32 i:(Const32 <t>) z))
	// cond: (z.Op != OpConst32 && x.Op != OpConst32)
	// result: (And32 i (And32 <t> z x))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpAnd32 {
			break
		}
		z := v_1.Args[1]
		i := v_1.Args[0]
		if i.Op != OpConst32 {
			break
		}
		t := i.Type
		if !(z.Op != OpConst32 && x.Op != OpConst32) {
			break
		}
		v.reset(OpAnd32)
		v.AddArg(i)
		v0 := b.NewValue0(v.Pos, OpAnd32, t)
		v0.AddArg(z)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (And32 x (And32 z i:(Const32 <t>)))
	// cond: (z.Op != OpConst32 && x.Op != OpConst32)
	// result: (And32 i (And32 <t> z x))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpAnd32 {
			break
		}
		_ = v_1.Args[1]
		z := v_1.Args[0]
		i := v_1.Args[1]
		if i.Op != OpConst32 {
			break
		}
		t := i.Type
		if !(z.Op != OpConst32 && x.Op != OpConst32) {
			break
		}
		v.reset(OpAnd32)
		v.AddArg(i)
		v0 := b.NewValue0(v.Pos, OpAnd32, t)
		v0.AddArg(z)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (And32 (Const32 <t> [c]) (And32 (Const32 <t> [d]) x))
	// cond:
	// result: (And32 (Const32 <t> [int64(int32(c&d))]) x)
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpConst32 {
			break
		}
		t := v_0.Type
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpAnd32 {
			break
		}
		x := v_1.Args[1]
		v_1_0 := v_1.Args[0]
		if v_1_0.Op != OpConst32 {
			break
		}
		if v_1_0.Type != t {
			break
		}
		d := v_1_0.AuxInt
		v.reset(OpAnd32)
		v0 := b.NewValue0(v.Pos, OpConst32, t)
		v0.AuxInt = int64(int32(c & d))
		v.AddArg(v0)
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValuegeneric_OpAnd32_20 applies the final generated group of
// And32 rewrite rules to v: the three remaining commuted forms that merge
// two same-typed Const32 operands of a nested And32 into one constant.
// Each single-iteration "for" attempts one rule; a match rewrites v in
// place and returns true, otherwise false.
// NOTE: generated from gen/generic.rules; change the rules, not this code.
func rewriteValuegeneric_OpAnd32_20(v *Value) bool {
	b := v.Block
	// match: (And32 (Const32 <t> [c]) (And32 x (Const32 <t> [d])))
	// cond:
	// result: (And32 (Const32 <t> [int64(int32(c&d))]) x)
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpConst32 {
			break
		}
		t := v_0.Type
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpAnd32 {
			break
		}
		_ = v_1.Args[1]
		x := v_1.Args[0]
		v_1_1 := v_1.Args[1]
		if v_1_1.Op != OpConst32 {
			break
		}
		// Both constants must have the same type for the merge to be valid.
		if v_1_1.Type != t {
			break
		}
		d := v_1_1.AuxInt
		v.reset(OpAnd32)
		v0 := b.NewValue0(v.Pos, OpConst32, t)
		v0.AuxInt = int64(int32(c & d))
		v.AddArg(v0)
		v.AddArg(x)
		return true
	}
	// match: (And32 (And32 (Const32 <t> [d]) x) (Const32 <t> [c]))
	// cond:
	// result: (And32 (Const32 <t> [int64(int32(c&d))]) x)
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpAnd32 {
			break
		}
		x := v_0.Args[1]
		v_0_0 := v_0.Args[0]
		if v_0_0.Op != OpConst32 {
			break
		}
		t := v_0_0.Type
		d := v_0_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst32 {
			break
		}
		if v_1.Type != t {
			break
		}
		c := v_1.AuxInt
		v.reset(OpAnd32)
		v0 := b.NewValue0(v.Pos, OpConst32, t)
		v0.AuxInt = int64(int32(c & d))
		v.AddArg(v0)
		v.AddArg(x)
		return true
	}
	// match: (And32 (And32 x (Const32 <t> [d])) (Const32 <t> [c]))
	// cond:
	// result: (And32 (Const32 <t> [int64(int32(c&d))]) x)
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpAnd32 {
			break
		}
		_ = v_0.Args[1]
		x := v_0.Args[0]
		v_0_1 := v_0.Args[1]
		if v_0_1.Op != OpConst32 {
			break
		}
		t := v_0_1.Type
		d := v_0_1.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst32 {
			break
		}
		if v_1.Type != t {
			break
		}
		c := v_1.AuxInt
		v.reset(OpAnd32)
		v0 := b.NewValue0(v.Pos, OpConst32, t)
		v0.AuxInt = int64(int32(c & d))
		v.AddArg(v0)
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValuegeneric_OpAnd64_0 applies the first generated group of
// And64 rewrite rules to v: folding two Const64 operands (no truncation
// needed at 64 bits), zeroing an AND whose mask is entirely shifted away
// (conditions use the ntz/nlz helpers, which count trailing/leading zero
// bits of the mask — see the rewrite helper definitions), the x&x=x
// idempotence rule, and the -1 and 0 mask identities. Each
// single-iteration "for" attempts one rule; a match rewrites v in place
// and returns true, otherwise false.
// NOTE: generated from gen/generic.rules; change the rules, not this code.
func rewriteValuegeneric_OpAnd64_0(v *Value) bool {
	// match: (And64 (Const64 [c]) (Const64 [d]))
	// cond:
	// result: (Const64 [c&d])
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpConst64 {
			break
		}
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst64 {
			break
		}
		d := v_1.AuxInt
		v.reset(OpConst64)
		v.AuxInt = c & d
		return true
	}
	// match: (And64 (Const64 [d]) (Const64 [c]))
	// cond:
	// result: (Const64 [c&d])
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpConst64 {
			break
		}
		d := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst64 {
			break
		}
		c := v_1.AuxInt
		v.reset(OpConst64)
		v.AuxInt = c & d
		return true
	}
	// match: (And64 (Const64 [m]) (Rsh64Ux64 _ (Const64 [c])))
	// cond: c >= 64-ntz(m)
	// result: (Const64 [0])
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpConst64 {
			break
		}
		m := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpRsh64Ux64 {
			break
		}
		_ = v_1.Args[1]
		v_1_1 := v_1.Args[1]
		if v_1_1.Op != OpConst64 {
			break
		}
		c := v_1_1.AuxInt
		// Shift moved every set bit of m out of range: result is 0.
		if !(c >= 64-ntz(m)) {
			break
		}
		v.reset(OpConst64)
		v.AuxInt = 0
		return true
	}
	// match: (And64 (Rsh64Ux64 _ (Const64 [c])) (Const64 [m]))
	// cond: c >= 64-ntz(m)
	// result: (Const64 [0])
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpRsh64Ux64 {
			break
		}
		_ = v_0.Args[1]
		v_0_1 := v_0.Args[1]
		if v_0_1.Op != OpConst64 {
			break
		}
		c := v_0_1.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst64 {
			break
		}
		m := v_1.AuxInt
		if !(c >= 64-ntz(m)) {
			break
		}
		v.reset(OpConst64)
		v.AuxInt = 0
		return true
	}
	// match: (And64 (Const64 [m]) (Lsh64x64 _ (Const64 [c])))
	// cond: c >= 64-nlz(m)
	// result: (Const64 [0])
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpConst64 {
			break
		}
		m := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpLsh64x64 {
			break
		}
		_ = v_1.Args[1]
		v_1_1 := v_1.Args[1]
		if v_1_1.Op != OpConst64 {
			break
		}
		c := v_1_1.AuxInt
		if !(c >= 64-nlz(m)) {
			break
		}
		v.reset(OpConst64)
		v.AuxInt = 0
		return true
	}
	// match: (And64 (Lsh64x64 _ (Const64 [c])) (Const64 [m]))
	// cond: c >= 64-nlz(m)
	// result: (Const64 [0])
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpLsh64x64 {
			break
		}
		_ = v_0.Args[1]
		v_0_1 := v_0.Args[1]
		if v_0_1.Op != OpConst64 {
			break
		}
		c := v_0_1.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst64 {
			break
		}
		m := v_1.AuxInt
		if !(c >= 64-nlz(m)) {
			break
		}
		v.reset(OpConst64)
		v.AuxInt = 0
		return true
	}
	// match: (And64 x x)
	// cond:
	// result: x
	for {
		x := v.Args[1]
		if x != v.Args[0] {
			break
		}
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	// match: (And64 (Const64 [-1]) x)
	// cond:
	// result: x
	for {
		x := v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpConst64 {
			break
		}
		if v_0.AuxInt != -1 {
			break
		}
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	// match: (And64 x (Const64 [-1]))
	// cond:
	// result: x
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpConst64 {
			break
		}
		if v_1.AuxInt != -1 {
			break
		}
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	// match: (And64 (Const64 [0]) _)
	// cond:
	// result: (Const64 [0])
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpConst64 {
			break
		}
		if v_0.AuxInt != 0 {
			break
		}
		v.reset(OpConst64)
		v.AuxInt = 0
		return true
	}
	return false
}
// rewriteValuegeneric_OpAnd64_10 applies the second generated group of
// And64 rewrite rules to v: the x&0=0 identity, the four commuted
// absorption forms (And64 x (And64 x y)) => (And64 x y), four rules that
// replace an AND with a contiguous-run constant mask by a shift pair
// (the conditions use the nlz/nto/nlo/ntz helpers on the mask and require
// the run length >= 32 — see the rewrite helper definitions), and the
// first reassociation form that floats a Const64 operand outward. Each
// single-iteration "for" attempts one rule; a match rewrites v in place
// and returns true, otherwise false.
// NOTE: generated from gen/generic.rules; change the rules, not this code.
func rewriteValuegeneric_OpAnd64_10(v *Value) bool {
	b := v.Block
	// match: (And64 _ (Const64 [0]))
	// cond:
	// result: (Const64 [0])
	for {
		_ = v.Args[1]
		v_1 := v.Args[1]
		if v_1.Op != OpConst64 {
			break
		}
		if v_1.AuxInt != 0 {
			break
		}
		v.reset(OpConst64)
		v.AuxInt = 0
		return true
	}
	// match: (And64 x (And64 x y))
	// cond:
	// result: (And64 x y)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpAnd64 {
			break
		}
		y := v_1.Args[1]
		if x != v_1.Args[0] {
			break
		}
		v.reset(OpAnd64)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
	// match: (And64 x (And64 y x))
	// cond:
	// result: (And64 x y)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpAnd64 {
			break
		}
		_ = v_1.Args[1]
		y := v_1.Args[0]
		if x != v_1.Args[1] {
			break
		}
		v.reset(OpAnd64)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
	// match: (And64 (And64 x y) x)
	// cond:
	// result: (And64 x y)
	for {
		x := v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpAnd64 {
			break
		}
		y := v_0.Args[1]
		if x != v_0.Args[0] {
			break
		}
		v.reset(OpAnd64)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
	// match: (And64 (And64 y x) x)
	// cond:
	// result: (And64 x y)
	for {
		x := v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpAnd64 {
			break
		}
		_ = v_0.Args[1]
		y := v_0.Args[0]
		if x != v_0.Args[1] {
			break
		}
		v.reset(OpAnd64)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
	// match: (And64 <t> (Const64 [y]) x)
	// cond: nlz(y) + nto(y) == 64 && nto(y) >= 32
	// result: (Rsh64Ux64 (Lsh64x64 <t> x (Const64 <t> [nlz(y)])) (Const64 <t> [nlz(y)]))
	for {
		t := v.Type
		x := v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpConst64 {
			break
		}
		y := v_0.AuxInt
		// Mask is all-ones in the low bits: replace the AND with a
		// shift-left/shift-right pair that clears the high nlz(y) bits.
		if !(nlz(y)+nto(y) == 64 && nto(y) >= 32) {
			break
		}
		v.reset(OpRsh64Ux64)
		v0 := b.NewValue0(v.Pos, OpLsh64x64, t)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpConst64, t)
		v1.AuxInt = nlz(y)
		v0.AddArg(v1)
		v.AddArg(v0)
		v2 := b.NewValue0(v.Pos, OpConst64, t)
		v2.AuxInt = nlz(y)
		v.AddArg(v2)
		return true
	}
	// match: (And64 <t> x (Const64 [y]))
	// cond: nlz(y) + nto(y) == 64 && nto(y) >= 32
	// result: (Rsh64Ux64 (Lsh64x64 <t> x (Const64 <t> [nlz(y)])) (Const64 <t> [nlz(y)]))
	for {
		t := v.Type
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpConst64 {
			break
		}
		y := v_1.AuxInt
		if !(nlz(y)+nto(y) == 64 && nto(y) >= 32) {
			break
		}
		v.reset(OpRsh64Ux64)
		v0 := b.NewValue0(v.Pos, OpLsh64x64, t)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpConst64, t)
		v1.AuxInt = nlz(y)
		v0.AddArg(v1)
		v.AddArg(v0)
		v2 := b.NewValue0(v.Pos, OpConst64, t)
		v2.AuxInt = nlz(y)
		v.AddArg(v2)
		return true
	}
	// match: (And64 <t> (Const64 [y]) x)
	// cond: nlo(y) + ntz(y) == 64 && ntz(y) >= 32
	// result: (Lsh64x64 (Rsh64Ux64 <t> x (Const64 <t> [ntz(y)])) (Const64 <t> [ntz(y)]))
	for {
		t := v.Type
		x := v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpConst64 {
			break
		}
		y := v_0.AuxInt
		// Mirror case: mask is all-ones in the high bits; clear the
		// low ntz(y) bits with a shift-right/shift-left pair.
		if !(nlo(y)+ntz(y) == 64 && ntz(y) >= 32) {
			break
		}
		v.reset(OpLsh64x64)
		v0 := b.NewValue0(v.Pos, OpRsh64Ux64, t)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpConst64, t)
		v1.AuxInt = ntz(y)
		v0.AddArg(v1)
		v.AddArg(v0)
		v2 := b.NewValue0(v.Pos, OpConst64, t)
		v2.AuxInt = ntz(y)
		v.AddArg(v2)
		return true
	}
	// match: (And64 <t> x (Const64 [y]))
	// cond: nlo(y) + ntz(y) == 64 && ntz(y) >= 32
	// result: (Lsh64x64 (Rsh64Ux64 <t> x (Const64 <t> [ntz(y)])) (Const64 <t> [ntz(y)]))
	for {
		t := v.Type
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpConst64 {
			break
		}
		y := v_1.AuxInt
		if !(nlo(y)+ntz(y) == 64 && ntz(y) >= 32) {
			break
		}
		v.reset(OpLsh64x64)
		v0 := b.NewValue0(v.Pos, OpRsh64Ux64, t)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpConst64, t)
		v1.AuxInt = ntz(y)
		v0.AddArg(v1)
		v.AddArg(v0)
		v2 := b.NewValue0(v.Pos, OpConst64, t)
		v2.AuxInt = ntz(y)
		v.AddArg(v2)
		return true
	}
	// match: (And64 (And64 i:(Const64 <t>) z) x)
	// cond: (z.Op != OpConst64 && x.Op != OpConst64)
	// result: (And64 i (And64 <t> z x))
	for {
		x := v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpAnd64 {
			break
		}
		z := v_0.Args[1]
		i := v_0.Args[0]
		if i.Op != OpConst64 {
			break
		}
		t := i.Type
		if !(z.Op != OpConst64 && x.Op != OpConst64) {
			break
		}
		v.reset(OpAnd64)
		v.AddArg(i)
		v0 := b.NewValue0(v.Pos, OpAnd64, t)
		v0.AddArg(z)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	return false
}
// rewriteValuegeneric_OpAnd64_20 applies the third batch of generic rewrite
// rules for And64. It canonicalizes (And64 (And64 … const …) x) in all four
// argument orders so the constant floats to the front of a right-nested
// And64 chain, then merges two constants in such a chain into a single
// Const64 [c&d]. It returns true if v was rewritten in place.
//
// NOTE(review): this file is generated from gen/generic.rules; change the
// rules there and regenerate rather than editing this function by hand.
func rewriteValuegeneric_OpAnd64_20(v *Value) bool {
	b := v.Block
	// match: (And64 (And64 z i:(Const64 <t>)) x)
	// cond: (z.Op != OpConst64 && x.Op != OpConst64)
	// result: (And64 i (And64 <t> z x))
	for {
		x := v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpAnd64 {
			break
		}
		_ = v_0.Args[1]
		z := v_0.Args[0]
		i := v_0.Args[1]
		if i.Op != OpConst64 {
			break
		}
		t := i.Type
		// The cond guards against infinite reassociation: only fire when the
		// constant is not already the outermost left operand.
		if !(z.Op != OpConst64 && x.Op != OpConst64) {
			break
		}
		v.reset(OpAnd64)
		v.AddArg(i)
		v0 := b.NewValue0(v.Pos, OpAnd64, t)
		v0.AddArg(z)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (And64 x (And64 i:(Const64 <t>) z))
	// cond: (z.Op != OpConst64 && x.Op != OpConst64)
	// result: (And64 i (And64 <t> z x))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpAnd64 {
			break
		}
		z := v_1.Args[1]
		i := v_1.Args[0]
		if i.Op != OpConst64 {
			break
		}
		t := i.Type
		if !(z.Op != OpConst64 && x.Op != OpConst64) {
			break
		}
		v.reset(OpAnd64)
		v.AddArg(i)
		v0 := b.NewValue0(v.Pos, OpAnd64, t)
		v0.AddArg(z)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (And64 x (And64 z i:(Const64 <t>)))
	// cond: (z.Op != OpConst64 && x.Op != OpConst64)
	// result: (And64 i (And64 <t> z x))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpAnd64 {
			break
		}
		_ = v_1.Args[1]
		z := v_1.Args[0]
		i := v_1.Args[1]
		if i.Op != OpConst64 {
			break
		}
		t := i.Type
		if !(z.Op != OpConst64 && x.Op != OpConst64) {
			break
		}
		v.reset(OpAnd64)
		v.AddArg(i)
		v0 := b.NewValue0(v.Pos, OpAnd64, t)
		v0.AddArg(z)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (And64 (Const64 <t> [c]) (And64 (Const64 <t> [d]) x))
	// cond:
	// result: (And64 (Const64 <t> [c&d]) x)
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpConst64 {
			break
		}
		t := v_0.Type
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpAnd64 {
			break
		}
		x := v_1.Args[1]
		v_1_0 := v_1.Args[0]
		if v_1_0.Op != OpConst64 {
			break
		}
		// Both constants must have the same type for the fold to be valid.
		if v_1_0.Type != t {
			break
		}
		d := v_1_0.AuxInt
		v.reset(OpAnd64)
		v0 := b.NewValue0(v.Pos, OpConst64, t)
		v0.AuxInt = c & d
		v.AddArg(v0)
		v.AddArg(x)
		return true
	}
	// match: (And64 (Const64 <t> [c]) (And64 x (Const64 <t> [d])))
	// cond:
	// result: (And64 (Const64 <t> [c&d]) x)
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpConst64 {
			break
		}
		t := v_0.Type
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpAnd64 {
			break
		}
		_ = v_1.Args[1]
		x := v_1.Args[0]
		v_1_1 := v_1.Args[1]
		if v_1_1.Op != OpConst64 {
			break
		}
		if v_1_1.Type != t {
			break
		}
		d := v_1_1.AuxInt
		v.reset(OpAnd64)
		v0 := b.NewValue0(v.Pos, OpConst64, t)
		v0.AuxInt = c & d
		v.AddArg(v0)
		v.AddArg(x)
		return true
	}
	// match: (And64 (And64 (Const64 <t> [d]) x) (Const64 <t> [c]))
	// cond:
	// result: (And64 (Const64 <t> [c&d]) x)
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpAnd64 {
			break
		}
		x := v_0.Args[1]
		v_0_0 := v_0.Args[0]
		if v_0_0.Op != OpConst64 {
			break
		}
		t := v_0_0.Type
		d := v_0_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst64 {
			break
		}
		if v_1.Type != t {
			break
		}
		c := v_1.AuxInt
		v.reset(OpAnd64)
		v0 := b.NewValue0(v.Pos, OpConst64, t)
		v0.AuxInt = c & d
		v.AddArg(v0)
		v.AddArg(x)
		return true
	}
	// match: (And64 (And64 x (Const64 <t> [d])) (Const64 <t> [c]))
	// cond:
	// result: (And64 (Const64 <t> [c&d]) x)
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpAnd64 {
			break
		}
		_ = v_0.Args[1]
		x := v_0.Args[0]
		v_0_1 := v_0.Args[1]
		if v_0_1.Op != OpConst64 {
			break
		}
		t := v_0_1.Type
		d := v_0_1.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst64 {
			break
		}
		if v_1.Type != t {
			break
		}
		c := v_1.AuxInt
		v.reset(OpAnd64)
		v0 := b.NewValue0(v.Pos, OpConst64, t)
		v0.AuxInt = c & d
		v.AddArg(v0)
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValuegeneric_OpAnd8_0 applies the first batch of generic rewrite
// rules for And8: constant folding (both argument orders), recognizing that
// masking a sufficiently-shifted value yields zero, and the identities
// x&x=x, x&-1=x, and 0&_=0. It returns true if v was rewritten in place.
//
// NOTE(review): this file is generated from gen/generic.rules; change the
// rules there and regenerate rather than editing this function by hand.
func rewriteValuegeneric_OpAnd8_0(v *Value) bool {
	// match: (And8 (Const8 [c]) (Const8 [d]))
	// cond:
	// result: (Const8 [int64(int8(c&d))])
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpConst8 {
			break
		}
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst8 {
			break
		}
		d := v_1.AuxInt
		v.reset(OpConst8)
		// Truncate to 8 bits, then sign-extend back into the int64 aux field.
		v.AuxInt = int64(int8(c & d))
		return true
	}
	// match: (And8 (Const8 [d]) (Const8 [c]))
	// cond:
	// result: (Const8 [int64(int8(c&d))])
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpConst8 {
			break
		}
		d := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst8 {
			break
		}
		c := v_1.AuxInt
		v.reset(OpConst8)
		v.AuxInt = int64(int8(c & d))
		return true
	}
	// match: (And8 (Const8 [m]) (Rsh8Ux64 _ (Const64 [c])))
	// cond: c >= 64-ntz(m)
	// result: (Const8 [0])
	// (ntz/nlz count zeros of the 64-bit aux value, hence the 64-c bound.)
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpConst8 {
			break
		}
		m := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpRsh8Ux64 {
			break
		}
		_ = v_1.Args[1]
		v_1_1 := v_1.Args[1]
		if v_1_1.Op != OpConst64 {
			break
		}
		c := v_1_1.AuxInt
		if !(c >= 64-ntz(m)) {
			break
		}
		v.reset(OpConst8)
		v.AuxInt = 0
		return true
	}
	// match: (And8 (Rsh8Ux64 _ (Const64 [c])) (Const8 [m]))
	// cond: c >= 64-ntz(m)
	// result: (Const8 [0])
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpRsh8Ux64 {
			break
		}
		_ = v_0.Args[1]
		v_0_1 := v_0.Args[1]
		if v_0_1.Op != OpConst64 {
			break
		}
		c := v_0_1.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst8 {
			break
		}
		m := v_1.AuxInt
		if !(c >= 64-ntz(m)) {
			break
		}
		v.reset(OpConst8)
		v.AuxInt = 0
		return true
	}
	// match: (And8 (Const8 [m]) (Lsh8x64 _ (Const64 [c])))
	// cond: c >= 64-nlz(m)
	// result: (Const8 [0])
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpConst8 {
			break
		}
		m := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpLsh8x64 {
			break
		}
		_ = v_1.Args[1]
		v_1_1 := v_1.Args[1]
		if v_1_1.Op != OpConst64 {
			break
		}
		c := v_1_1.AuxInt
		if !(c >= 64-nlz(m)) {
			break
		}
		v.reset(OpConst8)
		v.AuxInt = 0
		return true
	}
	// match: (And8 (Lsh8x64 _ (Const64 [c])) (Const8 [m]))
	// cond: c >= 64-nlz(m)
	// result: (Const8 [0])
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpLsh8x64 {
			break
		}
		_ = v_0.Args[1]
		v_0_1 := v_0.Args[1]
		if v_0_1.Op != OpConst64 {
			break
		}
		c := v_0_1.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst8 {
			break
		}
		m := v_1.AuxInt
		if !(c >= 64-nlz(m)) {
			break
		}
		v.reset(OpConst8)
		v.AuxInt = 0
		return true
	}
	// match: (And8 x x)
	// cond:
	// result: x
	for {
		x := v.Args[1]
		if x != v.Args[0] {
			break
		}
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	// match: (And8 (Const8 [-1]) x)
	// cond:
	// result: x
	for {
		x := v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpConst8 {
			break
		}
		if v_0.AuxInt != -1 {
			break
		}
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	// match: (And8 x (Const8 [-1]))
	// cond:
	// result: x
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpConst8 {
			break
		}
		if v_1.AuxInt != -1 {
			break
		}
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	// match: (And8 (Const8 [0]) _)
	// cond:
	// result: (Const8 [0])
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpConst8 {
			break
		}
		if v_0.AuxInt != 0 {
			break
		}
		v.reset(OpConst8)
		v.AuxInt = 0
		return true
	}
	return false
}
// rewriteValuegeneric_OpAnd8_10 applies the second batch of generic rewrite
// rules for And8: the 0&x=0 identity (constant on the right), absorption
// x & (x&y) = x&y in all four argument orders, reassociation that floats a
// Const8 out of a nested And8, and the start of constant merging for nested
// And8 chains. It returns true if v was rewritten in place.
//
// NOTE(review): this file is generated from gen/generic.rules; change the
// rules there and regenerate rather than editing this function by hand.
func rewriteValuegeneric_OpAnd8_10(v *Value) bool {
	b := v.Block
	// match: (And8 _ (Const8 [0]))
	// cond:
	// result: (Const8 [0])
	for {
		_ = v.Args[1]
		v_1 := v.Args[1]
		if v_1.Op != OpConst8 {
			break
		}
		if v_1.AuxInt != 0 {
			break
		}
		v.reset(OpConst8)
		v.AuxInt = 0
		return true
	}
	// match: (And8 x (And8 x y))
	// cond:
	// result: (And8 x y)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpAnd8 {
			break
		}
		y := v_1.Args[1]
		if x != v_1.Args[0] {
			break
		}
		v.reset(OpAnd8)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
	// match: (And8 x (And8 y x))
	// cond:
	// result: (And8 x y)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpAnd8 {
			break
		}
		_ = v_1.Args[1]
		y := v_1.Args[0]
		if x != v_1.Args[1] {
			break
		}
		v.reset(OpAnd8)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
	// match: (And8 (And8 x y) x)
	// cond:
	// result: (And8 x y)
	for {
		x := v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpAnd8 {
			break
		}
		y := v_0.Args[1]
		if x != v_0.Args[0] {
			break
		}
		v.reset(OpAnd8)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
	// match: (And8 (And8 y x) x)
	// cond:
	// result: (And8 x y)
	for {
		x := v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpAnd8 {
			break
		}
		_ = v_0.Args[1]
		y := v_0.Args[0]
		if x != v_0.Args[1] {
			break
		}
		v.reset(OpAnd8)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
	// match: (And8 (And8 i:(Const8 <t>) z) x)
	// cond: (z.Op != OpConst8 && x.Op != OpConst8)
	// result: (And8 i (And8 <t> z x))
	for {
		x := v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpAnd8 {
			break
		}
		z := v_0.Args[1]
		i := v_0.Args[0]
		if i.Op != OpConst8 {
			break
		}
		t := i.Type
		// The cond guards against infinite reassociation: only fire when the
		// constant is not already the outermost left operand.
		if !(z.Op != OpConst8 && x.Op != OpConst8) {
			break
		}
		v.reset(OpAnd8)
		v.AddArg(i)
		v0 := b.NewValue0(v.Pos, OpAnd8, t)
		v0.AddArg(z)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (And8 (And8 z i:(Const8 <t>)) x)
	// cond: (z.Op != OpConst8 && x.Op != OpConst8)
	// result: (And8 i (And8 <t> z x))
	for {
		x := v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpAnd8 {
			break
		}
		_ = v_0.Args[1]
		z := v_0.Args[0]
		i := v_0.Args[1]
		if i.Op != OpConst8 {
			break
		}
		t := i.Type
		if !(z.Op != OpConst8 && x.Op != OpConst8) {
			break
		}
		v.reset(OpAnd8)
		v.AddArg(i)
		v0 := b.NewValue0(v.Pos, OpAnd8, t)
		v0.AddArg(z)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (And8 x (And8 i:(Const8 <t>) z))
	// cond: (z.Op != OpConst8 && x.Op != OpConst8)
	// result: (And8 i (And8 <t> z x))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpAnd8 {
			break
		}
		z := v_1.Args[1]
		i := v_1.Args[0]
		if i.Op != OpConst8 {
			break
		}
		t := i.Type
		if !(z.Op != OpConst8 && x.Op != OpConst8) {
			break
		}
		v.reset(OpAnd8)
		v.AddArg(i)
		v0 := b.NewValue0(v.Pos, OpAnd8, t)
		v0.AddArg(z)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (And8 x (And8 z i:(Const8 <t>)))
	// cond: (z.Op != OpConst8 && x.Op != OpConst8)
	// result: (And8 i (And8 <t> z x))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpAnd8 {
			break
		}
		_ = v_1.Args[1]
		z := v_1.Args[0]
		i := v_1.Args[1]
		if i.Op != OpConst8 {
			break
		}
		t := i.Type
		if !(z.Op != OpConst8 && x.Op != OpConst8) {
			break
		}
		v.reset(OpAnd8)
		v.AddArg(i)
		v0 := b.NewValue0(v.Pos, OpAnd8, t)
		v0.AddArg(z)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (And8 (Const8 <t> [c]) (And8 (Const8 <t> [d]) x))
	// cond:
	// result: (And8 (Const8 <t> [int64(int8(c&d))]) x)
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpConst8 {
			break
		}
		t := v_0.Type
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpAnd8 {
			break
		}
		x := v_1.Args[1]
		v_1_0 := v_1.Args[0]
		if v_1_0.Op != OpConst8 {
			break
		}
		if v_1_0.Type != t {
			break
		}
		d := v_1_0.AuxInt
		v.reset(OpAnd8)
		v0 := b.NewValue0(v.Pos, OpConst8, t)
		v0.AuxInt = int64(int8(c & d))
		v.AddArg(v0)
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValuegeneric_OpAnd8_20 applies the third batch of generic rewrite
// rules for And8: the remaining three argument orders of merging two Const8
// operands of a nested And8 chain into a single Const8 [int64(int8(c&d))].
// It returns true if v was rewritten in place.
//
// NOTE(review): this file is generated from gen/generic.rules; change the
// rules there and regenerate rather than editing this function by hand.
func rewriteValuegeneric_OpAnd8_20(v *Value) bool {
	b := v.Block
	// match: (And8 (Const8 <t> [c]) (And8 x (Const8 <t> [d])))
	// cond:
	// result: (And8 (Const8 <t> [int64(int8(c&d))]) x)
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpConst8 {
			break
		}
		t := v_0.Type
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpAnd8 {
			break
		}
		_ = v_1.Args[1]
		x := v_1.Args[0]
		v_1_1 := v_1.Args[1]
		if v_1_1.Op != OpConst8 {
			break
		}
		// Both constants must have the same type for the fold to be valid.
		if v_1_1.Type != t {
			break
		}
		d := v_1_1.AuxInt
		v.reset(OpAnd8)
		v0 := b.NewValue0(v.Pos, OpConst8, t)
		v0.AuxInt = int64(int8(c & d))
		v.AddArg(v0)
		v.AddArg(x)
		return true
	}
	// match: (And8 (And8 (Const8 <t> [d]) x) (Const8 <t> [c]))
	// cond:
	// result: (And8 (Const8 <t> [int64(int8(c&d))]) x)
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpAnd8 {
			break
		}
		x := v_0.Args[1]
		v_0_0 := v_0.Args[0]
		if v_0_0.Op != OpConst8 {
			break
		}
		t := v_0_0.Type
		d := v_0_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst8 {
			break
		}
		if v_1.Type != t {
			break
		}
		c := v_1.AuxInt
		v.reset(OpAnd8)
		v0 := b.NewValue0(v.Pos, OpConst8, t)
		v0.AuxInt = int64(int8(c & d))
		v.AddArg(v0)
		v.AddArg(x)
		return true
	}
	// match: (And8 (And8 x (Const8 <t> [d])) (Const8 <t> [c]))
	// cond:
	// result: (And8 (Const8 <t> [int64(int8(c&d))]) x)
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpAnd8 {
			break
		}
		_ = v_0.Args[1]
		x := v_0.Args[0]
		v_0_1 := v_0.Args[1]
		if v_0_1.Op != OpConst8 {
			break
		}
		t := v_0_1.Type
		d := v_0_1.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst8 {
			break
		}
		if v_1.Type != t {
			break
		}
		c := v_1.AuxInt
		v.reset(OpAnd8)
		v0 := b.NewValue0(v.Pos, OpConst8, t)
		v0.AuxInt = int64(int8(c & d))
		v.AddArg(v0)
		v.AddArg(x)
		return true
	}
	return false
}
  6640  func rewriteValuegeneric_OpArraySelect_0(v *Value) bool {
  6641  	// match: (ArraySelect (ArrayMake1 x))
  6642  	// cond:
  6643  	// result: x
  6644  	for {
  6645  		v_0 := v.Args[0]
  6646  		if v_0.Op != OpArrayMake1 {
  6647  			break
  6648  		}
  6649  		x := v_0.Args[0]
  6650  		v.reset(OpCopy)
  6651  		v.Type = x.Type
  6652  		v.AddArg(x)
  6653  		return true
  6654  	}
  6655  	// match: (ArraySelect [0] x:(IData _))
  6656  	// cond:
  6657  	// result: x
  6658  	for {
  6659  		if v.AuxInt != 0 {
  6660  			break
  6661  		}
  6662  		x := v.Args[0]
  6663  		if x.Op != OpIData {
  6664  			break
  6665  		}
  6666  		v.reset(OpCopy)
  6667  		v.Type = x.Type
  6668  		v.AddArg(x)
  6669  		return true
  6670  	}
  6671  	return false
  6672  }
  6673  func rewriteValuegeneric_OpCom16_0(v *Value) bool {
  6674  	// match: (Com16 (Com16 x))
  6675  	// cond:
  6676  	// result: x
  6677  	for {
  6678  		v_0 := v.Args[0]
  6679  		if v_0.Op != OpCom16 {
  6680  			break
  6681  		}
  6682  		x := v_0.Args[0]
  6683  		v.reset(OpCopy)
  6684  		v.Type = x.Type
  6685  		v.AddArg(x)
  6686  		return true
  6687  	}
  6688  	// match: (Com16 (Const16 [c]))
  6689  	// cond:
  6690  	// result: (Const16 [^c])
  6691  	for {
  6692  		v_0 := v.Args[0]
  6693  		if v_0.Op != OpConst16 {
  6694  			break
  6695  		}
  6696  		c := v_0.AuxInt
  6697  		v.reset(OpConst16)
  6698  		v.AuxInt = ^c
  6699  		return true
  6700  	}
  6701  	return false
  6702  }
  6703  func rewriteValuegeneric_OpCom32_0(v *Value) bool {
  6704  	// match: (Com32 (Com32 x))
  6705  	// cond:
  6706  	// result: x
  6707  	for {
  6708  		v_0 := v.Args[0]
  6709  		if v_0.Op != OpCom32 {
  6710  			break
  6711  		}
  6712  		x := v_0.Args[0]
  6713  		v.reset(OpCopy)
  6714  		v.Type = x.Type
  6715  		v.AddArg(x)
  6716  		return true
  6717  	}
  6718  	// match: (Com32 (Const32 [c]))
  6719  	// cond:
  6720  	// result: (Const32 [^c])
  6721  	for {
  6722  		v_0 := v.Args[0]
  6723  		if v_0.Op != OpConst32 {
  6724  			break
  6725  		}
  6726  		c := v_0.AuxInt
  6727  		v.reset(OpConst32)
  6728  		v.AuxInt = ^c
  6729  		return true
  6730  	}
  6731  	return false
  6732  }
  6733  func rewriteValuegeneric_OpCom64_0(v *Value) bool {
  6734  	// match: (Com64 (Com64 x))
  6735  	// cond:
  6736  	// result: x
  6737  	for {
  6738  		v_0 := v.Args[0]
  6739  		if v_0.Op != OpCom64 {
  6740  			break
  6741  		}
  6742  		x := v_0.Args[0]
  6743  		v.reset(OpCopy)
  6744  		v.Type = x.Type
  6745  		v.AddArg(x)
  6746  		return true
  6747  	}
  6748  	// match: (Com64 (Const64 [c]))
  6749  	// cond:
  6750  	// result: (Const64 [^c])
  6751  	for {
  6752  		v_0 := v.Args[0]
  6753  		if v_0.Op != OpConst64 {
  6754  			break
  6755  		}
  6756  		c := v_0.AuxInt
  6757  		v.reset(OpConst64)
  6758  		v.AuxInt = ^c
  6759  		return true
  6760  	}
  6761  	return false
  6762  }
  6763  func rewriteValuegeneric_OpCom8_0(v *Value) bool {
  6764  	// match: (Com8 (Com8 x))
  6765  	// cond:
  6766  	// result: x
  6767  	for {
  6768  		v_0 := v.Args[0]
  6769  		if v_0.Op != OpCom8 {
  6770  			break
  6771  		}
  6772  		x := v_0.Args[0]
  6773  		v.reset(OpCopy)
  6774  		v.Type = x.Type
  6775  		v.AddArg(x)
  6776  		return true
  6777  	}
  6778  	// match: (Com8 (Const8 [c]))
  6779  	// cond:
  6780  	// result: (Const8 [^c])
  6781  	for {
  6782  		v_0 := v.Args[0]
  6783  		if v_0.Op != OpConst8 {
  6784  			break
  6785  		}
  6786  		c := v_0.AuxInt
  6787  		v.reset(OpConst8)
  6788  		v.AuxInt = ^c
  6789  		return true
  6790  	}
  6791  	return false
  6792  }
  6793  func rewriteValuegeneric_OpConstInterface_0(v *Value) bool {
  6794  	b := v.Block
  6795  	typ := &b.Func.Config.Types
  6796  	// match: (ConstInterface)
  6797  	// cond:
  6798  	// result: (IMake (ConstNil <typ.Uintptr>) (ConstNil <typ.BytePtr>))
  6799  	for {
  6800  		v.reset(OpIMake)
  6801  		v0 := b.NewValue0(v.Pos, OpConstNil, typ.Uintptr)
  6802  		v.AddArg(v0)
  6803  		v1 := b.NewValue0(v.Pos, OpConstNil, typ.BytePtr)
  6804  		v.AddArg(v1)
  6805  		return true
  6806  	}
  6807  }
  6808  func rewriteValuegeneric_OpConstSlice_0(v *Value) bool {
  6809  	b := v.Block
  6810  	config := b.Func.Config
  6811  	typ := &b.Func.Config.Types
  6812  	// match: (ConstSlice)
  6813  	// cond: config.PtrSize == 4
  6814  	// result: (SliceMake (ConstNil <v.Type.Elem().PtrTo()>) (Const32 <typ.Int> [0]) (Const32 <typ.Int> [0]))
  6815  	for {
  6816  		if !(config.PtrSize == 4) {
  6817  			break
  6818  		}
  6819  		v.reset(OpSliceMake)
  6820  		v0 := b.NewValue0(v.Pos, OpConstNil, v.Type.Elem().PtrTo())
  6821  		v.AddArg(v0)
  6822  		v1 := b.NewValue0(v.Pos, OpConst32, typ.Int)
  6823  		v1.AuxInt = 0
  6824  		v.AddArg(v1)
  6825  		v2 := b.NewValue0(v.Pos, OpConst32, typ.Int)
  6826  		v2.AuxInt = 0
  6827  		v.AddArg(v2)
  6828  		return true
  6829  	}
  6830  	// match: (ConstSlice)
  6831  	// cond: config.PtrSize == 8
  6832  	// result: (SliceMake (ConstNil <v.Type.Elem().PtrTo()>) (Const64 <typ.Int> [0]) (Const64 <typ.Int> [0]))
  6833  	for {
  6834  		if !(config.PtrSize == 8) {
  6835  			break
  6836  		}
  6837  		v.reset(OpSliceMake)
  6838  		v0 := b.NewValue0(v.Pos, OpConstNil, v.Type.Elem().PtrTo())
  6839  		v.AddArg(v0)
  6840  		v1 := b.NewValue0(v.Pos, OpConst64, typ.Int)
  6841  		v1.AuxInt = 0
  6842  		v.AddArg(v1)
  6843  		v2 := b.NewValue0(v.Pos, OpConst64, typ.Int)
  6844  		v2.AuxInt = 0
  6845  		v.AddArg(v2)
  6846  		return true
  6847  	}
  6848  	return false
  6849  }
// rewriteValuegeneric_OpConstString_0 lowers a string constant to its
// two-word (pointer, length) representation. Empty strings become a nil
// pointer with zero length; non-empty strings reference static string data
// via fe.StringData. The length constant's op (Const32 vs Const64) tracks
// config.PtrSize. It returns true if v was rewritten in place.
//
// NOTE(review): this file is generated from gen/generic.rules; change the
// rules there and regenerate rather than editing this function by hand.
func rewriteValuegeneric_OpConstString_0(v *Value) bool {
	b := v.Block
	config := b.Func.Config
	fe := b.Func.fe
	typ := &b.Func.Config.Types
	// match: (ConstString {s})
	// cond: config.PtrSize == 4 && s.(string) == ""
	// result: (StringMake (ConstNil) (Const32 <typ.Int> [0]))
	for {
		s := v.Aux
		if !(config.PtrSize == 4 && s.(string) == "") {
			break
		}
		v.reset(OpStringMake)
		v0 := b.NewValue0(v.Pos, OpConstNil, typ.BytePtr)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpConst32, typ.Int)
		v1.AuxInt = 0
		v.AddArg(v1)
		return true
	}
	// match: (ConstString {s})
	// cond: config.PtrSize == 8 && s.(string) == ""
	// result: (StringMake (ConstNil) (Const64 <typ.Int> [0]))
	for {
		s := v.Aux
		if !(config.PtrSize == 8 && s.(string) == "") {
			break
		}
		v.reset(OpStringMake)
		v0 := b.NewValue0(v.Pos, OpConstNil, typ.BytePtr)
		v.AddArg(v0)
		v1 := b.NewValue0(v.Pos, OpConst64, typ.Int)
		v1.AuxInt = 0
		v.AddArg(v1)
		return true
	}
	// match: (ConstString {s})
	// cond: config.PtrSize == 4 && s.(string) != ""
	// result: (StringMake (Addr <typ.BytePtr> {fe.StringData(s.(string))} (SB)) (Const32 <typ.Int> [int64(len(s.(string)))]))
	for {
		s := v.Aux
		if !(config.PtrSize == 4 && s.(string) != "") {
			break
		}
		v.reset(OpStringMake)
		// Pointer word: address of the interned static string data.
		v0 := b.NewValue0(v.Pos, OpAddr, typ.BytePtr)
		v0.Aux = fe.StringData(s.(string))
		v1 := b.NewValue0(v.Pos, OpSB, typ.Uintptr)
		v0.AddArg(v1)
		v.AddArg(v0)
		// Length word: byte length of the string constant.
		v2 := b.NewValue0(v.Pos, OpConst32, typ.Int)
		v2.AuxInt = int64(len(s.(string)))
		v.AddArg(v2)
		return true
	}
	// match: (ConstString {s})
	// cond: config.PtrSize == 8 && s.(string) != ""
	// result: (StringMake (Addr <typ.BytePtr> {fe.StringData(s.(string))} (SB)) (Const64 <typ.Int> [int64(len(s.(string)))]))
	for {
		s := v.Aux
		if !(config.PtrSize == 8 && s.(string) != "") {
			break
		}
		v.reset(OpStringMake)
		v0 := b.NewValue0(v.Pos, OpAddr, typ.BytePtr)
		v0.Aux = fe.StringData(s.(string))
		v1 := b.NewValue0(v.Pos, OpSB, typ.Uintptr)
		v0.AddArg(v1)
		v.AddArg(v0)
		v2 := b.NewValue0(v.Pos, OpConst64, typ.Int)
		v2.AuxInt = int64(len(s.(string)))
		v.AddArg(v2)
		return true
	}
	return false
}
// rewriteValuegeneric_OpConvert_0 applies the generic rewrite rules for
// Convert (unsafe.Pointer round-trips): a Convert of an Add of a Convert
// with the same memory argument collapses to a plain Add of the original
// pointer and offset (both Add64 and Add32 forms, both operand orders),
// and (Convert (Convert ptr mem) mem) collapses to ptr. It returns true
// if v was rewritten in place.
//
// NOTE(review): this file is generated from gen/generic.rules; change the
// rules there and regenerate rather than editing this function by hand.
func rewriteValuegeneric_OpConvert_0(v *Value) bool {
	// match: (Convert (Add64 (Convert ptr mem) off) mem)
	// cond:
	// result: (Add64 ptr off)
	for {
		mem := v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpAdd64 {
			break
		}
		off := v_0.Args[1]
		v_0_0 := v_0.Args[0]
		if v_0_0.Op != OpConvert {
			break
		}
		_ = v_0_0.Args[1]
		ptr := v_0_0.Args[0]
		// The inner Convert must observe the same memory state as the outer
		// one for the collapse to be sound.
		if mem != v_0_0.Args[1] {
			break
		}
		v.reset(OpAdd64)
		v.AddArg(ptr)
		v.AddArg(off)
		return true
	}
	// match: (Convert (Add64 off (Convert ptr mem)) mem)
	// cond:
	// result: (Add64 ptr off)
	for {
		mem := v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpAdd64 {
			break
		}
		_ = v_0.Args[1]
		off := v_0.Args[0]
		v_0_1 := v_0.Args[1]
		if v_0_1.Op != OpConvert {
			break
		}
		_ = v_0_1.Args[1]
		ptr := v_0_1.Args[0]
		if mem != v_0_1.Args[1] {
			break
		}
		v.reset(OpAdd64)
		v.AddArg(ptr)
		v.AddArg(off)
		return true
	}
	// match: (Convert (Add32 (Convert ptr mem) off) mem)
	// cond:
	// result: (Add32 ptr off)
	for {
		mem := v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpAdd32 {
			break
		}
		off := v_0.Args[1]
		v_0_0 := v_0.Args[0]
		if v_0_0.Op != OpConvert {
			break
		}
		_ = v_0_0.Args[1]
		ptr := v_0_0.Args[0]
		if mem != v_0_0.Args[1] {
			break
		}
		v.reset(OpAdd32)
		v.AddArg(ptr)
		v.AddArg(off)
		return true
	}
	// match: (Convert (Add32 off (Convert ptr mem)) mem)
	// cond:
	// result: (Add32 ptr off)
	for {
		mem := v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpAdd32 {
			break
		}
		_ = v_0.Args[1]
		off := v_0.Args[0]
		v_0_1 := v_0.Args[1]
		if v_0_1.Op != OpConvert {
			break
		}
		_ = v_0_1.Args[1]
		ptr := v_0_1.Args[0]
		if mem != v_0_1.Args[1] {
			break
		}
		v.reset(OpAdd32)
		v.AddArg(ptr)
		v.AddArg(off)
		return true
	}
	// match: (Convert (Convert ptr mem) mem)
	// cond:
	// result: ptr
	for {
		mem := v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpConvert {
			break
		}
		_ = v_0.Args[1]
		ptr := v_0.Args[0]
		if mem != v_0.Args[1] {
			break
		}
		v.reset(OpCopy)
		v.Type = ptr.Type
		v.AddArg(ptr)
		return true
	}
	return false
}
  7047  func rewriteValuegeneric_OpCvt32Fto32_0(v *Value) bool {
  7048  	// match: (Cvt32Fto32 (Const32F [c]))
  7049  	// cond:
  7050  	// result: (Const32 [int64(int32(auxTo32F(c)))])
  7051  	for {
  7052  		v_0 := v.Args[0]
  7053  		if v_0.Op != OpConst32F {
  7054  			break
  7055  		}
  7056  		c := v_0.AuxInt
  7057  		v.reset(OpConst32)
  7058  		v.AuxInt = int64(int32(auxTo32F(c)))
  7059  		return true
  7060  	}
  7061  	return false
  7062  }
  7063  func rewriteValuegeneric_OpCvt32Fto64_0(v *Value) bool {
  7064  	// match: (Cvt32Fto64 (Const32F [c]))
  7065  	// cond:
  7066  	// result: (Const64 [int64(auxTo32F(c))])
  7067  	for {
  7068  		v_0 := v.Args[0]
  7069  		if v_0.Op != OpConst32F {
  7070  			break
  7071  		}
  7072  		c := v_0.AuxInt
  7073  		v.reset(OpConst64)
  7074  		v.AuxInt = int64(auxTo32F(c))
  7075  		return true
  7076  	}
  7077  	return false
  7078  }
  7079  func rewriteValuegeneric_OpCvt32Fto64F_0(v *Value) bool {
  7080  	// match: (Cvt32Fto64F (Const32F [c]))
  7081  	// cond:
  7082  	// result: (Const64F [c])
  7083  	for {
  7084  		v_0 := v.Args[0]
  7085  		if v_0.Op != OpConst32F {
  7086  			break
  7087  		}
  7088  		c := v_0.AuxInt
  7089  		v.reset(OpConst64F)
  7090  		v.AuxInt = c
  7091  		return true
  7092  	}
  7093  	return false
  7094  }
  7095  func rewriteValuegeneric_OpCvt32to32F_0(v *Value) bool {
  7096  	// match: (Cvt32to32F (Const32 [c]))
  7097  	// cond:
  7098  	// result: (Const32F [auxFrom32F(float32(int32(c)))])
  7099  	for {
  7100  		v_0 := v.Args[0]
  7101  		if v_0.Op != OpConst32 {
  7102  			break
  7103  		}
  7104  		c := v_0.AuxInt
  7105  		v.reset(OpConst32F)
  7106  		v.AuxInt = auxFrom32F(float32(int32(c)))
  7107  		return true
  7108  	}
  7109  	return false
  7110  }
  7111  func rewriteValuegeneric_OpCvt32to64F_0(v *Value) bool {
  7112  	// match: (Cvt32to64F (Const32 [c]))
  7113  	// cond:
  7114  	// result: (Const64F [auxFrom64F(float64(int32(c)))])
  7115  	for {
  7116  		v_0 := v.Args[0]
  7117  		if v_0.Op != OpConst32 {
  7118  			break
  7119  		}
  7120  		c := v_0.AuxInt
  7121  		v.reset(OpConst64F)
  7122  		v.AuxInt = auxFrom64F(float64(int32(c)))
  7123  		return true
  7124  	}
  7125  	return false
  7126  }
  7127  func rewriteValuegeneric_OpCvt64Fto32_0(v *Value) bool {
  7128  	// match: (Cvt64Fto32 (Const64F [c]))
  7129  	// cond:
  7130  	// result: (Const32 [int64(int32(auxTo64F(c)))])
  7131  	for {
  7132  		v_0 := v.Args[0]
  7133  		if v_0.Op != OpConst64F {
  7134  			break
  7135  		}
  7136  		c := v_0.AuxInt
  7137  		v.reset(OpConst32)
  7138  		v.AuxInt = int64(int32(auxTo64F(c)))
  7139  		return true
  7140  	}
  7141  	return false
  7142  }
  7143  func rewriteValuegeneric_OpCvt64Fto32F_0(v *Value) bool {
  7144  	// match: (Cvt64Fto32F (Const64F [c]))
  7145  	// cond:
  7146  	// result: (Const32F [auxFrom32F(float32(auxTo64F(c)))])
  7147  	for {
  7148  		v_0 := v.Args[0]
  7149  		if v_0.Op != OpConst64F {
  7150  			break
  7151  		}
  7152  		c := v_0.AuxInt
  7153  		v.reset(OpConst32F)
  7154  		v.AuxInt = auxFrom32F(float32(auxTo64F(c)))
  7155  		return true
  7156  	}
  7157  	return false
  7158  }
  7159  func rewriteValuegeneric_OpCvt64Fto64_0(v *Value) bool {
  7160  	// match: (Cvt64Fto64 (Const64F [c]))
  7161  	// cond:
  7162  	// result: (Const64 [int64(auxTo64F(c))])
  7163  	for {
  7164  		v_0 := v.Args[0]
  7165  		if v_0.Op != OpConst64F {
  7166  			break
  7167  		}
  7168  		c := v_0.AuxInt
  7169  		v.reset(OpConst64)
  7170  		v.AuxInt = int64(auxTo64F(c))
  7171  		return true
  7172  	}
  7173  	return false
  7174  }
  7175  func rewriteValuegeneric_OpCvt64to32F_0(v *Value) bool {
  7176  	// match: (Cvt64to32F (Const64 [c]))
  7177  	// cond:
  7178  	// result: (Const32F [auxFrom32F(float32(c))])
  7179  	for {
  7180  		v_0 := v.Args[0]
  7181  		if v_0.Op != OpConst64 {
  7182  			break
  7183  		}
  7184  		c := v_0.AuxInt
  7185  		v.reset(OpConst32F)
  7186  		v.AuxInt = auxFrom32F(float32(c))
  7187  		return true
  7188  	}
  7189  	return false
  7190  }
  7191  func rewriteValuegeneric_OpCvt64to64F_0(v *Value) bool {
  7192  	// match: (Cvt64to64F (Const64 [c]))
  7193  	// cond:
  7194  	// result: (Const64F [auxFrom64F(float64(c))])
  7195  	for {
  7196  		v_0 := v.Args[0]
  7197  		if v_0.Op != OpConst64 {
  7198  			break
  7199  		}
  7200  		c := v_0.AuxInt
  7201  		v.reset(OpConst64F)
  7202  		v.AuxInt = auxFrom64F(float64(c))
  7203  		return true
  7204  	}
  7205  	return false
  7206  }
// rewriteValuegeneric_OpDiv16_0 rewrites signed 16-bit division (Div16):
// it constant-folds division of two constants and strength-reduces
// division by a constant into shift/add/multiply-high sequences.
// Reports whether v was rewritten in place. The rules are tried in order;
// the first match wins.
func rewriteValuegeneric_OpDiv16_0(v *Value) bool {
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (Div16 (Const16 [c]) (Const16 [d]))
	// cond: d != 0
	// result: (Const16 [int64(int16(c)/int16(d))])
	for {
		// The generator emits "_ = v.Args[1]" before indexing the args —
		// presumably a bounds-check hint; do not remove by hand.
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpConst16 {
			break
		}
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst16 {
			break
		}
		d := v_1.AuxInt
		if !(d != 0) {
			break
		}
		v.reset(OpConst16)
		v.AuxInt = int64(int16(c) / int16(d))
		return true
	}
	// match: (Div16 n (Const16 [c]))
	// cond: isNonNegative(n) && isPowerOfTwo(c&0xffff)
	// result: (Rsh16Ux64 n (Const64 <typ.UInt64> [log2(c&0xffff)]))
	// Non-negative dividend / power of two: a plain unsigned shift suffices.
	for {
		_ = v.Args[1]
		n := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpConst16 {
			break
		}
		c := v_1.AuxInt
		if !(isNonNegative(n) && isPowerOfTwo(c&0xffff)) {
			break
		}
		v.reset(OpRsh16Ux64)
		v.AddArg(n)
		v0 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
		v0.AuxInt = log2(c & 0xffff)
		v.AddArg(v0)
		return true
	}
	// match: (Div16 <t> n (Const16 [c]))
	// cond: c < 0 && c != -1<<15
	// result: (Neg16 (Div16 <t> n (Const16 <t> [-c])))
	// Negative divisor (other than MinInt16, whose negation overflows):
	// divide by -c and negate, so later rules only see positive divisors.
	for {
		t := v.Type
		_ = v.Args[1]
		n := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpConst16 {
			break
		}
		c := v_1.AuxInt
		if !(c < 0 && c != -1<<15) {
			break
		}
		v.reset(OpNeg16)
		v0 := b.NewValue0(v.Pos, OpDiv16, t)
		v0.AddArg(n)
		v1 := b.NewValue0(v.Pos, OpConst16, t)
		v1.AuxInt = -c
		v0.AddArg(v1)
		v.AddArg(v0)
		return true
	}
	// match: (Div16 <t> x (Const16 [-1<<15]))
	// cond:
	// result: (Rsh16Ux64 (And16 <t> x (Neg16 <t> x)) (Const64 <typ.UInt64> [15]))
	// Division by MinInt16: the quotient is 1 iff x == -1<<15, else 0.
	// x & -x isolates x's lowest set bit, which is bit 15 exactly when
	// x == -1<<15; the unsigned shift by 15 then yields 1 or 0.
	for {
		t := v.Type
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpConst16 {
			break
		}
		if v_1.AuxInt != -1<<15 {
			break
		}
		v.reset(OpRsh16Ux64)
		v0 := b.NewValue0(v.Pos, OpAnd16, t)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpNeg16, t)
		v1.AddArg(x)
		v0.AddArg(v1)
		v.AddArg(v0)
		v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
		v2.AuxInt = 15
		v.AddArg(v2)
		return true
	}
	// match: (Div16 <t> n (Const16 [c]))
	// cond: isPowerOfTwo(c)
	// result: (Rsh16x64 (Add16 <t> n (Rsh16Ux64 <t> (Rsh16x64 <t> n (Const64 <typ.UInt64> [15])) (Const64 <typ.UInt64> [16-log2(c)]))) (Const64 <typ.UInt64> [log2(c)]))
	// Signed power-of-two divide: add a bias of c-1 (derived from n's sign
	// bits) before the arithmetic shift so negative n rounds toward zero.
	for {
		t := v.Type
		_ = v.Args[1]
		n := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpConst16 {
			break
		}
		c := v_1.AuxInt
		if !(isPowerOfTwo(c)) {
			break
		}
		v.reset(OpRsh16x64)
		v0 := b.NewValue0(v.Pos, OpAdd16, t)
		v0.AddArg(n)
		v1 := b.NewValue0(v.Pos, OpRsh16Ux64, t)
		v2 := b.NewValue0(v.Pos, OpRsh16x64, t)
		v2.AddArg(n)
		v3 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
		v3.AuxInt = 15
		v2.AddArg(v3)
		v1.AddArg(v2)
		v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
		v4.AuxInt = 16 - log2(c)
		v1.AddArg(v4)
		v0.AddArg(v1)
		v.AddArg(v0)
		v5 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
		v5.AuxInt = log2(c)
		v.AddArg(v5)
		return true
	}
	// match: (Div16 <t> x (Const16 [c]))
	// cond: smagicOK(16,c)
	// result: (Sub16 <t> (Rsh32x64 <t> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int64(smagic(16,c).m)]) (SignExt16to32 x)) (Const64 <typ.UInt64> [16+smagic(16,c).s])) (Rsh32x64 <t> (SignExt16to32 x) (Const64 <typ.UInt64> [31])))
	// General constant divisor: signed magic-number multiplication
	// (multiply by smagic(16,c).m in 32 bits, shift, then correct for the
	// sign of x by subtracting x>>31).
	for {
		t := v.Type
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpConst16 {
			break
		}
		c := v_1.AuxInt
		if !(smagicOK(16, c)) {
			break
		}
		v.reset(OpSub16)
		v.Type = t
		v0 := b.NewValue0(v.Pos, OpRsh32x64, t)
		v1 := b.NewValue0(v.Pos, OpMul32, typ.UInt32)
		v2 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
		v2.AuxInt = int64(smagic(16, c).m)
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32)
		v3.AddArg(x)
		v1.AddArg(v3)
		v0.AddArg(v1)
		v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
		v4.AuxInt = 16 + smagic(16, c).s
		v0.AddArg(v4)
		v.AddArg(v0)
		v5 := b.NewValue0(v.Pos, OpRsh32x64, t)
		v6 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32)
		v6.AddArg(x)
		v5.AddArg(v6)
		v7 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
		v7.AuxInt = 31
		v5.AddArg(v7)
		v.AddArg(v5)
		return true
	}
	return false
}
// rewriteValuegeneric_OpDiv16u_0 rewrites unsigned 16-bit division (Div16u):
// it constant-folds division of two constants, turns power-of-two divisors
// into shifts, and otherwise applies unsigned magic-number multiplication,
// choosing the variant that fits the target's register size.
// Reports whether v was rewritten in place.
func rewriteValuegeneric_OpDiv16u_0(v *Value) bool {
	b := v.Block
	config := b.Func.Config
	typ := &b.Func.Config.Types
	// match: (Div16u (Const16 [c]) (Const16 [d]))
	// cond: d != 0
	// result: (Const16 [int64(int16(uint16(c)/uint16(d)))])
	for {
		// The generator emits "_ = v.Args[1]" before indexing the args —
		// presumably a bounds-check hint; do not remove by hand.
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpConst16 {
			break
		}
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst16 {
			break
		}
		d := v_1.AuxInt
		if !(d != 0) {
			break
		}
		v.reset(OpConst16)
		v.AuxInt = int64(int16(uint16(c) / uint16(d)))
		return true
	}
	// match: (Div16u n (Const16 [c]))
	// cond: isPowerOfTwo(c&0xffff)
	// result: (Rsh16Ux64 n (Const64 <typ.UInt64> [log2(c&0xffff)]))
	// Power-of-two divisor (masked to 16 bits): unsigned shift.
	for {
		_ = v.Args[1]
		n := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpConst16 {
			break
		}
		c := v_1.AuxInt
		if !(isPowerOfTwo(c & 0xffff)) {
			break
		}
		v.reset(OpRsh16Ux64)
		v.AddArg(n)
		v0 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
		v0.AuxInt = log2(c & 0xffff)
		v.AddArg(v0)
		return true
	}
	// match: (Div16u x (Const16 [c]))
	// cond: umagicOK(16, c) && config.RegSize == 8
	// result: (Trunc64to16 (Rsh64Ux64 <typ.UInt64> (Mul64 <typ.UInt64> (Const64 <typ.UInt64> [int64(1<<16+umagic(16,c).m)]) (ZeroExt16to64 x)) (Const64 <typ.UInt64> [16+umagic(16,c).s])))
	// 64-bit registers: the full 17-bit magic constant fits in a 64-bit
	// multiply, so no rounding tricks are needed.
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpConst16 {
			break
		}
		c := v_1.AuxInt
		if !(umagicOK(16, c) && config.RegSize == 8) {
			break
		}
		v.reset(OpTrunc64to16)
		v0 := b.NewValue0(v.Pos, OpRsh64Ux64, typ.UInt64)
		v1 := b.NewValue0(v.Pos, OpMul64, typ.UInt64)
		v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
		v2.AuxInt = int64(1<<16 + umagic(16, c).m)
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
		v3.AddArg(x)
		v1.AddArg(v3)
		v0.AddArg(v1)
		v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
		v4.AuxInt = 16 + umagic(16, c).s
		v0.AddArg(v4)
		v.AddArg(v0)
		return true
	}
	// match: (Div16u x (Const16 [c]))
	// cond: umagicOK(16, c) && config.RegSize == 4 && umagic(16,c).m&1 == 0
	// result: (Trunc32to16 (Rsh32Ux64 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int64(1<<15+umagic(16,c).m/2)]) (ZeroExt16to32 x)) (Const64 <typ.UInt64> [16+umagic(16,c).s-1])))
	// 32-bit registers, even magic constant: halve it so the multiplier
	// fits in 32 bits, and shift one bit less.
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpConst16 {
			break
		}
		c := v_1.AuxInt
		if !(umagicOK(16, c) && config.RegSize == 4 && umagic(16, c).m&1 == 0) {
			break
		}
		v.reset(OpTrunc32to16)
		v0 := b.NewValue0(v.Pos, OpRsh32Ux64, typ.UInt32)
		v1 := b.NewValue0(v.Pos, OpMul32, typ.UInt32)
		v2 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
		v2.AuxInt = int64(1<<15 + umagic(16, c).m/2)
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
		v3.AddArg(x)
		v1.AddArg(v3)
		v0.AddArg(v1)
		v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
		v4.AuxInt = 16 + umagic(16, c).s - 1
		v0.AddArg(v4)
		v.AddArg(v0)
		return true
	}
	// match: (Div16u x (Const16 [c]))
	// cond: umagicOK(16, c) && config.RegSize == 4 && c&1 == 0
	// result: (Trunc32to16 (Rsh32Ux64 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int64(1<<15+(umagic(16,c).m+1)/2)]) (Rsh32Ux64 <typ.UInt32> (ZeroExt16to32 x) (Const64 <typ.UInt64> [1]))) (Const64 <typ.UInt64> [16+umagic(16,c).s-2])))
	// 32-bit registers, even divisor: pre-shift x right by one so the
	// rounded-up half magic constant still gives an exact quotient.
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpConst16 {
			break
		}
		c := v_1.AuxInt
		if !(umagicOK(16, c) && config.RegSize == 4 && c&1 == 0) {
			break
		}
		v.reset(OpTrunc32to16)
		v0 := b.NewValue0(v.Pos, OpRsh32Ux64, typ.UInt32)
		v1 := b.NewValue0(v.Pos, OpMul32, typ.UInt32)
		v2 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
		v2.AuxInt = int64(1<<15 + (umagic(16, c).m+1)/2)
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpRsh32Ux64, typ.UInt32)
		v4 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
		v4.AddArg(x)
		v3.AddArg(v4)
		v5 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
		v5.AuxInt = 1
		v3.AddArg(v5)
		v1.AddArg(v3)
		v0.AddArg(v1)
		v6 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
		v6.AuxInt = 16 + umagic(16, c).s - 2
		v0.AddArg(v6)
		v.AddArg(v0)
		return true
	}
	// match: (Div16u x (Const16 [c]))
	// cond: umagicOK(16, c) && config.RegSize == 4 && config.useAvg
	// result: (Trunc32to16 (Rsh32Ux64 <typ.UInt32> (Avg32u (Lsh32x64 <typ.UInt32> (ZeroExt16to32 x) (Const64 <typ.UInt64> [16])) (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int64(umagic(16,c).m)]) (ZeroExt16to32 x))) (Const64 <typ.UInt64> [16+umagic(16,c).s-1])))
	// 32-bit registers, general case: use Avg32u to recover the extra bit
	// that the 17-bit magic constant would otherwise need.
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpConst16 {
			break
		}
		c := v_1.AuxInt
		if !(umagicOK(16, c) && config.RegSize == 4 && config.useAvg) {
			break
		}
		v.reset(OpTrunc32to16)
		v0 := b.NewValue0(v.Pos, OpRsh32Ux64, typ.UInt32)
		v1 := b.NewValue0(v.Pos, OpAvg32u, typ.UInt32)
		v2 := b.NewValue0(v.Pos, OpLsh32x64, typ.UInt32)
		v3 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
		v3.AddArg(x)
		v2.AddArg(v3)
		v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
		v4.AuxInt = 16
		v2.AddArg(v4)
		v1.AddArg(v2)
		v5 := b.NewValue0(v.Pos, OpMul32, typ.UInt32)
		v6 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
		v6.AuxInt = int64(umagic(16, c).m)
		v5.AddArg(v6)
		v7 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
		v7.AddArg(x)
		v5.AddArg(v7)
		v1.AddArg(v5)
		v0.AddArg(v1)
		v8 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
		v8.AuxInt = 16 + umagic(16, c).s - 1
		v0.AddArg(v8)
		v.AddArg(v0)
		return true
	}
	return false
}
// rewriteValuegeneric_OpDiv32_0 rewrites signed 32-bit division (Div32):
// it constant-folds division of two constants and strength-reduces
// division by a constant into shift/add/multiply-high sequences, choosing
// the magic-number variant that matches the target's register size and
// available Hmul support. Reports whether v was rewritten in place.
func rewriteValuegeneric_OpDiv32_0(v *Value) bool {
	b := v.Block
	config := b.Func.Config
	typ := &b.Func.Config.Types
	// match: (Div32 (Const32 [c]) (Const32 [d]))
	// cond: d != 0
	// result: (Const32 [int64(int32(c)/int32(d))])
	for {
		// The generator emits "_ = v.Args[1]" before indexing the args —
		// presumably a bounds-check hint; do not remove by hand.
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpConst32 {
			break
		}
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst32 {
			break
		}
		d := v_1.AuxInt
		if !(d != 0) {
			break
		}
		v.reset(OpConst32)
		v.AuxInt = int64(int32(c) / int32(d))
		return true
	}
	// match: (Div32 n (Const32 [c]))
	// cond: isNonNegative(n) && isPowerOfTwo(c&0xffffffff)
	// result: (Rsh32Ux64 n (Const64 <typ.UInt64> [log2(c&0xffffffff)]))
	// Non-negative dividend / power of two: a plain unsigned shift suffices.
	for {
		_ = v.Args[1]
		n := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpConst32 {
			break
		}
		c := v_1.AuxInt
		if !(isNonNegative(n) && isPowerOfTwo(c&0xffffffff)) {
			break
		}
		v.reset(OpRsh32Ux64)
		v.AddArg(n)
		v0 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
		v0.AuxInt = log2(c & 0xffffffff)
		v.AddArg(v0)
		return true
	}
	// match: (Div32 <t> n (Const32 [c]))
	// cond: c < 0 && c != -1<<31
	// result: (Neg32 (Div32 <t> n (Const32 <t> [-c])))
	// Negative divisor (other than MinInt32, whose negation overflows):
	// divide by -c and negate, so later rules only see positive divisors.
	for {
		t := v.Type
		_ = v.Args[1]
		n := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpConst32 {
			break
		}
		c := v_1.AuxInt
		if !(c < 0 && c != -1<<31) {
			break
		}
		v.reset(OpNeg32)
		v0 := b.NewValue0(v.Pos, OpDiv32, t)
		v0.AddArg(n)
		v1 := b.NewValue0(v.Pos, OpConst32, t)
		v1.AuxInt = -c
		v0.AddArg(v1)
		v.AddArg(v0)
		return true
	}
	// match: (Div32 <t> x (Const32 [-1<<31]))
	// cond:
	// result: (Rsh32Ux64 (And32 <t> x (Neg32 <t> x)) (Const64 <typ.UInt64> [31]))
	// Division by MinInt32: the quotient is 1 iff x == -1<<31, else 0.
	// x & -x isolates x's lowest set bit, which is bit 31 exactly when
	// x == -1<<31; the unsigned shift by 31 then yields 1 or 0.
	for {
		t := v.Type
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpConst32 {
			break
		}
		if v_1.AuxInt != -1<<31 {
			break
		}
		v.reset(OpRsh32Ux64)
		v0 := b.NewValue0(v.Pos, OpAnd32, t)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpNeg32, t)
		v1.AddArg(x)
		v0.AddArg(v1)
		v.AddArg(v0)
		v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
		v2.AuxInt = 31
		v.AddArg(v2)
		return true
	}
	// match: (Div32 <t> n (Const32 [c]))
	// cond: isPowerOfTwo(c)
	// result: (Rsh32x64 (Add32 <t> n (Rsh32Ux64 <t> (Rsh32x64 <t> n (Const64 <typ.UInt64> [31])) (Const64 <typ.UInt64> [32-log2(c)]))) (Const64 <typ.UInt64> [log2(c)]))
	// Signed power-of-two divide: add a bias of c-1 (derived from n's sign
	// bits) before the arithmetic shift so negative n rounds toward zero.
	for {
		t := v.Type
		_ = v.Args[1]
		n := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpConst32 {
			break
		}
		c := v_1.AuxInt
		if !(isPowerOfTwo(c)) {
			break
		}
		v.reset(OpRsh32x64)
		v0 := b.NewValue0(v.Pos, OpAdd32, t)
		v0.AddArg(n)
		v1 := b.NewValue0(v.Pos, OpRsh32Ux64, t)
		v2 := b.NewValue0(v.Pos, OpRsh32x64, t)
		v2.AddArg(n)
		v3 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
		v3.AuxInt = 31
		v2.AddArg(v3)
		v1.AddArg(v2)
		v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
		v4.AuxInt = 32 - log2(c)
		v1.AddArg(v4)
		v0.AddArg(v1)
		v.AddArg(v0)
		v5 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
		v5.AuxInt = log2(c)
		v.AddArg(v5)
		return true
	}
	// match: (Div32 <t> x (Const32 [c]))
	// cond: smagicOK(32,c) && config.RegSize == 8
	// result: (Sub32 <t> (Rsh64x64 <t> (Mul64 <typ.UInt64> (Const64 <typ.UInt64> [int64(smagic(32,c).m)]) (SignExt32to64 x)) (Const64 <typ.UInt64> [32+smagic(32,c).s])) (Rsh64x64 <t> (SignExt32to64 x) (Const64 <typ.UInt64> [63])))
	// 64-bit registers: signed magic-number multiply done entirely in 64
	// bits, then correct for the sign of x by subtracting x>>63.
	for {
		t := v.Type
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpConst32 {
			break
		}
		c := v_1.AuxInt
		if !(smagicOK(32, c) && config.RegSize == 8) {
			break
		}
		v.reset(OpSub32)
		v.Type = t
		v0 := b.NewValue0(v.Pos, OpRsh64x64, t)
		v1 := b.NewValue0(v.Pos, OpMul64, typ.UInt64)
		v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
		v2.AuxInt = int64(smagic(32, c).m)
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
		v3.AddArg(x)
		v1.AddArg(v3)
		v0.AddArg(v1)
		v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
		v4.AuxInt = 32 + smagic(32, c).s
		v0.AddArg(v4)
		v.AddArg(v0)
		v5 := b.NewValue0(v.Pos, OpRsh64x64, t)
		v6 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
		v6.AddArg(x)
		v5.AddArg(v6)
		v7 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
		v7.AuxInt = 63
		v5.AddArg(v7)
		v.AddArg(v5)
		return true
	}
	// match: (Div32 <t> x (Const32 [c]))
	// cond: smagicOK(32,c) && config.RegSize == 4 && smagic(32,c).m&1 == 0 && config.useHmul
	// result: (Sub32 <t> (Rsh32x64 <t> (Hmul32 <t> (Const32 <typ.UInt32> [int64(int32(smagic(32,c).m/2))]) x) (Const64 <typ.UInt64> [smagic(32,c).s-1])) (Rsh32x64 <t> x (Const64 <typ.UInt64> [31])))
	// 32-bit registers, even magic constant: halve it so it fits in a
	// signed 32-bit Hmul, and shift one bit less.
	for {
		t := v.Type
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpConst32 {
			break
		}
		c := v_1.AuxInt
		if !(smagicOK(32, c) && config.RegSize == 4 && smagic(32, c).m&1 == 0 && config.useHmul) {
			break
		}
		v.reset(OpSub32)
		v.Type = t
		v0 := b.NewValue0(v.Pos, OpRsh32x64, t)
		v1 := b.NewValue0(v.Pos, OpHmul32, t)
		v2 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
		v2.AuxInt = int64(int32(smagic(32, c).m / 2))
		v1.AddArg(v2)
		v1.AddArg(x)
		v0.AddArg(v1)
		v3 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
		v3.AuxInt = smagic(32, c).s - 1
		v0.AddArg(v3)
		v.AddArg(v0)
		v4 := b.NewValue0(v.Pos, OpRsh32x64, t)
		v4.AddArg(x)
		v5 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
		v5.AuxInt = 31
		v4.AddArg(v5)
		v.AddArg(v4)
		return true
	}
	// match: (Div32 <t> x (Const32 [c]))
	// cond: smagicOK(32,c) && config.RegSize == 4 && smagic(32,c).m&1 != 0 && config.useHmul
	// result: (Sub32 <t> (Rsh32x64 <t> (Add32 <t> (Hmul32 <t> (Const32 <typ.UInt32> [int64(int32(smagic(32,c).m))]) x) x) (Const64 <typ.UInt64> [smagic(32,c).s])) (Rsh32x64 <t> x (Const64 <typ.UInt64> [31])))
	// 32-bit registers, odd magic constant: the constant interpreted as a
	// signed 32-bit value is m - 2^32, so add x back after the Hmul to
	// compensate.
	for {
		t := v.Type
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpConst32 {
			break
		}
		c := v_1.AuxInt
		if !(smagicOK(32, c) && config.RegSize == 4 && smagic(32, c).m&1 != 0 && config.useHmul) {
			break
		}
		v.reset(OpSub32)
		v.Type = t
		v0 := b.NewValue0(v.Pos, OpRsh32x64, t)
		v1 := b.NewValue0(v.Pos, OpAdd32, t)
		v2 := b.NewValue0(v.Pos, OpHmul32, t)
		v3 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
		v3.AuxInt = int64(int32(smagic(32, c).m))
		v2.AddArg(v3)
		v2.AddArg(x)
		v1.AddArg(v2)
		v1.AddArg(x)
		v0.AddArg(v1)
		v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
		v4.AuxInt = smagic(32, c).s
		v0.AddArg(v4)
		v.AddArg(v0)
		v5 := b.NewValue0(v.Pos, OpRsh32x64, t)
		v5.AddArg(x)
		v6 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
		v6.AuxInt = 31
		v5.AddArg(v6)
		v.AddArg(v5)
		return true
	}
	return false
}
  7813  func rewriteValuegeneric_OpDiv32F_0(v *Value) bool {
  7814  	b := v.Block
  7815  	// match: (Div32F (Const32F [c]) (Const32F [d]))
  7816  	// cond:
  7817  	// result: (Const32F [auxFrom32F(auxTo32F(c) / auxTo32F(d))])
  7818  	for {
  7819  		_ = v.Args[1]
  7820  		v_0 := v.Args[0]
  7821  		if v_0.Op != OpConst32F {
  7822  			break
  7823  		}
  7824  		c := v_0.AuxInt
  7825  		v_1 := v.Args[1]
  7826  		if v_1.Op != OpConst32F {
  7827  			break
  7828  		}
  7829  		d := v_1.AuxInt
  7830  		v.reset(OpConst32F)
  7831  		v.AuxInt = auxFrom32F(auxTo32F(c) / auxTo32F(d))
  7832  		return true
  7833  	}
  7834  	// match: (Div32F x (Const32F <t> [c]))
  7835  	// cond: reciprocalExact32(auxTo32F(c))
  7836  	// result: (Mul32F x (Const32F <t> [auxFrom32F(1/auxTo32F(c))]))
  7837  	for {
  7838  		_ = v.Args[1]
  7839  		x := v.Args[0]
  7840  		v_1 := v.Args[1]
  7841  		if v_1.Op != OpConst32F {
  7842  			break
  7843  		}
  7844  		t := v_1.Type
  7845  		c := v_1.AuxInt
  7846  		if !(reciprocalExact32(auxTo32F(c))) {
  7847  			break
  7848  		}
  7849  		v.reset(OpMul32F)
  7850  		v.AddArg(x)
  7851  		v0 := b.NewValue0(v.Pos, OpConst32F, t)
  7852  		v0.AuxInt = auxFrom32F(1 / auxTo32F(c))
  7853  		v.AddArg(v0)
  7854  		return true
  7855  	}
  7856  	return false
  7857  }
  7858  func rewriteValuegeneric_OpDiv32u_0(v *Value) bool {
  7859  	b := v.Block
  7860  	config := b.Func.Config
  7861  	typ := &b.Func.Config.Types
  7862  	// match: (Div32u (Const32 [c]) (Const32 [d]))
  7863  	// cond: d != 0
  7864  	// result: (Const32 [int64(int32(uint32(c)/uint32(d)))])
  7865  	for {
  7866  		_ = v.Args[1]
  7867  		v_0 := v.Args[0]
  7868  		if v_0.Op != OpConst32 {
  7869  			break
  7870  		}
  7871  		c := v_0.AuxInt
  7872  		v_1 := v.Args[1]
  7873  		if v_1.Op != OpConst32 {
  7874  			break
  7875  		}
  7876  		d := v_1.AuxInt
  7877  		if !(d != 0) {
  7878  			break
  7879  		}
  7880  		v.reset(OpConst32)
  7881  		v.AuxInt = int64(int32(uint32(c) / uint32(d)))
  7882  		return true
  7883  	}
  7884  	// match: (Div32u n (Const32 [c]))
  7885  	// cond: isPowerOfTwo(c&0xffffffff)
  7886  	// result: (Rsh32Ux64 n (Const64 <typ.UInt64> [log2(c&0xffffffff)]))
  7887  	for {
  7888  		_ = v.Args[1]
  7889  		n := v.Args[0]
  7890  		v_1 := v.Args[1]
  7891  		if v_1.Op != OpConst32 {
  7892  			break
  7893  		}
  7894  		c := v_1.AuxInt
  7895  		if !(isPowerOfTwo(c & 0xffffffff)) {
  7896  			break
  7897  		}
  7898  		v.reset(OpRsh32Ux64)
  7899  		v.AddArg(n)
  7900  		v0 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
  7901  		v0.AuxInt = log2(c & 0xffffffff)
  7902  		v.AddArg(v0)
  7903  		return true
  7904  	}
  7905  	// match: (Div32u x (Const32 [c]))
  7906  	// cond: umagicOK(32, c) && config.RegSize == 4 && umagic(32,c).m&1 == 0 && config.useHmul
  7907  	// result: (Rsh32Ux64 <typ.UInt32> (Hmul32u <typ.UInt32> (Const32 <typ.UInt32> [int64(int32(1<<31+umagic(32,c).m/2))]) x) (Const64 <typ.UInt64> [umagic(32,c).s-1]))
  7908  	for {
  7909  		_ = v.Args[1]
  7910  		x := v.Args[0]
  7911  		v_1 := v.Args[1]
  7912  		if v_1.Op != OpConst32 {
  7913  			break
  7914  		}
  7915  		c := v_1.AuxInt
  7916  		if !(umagicOK(32, c) && config.RegSize == 4 && umagic(32, c).m&1 == 0 && config.useHmul) {
  7917  			break
  7918  		}
  7919  		v.reset(OpRsh32Ux64)
  7920  		v.Type = typ.UInt32
  7921  		v0 := b.NewValue0(v.Pos, OpHmul32u, typ.UInt32)
  7922  		v1 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
  7923  		v1.AuxInt = int64(int32(1<<31 + umagic(32, c).m/2))
  7924  		v0.AddArg(v1)
  7925  		v0.AddArg(x)
  7926  		v.AddArg(v0)
  7927  		v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
  7928  		v2.AuxInt = umagic(32, c).s - 1
  7929  		v.AddArg(v2)
  7930  		return true
  7931  	}
  7932  	// match: (Div32u x (Const32 [c]))
  7933  	// cond: umagicOK(32, c) && config.RegSize == 4 && c&1 == 0 && config.useHmul
  7934  	// result: (Rsh32Ux64 <typ.UInt32> (Hmul32u <typ.UInt32> (Const32 <typ.UInt32> [int64(int32(1<<31+(umagic(32,c).m+1)/2))]) (Rsh32Ux64 <typ.UInt32> x (Const64 <typ.UInt64> [1]))) (Const64 <typ.UInt64> [umagic(32,c).s-2]))
  7935  	for {
  7936  		_ = v.Args[1]
  7937  		x := v.Args[0]
  7938  		v_1 := v.Args[1]
  7939  		if v_1.Op != OpConst32 {
  7940  			break
  7941  		}
  7942  		c := v_1.AuxInt
  7943  		if !(umagicOK(32, c) && config.RegSize == 4 && c&1 == 0 && config.useHmul) {
  7944  			break
  7945  		}
  7946  		v.reset(OpRsh32Ux64)
  7947  		v.Type = typ.UInt32
  7948  		v0 := b.NewValue0(v.Pos, OpHmul32u, typ.UInt32)
  7949  		v1 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
  7950  		v1.AuxInt = int64(int32(1<<31 + (umagic(32, c).m+1)/2))
  7951  		v0.AddArg(v1)
  7952  		v2 := b.NewValue0(v.Pos, OpRsh32Ux64, typ.UInt32)
  7953  		v2.AddArg(x)
  7954  		v3 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
  7955  		v3.AuxInt = 1
  7956  		v2.AddArg(v3)
  7957  		v0.AddArg(v2)
  7958  		v.AddArg(v0)
  7959  		v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
  7960  		v4.AuxInt = umagic(32, c).s - 2
  7961  		v.AddArg(v4)
  7962  		return true
  7963  	}
  7964  	// match: (Div32u x (Const32 [c]))
  7965  	// cond: umagicOK(32, c) && config.RegSize == 4 && config.useAvg && config.useHmul
  7966  	// result: (Rsh32Ux64 <typ.UInt32> (Avg32u x (Hmul32u <typ.UInt32> (Const32 <typ.UInt32> [int64(int32(umagic(32,c).m))]) x)) (Const64 <typ.UInt64> [umagic(32,c).s-1]))
  7967  	for {
  7968  		_ = v.Args[1]
  7969  		x := v.Args[0]
  7970  		v_1 := v.Args[1]
  7971  		if v_1.Op != OpConst32 {
  7972  			break
  7973  		}
  7974  		c := v_1.AuxInt
  7975  		if !(umagicOK(32, c) && config.RegSize == 4 && config.useAvg && config.useHmul) {
  7976  			break
  7977  		}
  7978  		v.reset(OpRsh32Ux64)
  7979  		v.Type = typ.UInt32
  7980  		v0 := b.NewValue0(v.Pos, OpAvg32u, typ.UInt32)
  7981  		v0.AddArg(x)
  7982  		v1 := b.NewValue0(v.Pos, OpHmul32u, typ.UInt32)
  7983  		v2 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
  7984  		v2.AuxInt = int64(int32(umagic(32, c).m))
  7985  		v1.AddArg(v2)
  7986  		v1.AddArg(x)
  7987  		v0.AddArg(v1)
  7988  		v.AddArg(v0)
  7989  		v3 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
  7990  		v3.AuxInt = umagic(32, c).s - 1
  7991  		v.AddArg(v3)
  7992  		return true
  7993  	}
  7994  	// match: (Div32u x (Const32 [c]))
  7995  	// cond: umagicOK(32, c) && config.RegSize == 8 && umagic(32,c).m&1 == 0
  7996  	// result: (Trunc64to32 (Rsh64Ux64 <typ.UInt64> (Mul64 <typ.UInt64> (Const64 <typ.UInt64> [int64(1<<31+umagic(32,c).m/2)]) (ZeroExt32to64 x)) (Const64 <typ.UInt64> [32+umagic(32,c).s-1])))
  7997  	for {
  7998  		_ = v.Args[1]
  7999  		x := v.Args[0]
  8000  		v_1 := v.Args[1]
  8001  		if v_1.Op != OpConst32 {
  8002  			break
  8003  		}
  8004  		c := v_1.AuxInt
  8005  		if !(umagicOK(32, c) && config.RegSize == 8 && umagic(32, c).m&1 == 0) {
  8006  			break
  8007  		}
  8008  		v.reset(OpTrunc64to32)
  8009  		v0 := b.NewValue0(v.Pos, OpRsh64Ux64, typ.UInt64)
  8010  		v1 := b.NewValue0(v.Pos, OpMul64, typ.UInt64)
  8011  		v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
  8012  		v2.AuxInt = int64(1<<31 + umagic(32, c).m/2)
  8013  		v1.AddArg(v2)
  8014  		v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
  8015  		v3.AddArg(x)
  8016  		v1.AddArg(v3)
  8017  		v0.AddArg(v1)
  8018  		v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
  8019  		v4.AuxInt = 32 + umagic(32, c).s - 1
  8020  		v0.AddArg(v4)
  8021  		v.AddArg(v0)
  8022  		return true
  8023  	}
  8024  	// match: (Div32u x (Const32 [c]))
  8025  	// cond: umagicOK(32, c) && config.RegSize == 8 && c&1 == 0
  8026  	// result: (Trunc64to32 (Rsh64Ux64 <typ.UInt64> (Mul64 <typ.UInt64> (Const64 <typ.UInt64> [int64(1<<31+(umagic(32,c).m+1)/2)]) (Rsh64Ux64 <typ.UInt64> (ZeroExt32to64 x) (Const64 <typ.UInt64> [1]))) (Const64 <typ.UInt64> [32+umagic(32,c).s-2])))
  8027  	for {
  8028  		_ = v.Args[1]
  8029  		x := v.Args[0]
  8030  		v_1 := v.Args[1]
  8031  		if v_1.Op != OpConst32 {
  8032  			break
  8033  		}
  8034  		c := v_1.AuxInt
  8035  		if !(umagicOK(32, c) && config.RegSize == 8 && c&1 == 0) {
  8036  			break
  8037  		}
  8038  		v.reset(OpTrunc64to32)
  8039  		v0 := b.NewValue0(v.Pos, OpRsh64Ux64, typ.UInt64)
  8040  		v1 := b.NewValue0(v.Pos, OpMul64, typ.UInt64)
  8041  		v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
  8042  		v2.AuxInt = int64(1<<31 + (umagic(32, c).m+1)/2)
  8043  		v1.AddArg(v2)
  8044  		v3 := b.NewValue0(v.Pos, OpRsh64Ux64, typ.UInt64)
  8045  		v4 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
  8046  		v4.AddArg(x)
  8047  		v3.AddArg(v4)
  8048  		v5 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
  8049  		v5.AuxInt = 1
  8050  		v3.AddArg(v5)
  8051  		v1.AddArg(v3)
  8052  		v0.AddArg(v1)
  8053  		v6 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
  8054  		v6.AuxInt = 32 + umagic(32, c).s - 2
  8055  		v0.AddArg(v6)
  8056  		v.AddArg(v0)
  8057  		return true
  8058  	}
  8059  	// match: (Div32u x (Const32 [c]))
  8060  	// cond: umagicOK(32, c) && config.RegSize == 8 && config.useAvg
  8061  	// result: (Trunc64to32 (Rsh64Ux64 <typ.UInt64> (Avg64u (Lsh64x64 <typ.UInt64> (ZeroExt32to64 x) (Const64 <typ.UInt64> [32])) (Mul64 <typ.UInt64> (Const64 <typ.UInt32> [int64(umagic(32,c).m)]) (ZeroExt32to64 x))) (Const64 <typ.UInt64> [32+umagic(32,c).s-1])))
  8062  	for {
  8063  		_ = v.Args[1]
  8064  		x := v.Args[0]
  8065  		v_1 := v.Args[1]
  8066  		if v_1.Op != OpConst32 {
  8067  			break
  8068  		}
  8069  		c := v_1.AuxInt
  8070  		if !(umagicOK(32, c) && config.RegSize == 8 && config.useAvg) {
  8071  			break
  8072  		}
  8073  		v.reset(OpTrunc64to32)
  8074  		v0 := b.NewValue0(v.Pos, OpRsh64Ux64, typ.UInt64)
  8075  		v1 := b.NewValue0(v.Pos, OpAvg64u, typ.UInt64)
  8076  		v2 := b.NewValue0(v.Pos, OpLsh64x64, typ.UInt64)
  8077  		v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
  8078  		v3.AddArg(x)
  8079  		v2.AddArg(v3)
  8080  		v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
  8081  		v4.AuxInt = 32
  8082  		v2.AddArg(v4)
  8083  		v1.AddArg(v2)
  8084  		v5 := b.NewValue0(v.Pos, OpMul64, typ.UInt64)
  8085  		v6 := b.NewValue0(v.Pos, OpConst64, typ.UInt32)
  8086  		v6.AuxInt = int64(umagic(32, c).m)
  8087  		v5.AddArg(v6)
  8088  		v7 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
  8089  		v7.AddArg(x)
  8090  		v5.AddArg(v7)
  8091  		v1.AddArg(v5)
  8092  		v0.AddArg(v1)
  8093  		v8 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
  8094  		v8.AuxInt = 32 + umagic(32, c).s - 1
  8095  		v0.AddArg(v8)
  8096  		v.AddArg(v0)
  8097  		return true
  8098  	}
  8099  	return false
  8100  }
// rewriteValuegeneric_OpDiv64_0 applies the generic rewrite rules for
// Div64 (signed 64-bit division). The rules are tried strictly in order;
// the first one whose pattern and condition match rewrites v in place and
// returns true. Each rule is described by the generated match/cond/result
// comment preceding it (from gen/generic.rules). NOTE(review): smagic
// appears to compute "magic number" constants for strength-reducing a
// division by a constant into multiply+shift — confirm against magic.go.
func rewriteValuegeneric_OpDiv64_0(v *Value) bool {
	b := v.Block
	config := b.Func.Config
	typ := &b.Func.Config.Types
	// Rule 1: fold a division of two constants (divide-by-zero is left alone).
	// match: (Div64 (Const64 [c]) (Const64 [d]))
	// cond: d != 0
	// result: (Const64 [c/d])
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpConst64 {
			break
		}
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst64 {
			break
		}
		d := v_1.AuxInt
		if !(d != 0) {
			break
		}
		v.reset(OpConst64)
		v.AuxInt = c / d
		return true
	}
	// Rule 2: non-negative dividend / power of two -> unsigned shift right.
	// match: (Div64 n (Const64 [c]))
	// cond: isNonNegative(n) && isPowerOfTwo(c)
	// result: (Rsh64Ux64 n (Const64 <typ.UInt64> [log2(c)]))
	for {
		_ = v.Args[1]
		n := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpConst64 {
			break
		}
		c := v_1.AuxInt
		if !(isNonNegative(n) && isPowerOfTwo(c)) {
			break
		}
		v.reset(OpRsh64Ux64)
		v.AddArg(n)
		v0 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
		v0.AuxInt = log2(c)
		v.AddArg(v0)
		return true
	}
	// Rule 3: non-negative dividend / MinInt64 is always 0.
	// match: (Div64 n (Const64 [-1<<63]))
	// cond: isNonNegative(n)
	// result: (Const64 [0])
	for {
		_ = v.Args[1]
		n := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpConst64 {
			break
		}
		if v_1.AuxInt != -1<<63 {
			break
		}
		if !(isNonNegative(n)) {
			break
		}
		v.reset(OpConst64)
		v.AuxInt = 0
		return true
	}
	// Rule 4: negative divisor (except MinInt64, whose negation overflows)
	// -> divide by -c and negate the result.
	// match: (Div64 <t> n (Const64 [c]))
	// cond: c < 0 && c != -1<<63
	// result: (Neg64 (Div64 <t> n (Const64 <t> [-c])))
	for {
		t := v.Type
		_ = v.Args[1]
		n := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpConst64 {
			break
		}
		c := v_1.AuxInt
		if !(c < 0 && c != -1<<63) {
			break
		}
		v.reset(OpNeg64)
		v0 := b.NewValue0(v.Pos, OpDiv64, t)
		v0.AddArg(n)
		v1 := b.NewValue0(v.Pos, OpConst64, t)
		v1.AuxInt = -c
		v0.AddArg(v1)
		v.AddArg(v0)
		return true
	}
	// Rule 5: division by MinInt64 — the quotient is 1 only when x equals
	// MinInt64, which (x & -x) >> 63 computes without a branch.
	// match: (Div64 <t> x (Const64 [-1<<63]))
	// cond:
	// result: (Rsh64Ux64 (And64 <t> x (Neg64 <t> x)) (Const64 <typ.UInt64> [63]))
	for {
		t := v.Type
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpConst64 {
			break
		}
		if v_1.AuxInt != -1<<63 {
			break
		}
		v.reset(OpRsh64Ux64)
		v0 := b.NewValue0(v.Pos, OpAnd64, t)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpNeg64, t)
		v1.AddArg(x)
		v0.AddArg(v1)
		v.AddArg(v0)
		v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
		v2.AuxInt = 63
		v.AddArg(v2)
		return true
	}
	// Rule 6: signed division by a power of two — add a rounding fixup
	// (c-1 when n is negative) before the arithmetic shift.
	// match: (Div64 <t> n (Const64 [c]))
	// cond: isPowerOfTwo(c)
	// result: (Rsh64x64 (Add64 <t> n (Rsh64Ux64 <t> (Rsh64x64 <t> n (Const64 <typ.UInt64> [63])) (Const64 <typ.UInt64> [64-log2(c)]))) (Const64 <typ.UInt64> [log2(c)]))
	for {
		t := v.Type
		_ = v.Args[1]
		n := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpConst64 {
			break
		}
		c := v_1.AuxInt
		if !(isPowerOfTwo(c)) {
			break
		}
		v.reset(OpRsh64x64)
		v0 := b.NewValue0(v.Pos, OpAdd64, t)
		v0.AddArg(n)
		v1 := b.NewValue0(v.Pos, OpRsh64Ux64, t)
		v2 := b.NewValue0(v.Pos, OpRsh64x64, t)
		v2.AddArg(n)
		v3 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
		v3.AuxInt = 63
		v2.AddArg(v3)
		v1.AddArg(v2)
		v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
		v4.AuxInt = 64 - log2(c)
		v1.AddArg(v4)
		v0.AddArg(v1)
		v.AddArg(v0)
		v5 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
		v5.AuxInt = log2(c)
		v.AddArg(v5)
		return true
	}
	// Rule 7: magic-number division, even-magic case (m/2 fits the multiply;
	// requires a high-multiply instruction).
	// match: (Div64 <t> x (Const64 [c]))
	// cond: smagicOK(64,c) && smagic(64,c).m&1 == 0 && config.useHmul
	// result: (Sub64 <t> (Rsh64x64 <t> (Hmul64 <t> (Const64 <typ.UInt64> [int64(smagic(64,c).m/2)]) x) (Const64 <typ.UInt64> [smagic(64,c).s-1])) (Rsh64x64 <t> x (Const64 <typ.UInt64> [63])))
	for {
		t := v.Type
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpConst64 {
			break
		}
		c := v_1.AuxInt
		if !(smagicOK(64, c) && smagic(64, c).m&1 == 0 && config.useHmul) {
			break
		}
		v.reset(OpSub64)
		v.Type = t
		v0 := b.NewValue0(v.Pos, OpRsh64x64, t)
		v1 := b.NewValue0(v.Pos, OpHmul64, t)
		v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
		v2.AuxInt = int64(smagic(64, c).m / 2)
		v1.AddArg(v2)
		v1.AddArg(x)
		v0.AddArg(v1)
		v3 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
		v3.AuxInt = smagic(64, c).s - 1
		v0.AddArg(v3)
		v.AddArg(v0)
		v4 := b.NewValue0(v.Pos, OpRsh64x64, t)
		v4.AddArg(x)
		v5 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
		v5.AuxInt = 63
		v4.AddArg(v5)
		v.AddArg(v4)
		return true
	}
	// Rule 8: magic-number division, odd-magic case (adds x after the
	// high multiply to compensate; requires a high-multiply instruction).
	// match: (Div64 <t> x (Const64 [c]))
	// cond: smagicOK(64,c) && smagic(64,c).m&1 != 0 && config.useHmul
	// result: (Sub64 <t> (Rsh64x64 <t> (Add64 <t> (Hmul64 <t> (Const64 <typ.UInt64> [int64(smagic(64,c).m)]) x) x) (Const64 <typ.UInt64> [smagic(64,c).s])) (Rsh64x64 <t> x (Const64 <typ.UInt64> [63])))
	for {
		t := v.Type
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpConst64 {
			break
		}
		c := v_1.AuxInt
		if !(smagicOK(64, c) && smagic(64, c).m&1 != 0 && config.useHmul) {
			break
		}
		v.reset(OpSub64)
		v.Type = t
		v0 := b.NewValue0(v.Pos, OpRsh64x64, t)
		v1 := b.NewValue0(v.Pos, OpAdd64, t)
		v2 := b.NewValue0(v.Pos, OpHmul64, t)
		v3 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
		v3.AuxInt = int64(smagic(64, c).m)
		v2.AddArg(v3)
		v2.AddArg(x)
		v1.AddArg(v2)
		v1.AddArg(x)
		v0.AddArg(v1)
		v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
		v4.AuxInt = smagic(64, c).s
		v0.AddArg(v4)
		v.AddArg(v0)
		v5 := b.NewValue0(v.Pos, OpRsh64x64, t)
		v5.AddArg(x)
		v6 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
		v6.AuxInt = 63
		v5.AddArg(v6)
		v.AddArg(v5)
		return true
	}
	return false
}
  8330  func rewriteValuegeneric_OpDiv64F_0(v *Value) bool {
  8331  	b := v.Block
  8332  	// match: (Div64F (Const64F [c]) (Const64F [d]))
  8333  	// cond:
  8334  	// result: (Const64F [auxFrom64F(auxTo64F(c) / auxTo64F(d))])
  8335  	for {
  8336  		_ = v.Args[1]
  8337  		v_0 := v.Args[0]
  8338  		if v_0.Op != OpConst64F {
  8339  			break
  8340  		}
  8341  		c := v_0.AuxInt
  8342  		v_1 := v.Args[1]
  8343  		if v_1.Op != OpConst64F {
  8344  			break
  8345  		}
  8346  		d := v_1.AuxInt
  8347  		v.reset(OpConst64F)
  8348  		v.AuxInt = auxFrom64F(auxTo64F(c) / auxTo64F(d))
  8349  		return true
  8350  	}
  8351  	// match: (Div64F x (Const64F <t> [c]))
  8352  	// cond: reciprocalExact64(auxTo64F(c))
  8353  	// result: (Mul64F x (Const64F <t> [auxFrom64F(1/auxTo64F(c))]))
  8354  	for {
  8355  		_ = v.Args[1]
  8356  		x := v.Args[0]
  8357  		v_1 := v.Args[1]
  8358  		if v_1.Op != OpConst64F {
  8359  			break
  8360  		}
  8361  		t := v_1.Type
  8362  		c := v_1.AuxInt
  8363  		if !(reciprocalExact64(auxTo64F(c))) {
  8364  			break
  8365  		}
  8366  		v.reset(OpMul64F)
  8367  		v.AddArg(x)
  8368  		v0 := b.NewValue0(v.Pos, OpConst64F, t)
  8369  		v0.AuxInt = auxFrom64F(1 / auxTo64F(c))
  8370  		v.AddArg(v0)
  8371  		return true
  8372  	}
  8373  	return false
  8374  }
// rewriteValuegeneric_OpDiv64u_0 applies the generic rewrite rules for
// Div64u (unsigned 64-bit division). Rules are tried strictly in order;
// the first one whose pattern and condition match rewrites v in place and
// returns true. NOTE(review): umagic appears to compute "magic number"
// constants for turning division by a constant into multiply+shift —
// confirm against magic.go.
func rewriteValuegeneric_OpDiv64u_0(v *Value) bool {
	b := v.Block
	config := b.Func.Config
	typ := &b.Func.Config.Types
	// Rule 1: fold a division of two constants (divide-by-zero is left alone).
	// match: (Div64u (Const64 [c]) (Const64 [d]))
	// cond: d != 0
	// result: (Const64 [int64(uint64(c)/uint64(d))])
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpConst64 {
			break
		}
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst64 {
			break
		}
		d := v_1.AuxInt
		if !(d != 0) {
			break
		}
		v.reset(OpConst64)
		v.AuxInt = int64(uint64(c) / uint64(d))
		return true
	}
	// Rule 2: unsigned division by a power of two -> shift right.
	// match: (Div64u n (Const64 [c]))
	// cond: isPowerOfTwo(c)
	// result: (Rsh64Ux64 n (Const64 <typ.UInt64> [log2(c)]))
	for {
		_ = v.Args[1]
		n := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpConst64 {
			break
		}
		c := v_1.AuxInt
		if !(isPowerOfTwo(c)) {
			break
		}
		v.reset(OpRsh64Ux64)
		v.AddArg(n)
		v0 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
		v0.AuxInt = log2(c)
		v.AddArg(v0)
		return true
	}
	// Rule 3: division by 1<<63 (stored as -1<<63 in the signed AuxInt)
	// is a shift by 63; isPowerOfTwo rejects this value, so it needs its
	// own rule.
	// match: (Div64u n (Const64 [-1<<63]))
	// cond:
	// result: (Rsh64Ux64 n (Const64 <typ.UInt64> [63]))
	for {
		_ = v.Args[1]
		n := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpConst64 {
			break
		}
		if v_1.AuxInt != -1<<63 {
			break
		}
		v.reset(OpRsh64Ux64)
		v.AddArg(n)
		v0 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
		v0.AuxInt = 63
		v.AddArg(v0)
		return true
	}
	// Rule 4: magic-number division, even-magic case; requires an
	// unsigned high-multiply instruction.
	// match: (Div64u x (Const64 [c]))
	// cond: umagicOK(64, c) && config.RegSize == 8 && umagic(64,c).m&1 == 0 && config.useHmul
	// result: (Rsh64Ux64 <typ.UInt64> (Hmul64u <typ.UInt64> (Const64 <typ.UInt64> [int64(1<<63+umagic(64,c).m/2)]) x) (Const64 <typ.UInt64> [umagic(64,c).s-1]))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpConst64 {
			break
		}
		c := v_1.AuxInt
		if !(umagicOK(64, c) && config.RegSize == 8 && umagic(64, c).m&1 == 0 && config.useHmul) {
			break
		}
		v.reset(OpRsh64Ux64)
		v.Type = typ.UInt64
		v0 := b.NewValue0(v.Pos, OpHmul64u, typ.UInt64)
		v1 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
		v1.AuxInt = int64(1<<63 + umagic(64, c).m/2)
		v0.AddArg(v1)
		v0.AddArg(x)
		v.AddArg(v0)
		v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
		v2.AuxInt = umagic(64, c).s - 1
		v.AddArg(v2)
		return true
	}
	// Rule 5: magic-number division for an even divisor — pre-shift x by
	// one to keep the magic multiply in range; requires high-multiply.
	// match: (Div64u x (Const64 [c]))
	// cond: umagicOK(64, c) && config.RegSize == 8 && c&1 == 0 && config.useHmul
	// result: (Rsh64Ux64 <typ.UInt64> (Hmul64u <typ.UInt64> (Const64 <typ.UInt64> [int64(1<<63+(umagic(64,c).m+1)/2)]) (Rsh64Ux64 <typ.UInt64> x (Const64 <typ.UInt64> [1]))) (Const64 <typ.UInt64> [umagic(64,c).s-2]))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpConst64 {
			break
		}
		c := v_1.AuxInt
		if !(umagicOK(64, c) && config.RegSize == 8 && c&1 == 0 && config.useHmul) {
			break
		}
		v.reset(OpRsh64Ux64)
		v.Type = typ.UInt64
		v0 := b.NewValue0(v.Pos, OpHmul64u, typ.UInt64)
		v1 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
		v1.AuxInt = int64(1<<63 + (umagic(64, c).m+1)/2)
		v0.AddArg(v1)
		v2 := b.NewValue0(v.Pos, OpRsh64Ux64, typ.UInt64)
		v2.AddArg(x)
		v3 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
		v3.AuxInt = 1
		v2.AddArg(v3)
		v0.AddArg(v2)
		v.AddArg(v0)
		v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
		v4.AuxInt = umagic(64, c).s - 2
		v.AddArg(v4)
		return true
	}
	// Rule 6: general magic-number division using an averaging op to
	// recover the lost high bit; requires both Avg and Hmul support.
	// match: (Div64u x (Const64 [c]))
	// cond: umagicOK(64, c) && config.RegSize == 8 && config.useAvg && config.useHmul
	// result: (Rsh64Ux64 <typ.UInt64> (Avg64u x (Hmul64u <typ.UInt64> (Const64 <typ.UInt64> [int64(umagic(64,c).m)]) x)) (Const64 <typ.UInt64> [umagic(64,c).s-1]))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpConst64 {
			break
		}
		c := v_1.AuxInt
		if !(umagicOK(64, c) && config.RegSize == 8 && config.useAvg && config.useHmul) {
			break
		}
		v.reset(OpRsh64Ux64)
		v.Type = typ.UInt64
		v0 := b.NewValue0(v.Pos, OpAvg64u, typ.UInt64)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpHmul64u, typ.UInt64)
		v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
		v2.AuxInt = int64(umagic(64, c).m)
		v1.AddArg(v2)
		v1.AddArg(x)
		v0.AddArg(v1)
		v.AddArg(v0)
		v3 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
		v3.AuxInt = umagic(64, c).s - 1
		v.AddArg(v3)
		return true
	}
	return false
}
// rewriteValuegeneric_OpDiv8_0 applies the generic rewrite rules for
// Div8 (signed 8-bit division). Rules are tried strictly in order; the
// first one whose pattern and condition match rewrites v in place and
// returns true. The magic-number rule widens to 32 bits, where the
// multiply always fits in a machine register.
func rewriteValuegeneric_OpDiv8_0(v *Value) bool {
	b := v.Block
	typ := &b.Func.Config.Types
	// Rule 1: fold a division of two constants (divide-by-zero is left alone).
	// match: (Div8 (Const8 [c]) (Const8 [d]))
	// cond: d != 0
	// result: (Const8 [int64(int8(c)/int8(d))])
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpConst8 {
			break
		}
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst8 {
			break
		}
		d := v_1.AuxInt
		if !(d != 0) {
			break
		}
		v.reset(OpConst8)
		v.AuxInt = int64(int8(c) / int8(d))
		return true
	}
	// Rule 2: non-negative dividend / power of two -> unsigned shift.
	// c is masked to 8 bits before the power-of-two test because the
	// AuxInt may be sign-extended.
	// match: (Div8 n (Const8 [c]))
	// cond: isNonNegative(n) && isPowerOfTwo(c&0xff)
	// result: (Rsh8Ux64 n (Const64 <typ.UInt64> [log2(c&0xff)]))
	for {
		_ = v.Args[1]
		n := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpConst8 {
			break
		}
		c := v_1.AuxInt
		if !(isNonNegative(n) && isPowerOfTwo(c&0xff)) {
			break
		}
		v.reset(OpRsh8Ux64)
		v.AddArg(n)
		v0 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
		v0.AuxInt = log2(c & 0xff)
		v.AddArg(v0)
		return true
	}
	// Rule 3: negative divisor (except MinInt8, whose negation overflows)
	// -> divide by -c and negate the result.
	// match: (Div8 <t> n (Const8 [c]))
	// cond: c < 0 && c != -1<<7
	// result: (Neg8 (Div8 <t> n (Const8 <t> [-c])))
	for {
		t := v.Type
		_ = v.Args[1]
		n := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpConst8 {
			break
		}
		c := v_1.AuxInt
		if !(c < 0 && c != -1<<7) {
			break
		}
		v.reset(OpNeg8)
		v0 := b.NewValue0(v.Pos, OpDiv8, t)
		v0.AddArg(n)
		v1 := b.NewValue0(v.Pos, OpConst8, t)
		v1.AuxInt = -c
		v0.AddArg(v1)
		v.AddArg(v0)
		return true
	}
	// Rule 4: division by MinInt8 — the quotient is 1 only when x equals
	// MinInt8, which (x & -x) >> 7 computes without a branch.
	// match: (Div8 <t> x (Const8 [-1<<7 ]))
	// cond:
	// result: (Rsh8Ux64 (And8 <t> x (Neg8 <t> x)) (Const64 <typ.UInt64> [7 ]))
	for {
		t := v.Type
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpConst8 {
			break
		}
		if v_1.AuxInt != -1<<7 {
			break
		}
		v.reset(OpRsh8Ux64)
		v0 := b.NewValue0(v.Pos, OpAnd8, t)
		v0.AddArg(x)
		v1 := b.NewValue0(v.Pos, OpNeg8, t)
		v1.AddArg(x)
		v0.AddArg(v1)
		v.AddArg(v0)
		v2 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
		v2.AuxInt = 7
		v.AddArg(v2)
		return true
	}
	// Rule 5: signed division by a power of two — add a rounding fixup
	// (c-1 when n is negative) before the arithmetic shift.
	// match: (Div8 <t> n (Const8 [c]))
	// cond: isPowerOfTwo(c)
	// result: (Rsh8x64 (Add8 <t> n (Rsh8Ux64 <t> (Rsh8x64 <t> n (Const64 <typ.UInt64> [ 7])) (Const64 <typ.UInt64> [ 8-log2(c)]))) (Const64 <typ.UInt64> [log2(c)]))
	for {
		t := v.Type
		_ = v.Args[1]
		n := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpConst8 {
			break
		}
		c := v_1.AuxInt
		if !(isPowerOfTwo(c)) {
			break
		}
		v.reset(OpRsh8x64)
		v0 := b.NewValue0(v.Pos, OpAdd8, t)
		v0.AddArg(n)
		v1 := b.NewValue0(v.Pos, OpRsh8Ux64, t)
		v2 := b.NewValue0(v.Pos, OpRsh8x64, t)
		v2.AddArg(n)
		v3 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
		v3.AuxInt = 7
		v2.AddArg(v3)
		v1.AddArg(v2)
		v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
		v4.AuxInt = 8 - log2(c)
		v1.AddArg(v4)
		v0.AddArg(v1)
		v.AddArg(v0)
		v5 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
		v5.AuxInt = log2(c)
		v.AddArg(v5)
		return true
	}
	// Rule 6: magic-number division, widened to a 32-bit multiply.
	// match: (Div8 <t> x (Const8 [c]))
	// cond: smagicOK(8,c)
	// result: (Sub8 <t> (Rsh32x64 <t> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int64(smagic(8,c).m)]) (SignExt8to32 x)) (Const64 <typ.UInt64> [8+smagic(8,c).s])) (Rsh32x64 <t> (SignExt8to32 x) (Const64 <typ.UInt64> [31])))
	for {
		t := v.Type
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpConst8 {
			break
		}
		c := v_1.AuxInt
		if !(smagicOK(8, c)) {
			break
		}
		v.reset(OpSub8)
		v.Type = t
		v0 := b.NewValue0(v.Pos, OpRsh32x64, t)
		v1 := b.NewValue0(v.Pos, OpMul32, typ.UInt32)
		v2 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
		v2.AuxInt = int64(smagic(8, c).m)
		v1.AddArg(v2)
		v3 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32)
		v3.AddArg(x)
		v1.AddArg(v3)
		v0.AddArg(v1)
		v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
		v4.AuxInt = 8 + smagic(8, c).s
		v0.AddArg(v4)
		v.AddArg(v0)
		v5 := b.NewValue0(v.Pos, OpRsh32x64, t)
		v6 := b.NewValue0(v.Pos, OpSignExt8to32, typ.Int32)
		v6.AddArg(x)
		v5.AddArg(v6)
		v7 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
		v7.AuxInt = 31
		v5.AddArg(v7)
		v.AddArg(v5)
		return true
	}
	return false
}
  8706  func rewriteValuegeneric_OpDiv8u_0(v *Value) bool {
  8707  	b := v.Block
  8708  	typ := &b.Func.Config.Types
  8709  	// match: (Div8u (Const8 [c]) (Const8 [d]))
  8710  	// cond: d != 0
  8711  	// result: (Const8 [int64(int8(uint8(c)/uint8(d)))])
  8712  	for {
  8713  		_ = v.Args[1]
  8714  		v_0 := v.Args[0]
  8715  		if v_0.Op != OpConst8 {
  8716  			break
  8717  		}
  8718  		c := v_0.AuxInt
  8719  		v_1 := v.Args[1]
  8720  		if v_1.Op != OpConst8 {
  8721  			break
  8722  		}
  8723  		d := v_1.AuxInt
  8724  		if !(d != 0) {
  8725  			break
  8726  		}
  8727  		v.reset(OpConst8)
  8728  		v.AuxInt = int64(int8(uint8(c) / uint8(d)))
  8729  		return true
  8730  	}
  8731  	// match: (Div8u n (Const8 [c]))
  8732  	// cond: isPowerOfTwo(c&0xff)
  8733  	// result: (Rsh8Ux64 n (Const64 <typ.UInt64> [log2(c&0xff)]))
  8734  	for {
  8735  		_ = v.Args[1]
  8736  		n := v.Args[0]
  8737  		v_1 := v.Args[1]
  8738  		if v_1.Op != OpConst8 {
  8739  			break
  8740  		}
  8741  		c := v_1.AuxInt
  8742  		if !(isPowerOfTwo(c & 0xff)) {
  8743  			break
  8744  		}
  8745  		v.reset(OpRsh8Ux64)
  8746  		v.AddArg(n)
  8747  		v0 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
  8748  		v0.AuxInt = log2(c & 0xff)
  8749  		v.AddArg(v0)
  8750  		return true
  8751  	}
  8752  	// match: (Div8u x (Const8 [c]))
  8753  	// cond: umagicOK(8, c)
  8754  	// result: (Trunc32to8 (Rsh32Ux64 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int64(1<<8+umagic(8,c).m)]) (ZeroExt8to32 x)) (Const64 <typ.UInt64> [8+umagic(8,c).s])))
  8755  	for {
  8756  		_ = v.Args[1]
  8757  		x := v.Args[0]
  8758  		v_1 := v.Args[1]
  8759  		if v_1.Op != OpConst8 {
  8760  			break
  8761  		}
  8762  		c := v_1.AuxInt
  8763  		if !(umagicOK(8, c)) {
  8764  			break
  8765  		}
  8766  		v.reset(OpTrunc32to8)
  8767  		v0 := b.NewValue0(v.Pos, OpRsh32Ux64, typ.UInt32)
  8768  		v1 := b.NewValue0(v.Pos, OpMul32, typ.UInt32)
  8769  		v2 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
  8770  		v2.AuxInt = int64(1<<8 + umagic(8, c).m)
  8771  		v1.AddArg(v2)
  8772  		v3 := b.NewValue0(v.Pos, OpZeroExt8to32, typ.UInt32)
  8773  		v3.AddArg(x)
  8774  		v1.AddArg(v3)
  8775  		v0.AddArg(v1)
  8776  		v4 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
  8777  		v4.AuxInt = 8 + umagic(8, c).s
  8778  		v0.AddArg(v4)
  8779  		v.AddArg(v0)
  8780  		return true
  8781  	}
  8782  	return false
  8783  }
  8784  func rewriteValuegeneric_OpEq16_0(v *Value) bool {
  8785  	b := v.Block
  8786  	config := b.Func.Config
  8787  	typ := &b.Func.Config.Types
  8788  	// match: (Eq16 x x)
  8789  	// cond:
  8790  	// result: (ConstBool [1])
  8791  	for {
  8792  		x := v.Args[1]
  8793  		if x != v.Args[0] {
  8794  			break
  8795  		}
  8796  		v.reset(OpConstBool)
  8797  		v.AuxInt = 1
  8798  		return true
  8799  	}
  8800  	// match: (Eq16 (Const16 <t> [c]) (Add16 (Const16 <t> [d]) x))
  8801  	// cond:
  8802  	// result: (Eq16 (Const16 <t> [int64(int16(c-d))]) x)
  8803  	for {
  8804  		_ = v.Args[1]
  8805  		v_0 := v.Args[0]
  8806  		if v_0.Op != OpConst16 {
  8807  			break
  8808  		}
  8809  		t := v_0.Type
  8810  		c := v_0.AuxInt
  8811  		v_1 := v.Args[1]
  8812  		if v_1.Op != OpAdd16 {
  8813  			break
  8814  		}
  8815  		x := v_1.Args[1]
  8816  		v_1_0 := v_1.Args[0]
  8817  		if v_1_0.Op != OpConst16 {
  8818  			break
  8819  		}
  8820  		if v_1_0.Type != t {
  8821  			break
  8822  		}
  8823  		d := v_1_0.AuxInt
  8824  		v.reset(OpEq16)
  8825  		v0 := b.NewValue0(v.Pos, OpConst16, t)
  8826  		v0.AuxInt = int64(int16(c - d))
  8827  		v.AddArg(v0)
  8828  		v.AddArg(x)
  8829  		return true
  8830  	}
  8831  	// match: (Eq16 (Const16 <t> [c]) (Add16 x (Const16 <t> [d])))
  8832  	// cond:
  8833  	// result: (Eq16 (Const16 <t> [int64(int16(c-d))]) x)
  8834  	for {
  8835  		_ = v.Args[1]
  8836  		v_0 := v.Args[0]
  8837  		if v_0.Op != OpConst16 {
  8838  			break
  8839  		}
  8840  		t := v_0.Type
  8841  		c := v_0.AuxInt
  8842  		v_1 := v.Args[1]
  8843  		if v_1.Op != OpAdd16 {
  8844  			break
  8845  		}
  8846  		_ = v_1.Args[1]
  8847  		x := v_1.Args[0]
  8848  		v_1_1 := v_1.Args[1]
  8849  		if v_1_1.Op != OpConst16 {
  8850  			break
  8851  		}
  8852  		if v_1_1.Type != t {
  8853  			break
  8854  		}
  8855  		d := v_1_1.AuxInt
  8856  		v.reset(OpEq16)
  8857  		v0 := b.NewValue0(v.Pos, OpConst16, t)
  8858  		v0.AuxInt = int64(int16(c - d))
  8859  		v.AddArg(v0)
  8860  		v.AddArg(x)
  8861  		return true
  8862  	}
  8863  	// match: (Eq16 (Add16 (Const16 <t> [d]) x) (Const16 <t> [c]))
  8864  	// cond:
  8865  	// result: (Eq16 (Const16 <t> [int64(int16(c-d))]) x)
  8866  	for {
  8867  		_ = v.Args[1]
  8868  		v_0 := v.Args[0]
  8869  		if v_0.Op != OpAdd16 {
  8870  			break
  8871  		}
  8872  		x := v_0.Args[1]
  8873  		v_0_0 := v_0.Args[0]
  8874  		if v_0_0.Op != OpConst16 {
  8875  			break
  8876  		}
  8877  		t := v_0_0.Type
  8878  		d := v_0_0.AuxInt
  8879  		v_1 := v.Args[1]
  8880  		if v_1.Op != OpConst16 {
  8881  			break
  8882  		}
  8883  		if v_1.Type != t {
  8884  			break
  8885  		}
  8886  		c := v_1.AuxInt
  8887  		v.reset(OpEq16)
  8888  		v0 := b.NewValue0(v.Pos, OpConst16, t)
  8889  		v0.AuxInt = int64(int16(c - d))
  8890  		v.AddArg(v0)
  8891  		v.AddArg(x)
  8892  		return true
  8893  	}
  8894  	// match: (Eq16 (Add16 x (Const16 <t> [d])) (Const16 <t> [c]))
  8895  	// cond:
  8896  	// result: (Eq16 (Const16 <t> [int64(int16(c-d))]) x)
  8897  	for {
  8898  		_ = v.Args[1]
  8899  		v_0 := v.Args[0]
  8900  		if v_0.Op != OpAdd16 {
  8901  			break
  8902  		}
  8903  		_ = v_0.Args[1]
  8904  		x := v_0.Args[0]
  8905  		v_0_1 := v_0.Args[1]
  8906  		if v_0_1.Op != OpConst16 {
  8907  			break
  8908  		}
  8909  		t := v_0_1.Type
  8910  		d := v_0_1.AuxInt
  8911  		v_1 := v.Args[1]
  8912  		if v_1.Op != OpConst16 {
  8913  			break
  8914  		}
  8915  		if v_1.Type != t {
  8916  			break
  8917  		}
  8918  		c := v_1.AuxInt
  8919  		v.reset(OpEq16)
  8920  		v0 := b.NewValue0(v.Pos, OpConst16, t)
  8921  		v0.AuxInt = int64(int16(c - d))
  8922  		v.AddArg(v0)
  8923  		v.AddArg(x)
  8924  		return true
  8925  	}
  8926  	// match: (Eq16 (Const16 [c]) (Const16 [d]))
  8927  	// cond:
  8928  	// result: (ConstBool [b2i(c == d)])
  8929  	for {
  8930  		_ = v.Args[1]
  8931  		v_0 := v.Args[0]
  8932  		if v_0.Op != OpConst16 {
  8933  			break
  8934  		}
  8935  		c := v_0.AuxInt
  8936  		v_1 := v.Args[1]
  8937  		if v_1.Op != OpConst16 {
  8938  			break
  8939  		}
  8940  		d := v_1.AuxInt
  8941  		v.reset(OpConstBool)
  8942  		v.AuxInt = b2i(c == d)
  8943  		return true
  8944  	}
  8945  	// match: (Eq16 (Const16 [d]) (Const16 [c]))
  8946  	// cond:
  8947  	// result: (ConstBool [b2i(c == d)])
  8948  	for {
  8949  		_ = v.Args[1]
  8950  		v_0 := v.Args[0]
  8951  		if v_0.Op != OpConst16 {
  8952  			break
  8953  		}
  8954  		d := v_0.AuxInt
  8955  		v_1 := v.Args[1]
  8956  		if v_1.Op != OpConst16 {
  8957  			break
  8958  		}
  8959  		c := v_1.AuxInt
  8960  		v.reset(OpConstBool)
  8961  		v.AuxInt = b2i(c == d)
  8962  		return true
  8963  	}
  8964  	// match: (Eq16 (Mod16u x (Const16 [c])) (Const16 [0]))
  8965  	// cond: x.Op != OpConst16 && udivisibleOK(16,c) && !hasSmallRotate(config)
  8966  	// result: (Eq32 (Mod32u <typ.UInt32> (ZeroExt16to32 <typ.UInt32> x) (Const32 <typ.UInt32> [c&0xffff])) (Const32 <typ.UInt32> [0]))
  8967  	for {
  8968  		_ = v.Args[1]
  8969  		v_0 := v.Args[0]
  8970  		if v_0.Op != OpMod16u {
  8971  			break
  8972  		}
  8973  		_ = v_0.Args[1]
  8974  		x := v_0.Args[0]
  8975  		v_0_1 := v_0.Args[1]
  8976  		if v_0_1.Op != OpConst16 {
  8977  			break
  8978  		}
  8979  		c := v_0_1.AuxInt
  8980  		v_1 := v.Args[1]
  8981  		if v_1.Op != OpConst16 {
  8982  			break
  8983  		}
  8984  		if v_1.AuxInt != 0 {
  8985  			break
  8986  		}
  8987  		if !(x.Op != OpConst16 && udivisibleOK(16, c) && !hasSmallRotate(config)) {
  8988  			break
  8989  		}
  8990  		v.reset(OpEq32)
  8991  		v0 := b.NewValue0(v.Pos, OpMod32u, typ.UInt32)
  8992  		v1 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
  8993  		v1.AddArg(x)
  8994  		v0.AddArg(v1)
  8995  		v2 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
  8996  		v2.AuxInt = c & 0xffff
  8997  		v0.AddArg(v2)
  8998  		v.AddArg(v0)
  8999  		v3 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
  9000  		v3.AuxInt = 0
  9001  		v.AddArg(v3)
  9002  		return true
  9003  	}
  9004  	// match: (Eq16 (Const16 [0]) (Mod16u x (Const16 [c])))
  9005  	// cond: x.Op != OpConst16 && udivisibleOK(16,c) && !hasSmallRotate(config)
  9006  	// result: (Eq32 (Mod32u <typ.UInt32> (ZeroExt16to32 <typ.UInt32> x) (Const32 <typ.UInt32> [c&0xffff])) (Const32 <typ.UInt32> [0]))
  9007  	for {
  9008  		_ = v.Args[1]
  9009  		v_0 := v.Args[0]
  9010  		if v_0.Op != OpConst16 {
  9011  			break
  9012  		}
  9013  		if v_0.AuxInt != 0 {
  9014  			break
  9015  		}
  9016  		v_1 := v.Args[1]
  9017  		if v_1.Op != OpMod16u {
  9018  			break
  9019  		}
  9020  		_ = v_1.Args[1]
  9021  		x := v_1.Args[0]
  9022  		v_1_1 := v_1.Args[1]
  9023  		if v_1_1.Op != OpConst16 {
  9024  			break
  9025  		}
  9026  		c := v_1_1.AuxInt
  9027  		if !(x.Op != OpConst16 && udivisibleOK(16, c) && !hasSmallRotate(config)) {
  9028  			break
  9029  		}
  9030  		v.reset(OpEq32)
  9031  		v0 := b.NewValue0(v.Pos, OpMod32u, typ.UInt32)
  9032  		v1 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
  9033  		v1.AddArg(x)
  9034  		v0.AddArg(v1)
  9035  		v2 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
  9036  		v2.AuxInt = c & 0xffff
  9037  		v0.AddArg(v2)
  9038  		v.AddArg(v0)
  9039  		v3 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
  9040  		v3.AuxInt = 0
  9041  		v.AddArg(v3)
  9042  		return true
  9043  	}
  9044  	// match: (Eq16 (Mod16 x (Const16 [c])) (Const16 [0]))
  9045  	// cond: x.Op != OpConst16 && sdivisibleOK(16,c) && !hasSmallRotate(config)
  9046  	// result: (Eq32 (Mod32 <typ.Int32> (SignExt16to32 <typ.Int32> x) (Const32 <typ.Int32> [c])) (Const32 <typ.Int32> [0]))
  9047  	for {
  9048  		_ = v.Args[1]
  9049  		v_0 := v.Args[0]
  9050  		if v_0.Op != OpMod16 {
  9051  			break
  9052  		}
  9053  		_ = v_0.Args[1]
  9054  		x := v_0.Args[0]
  9055  		v_0_1 := v_0.Args[1]
  9056  		if v_0_1.Op != OpConst16 {
  9057  			break
  9058  		}
  9059  		c := v_0_1.AuxInt
  9060  		v_1 := v.Args[1]
  9061  		if v_1.Op != OpConst16 {
  9062  			break
  9063  		}
  9064  		if v_1.AuxInt != 0 {
  9065  			break
  9066  		}
  9067  		if !(x.Op != OpConst16 && sdivisibleOK(16, c) && !hasSmallRotate(config)) {
  9068  			break
  9069  		}
  9070  		v.reset(OpEq32)
  9071  		v0 := b.NewValue0(v.Pos, OpMod32, typ.Int32)
  9072  		v1 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32)
  9073  		v1.AddArg(x)
  9074  		v0.AddArg(v1)
  9075  		v2 := b.NewValue0(v.Pos, OpConst32, typ.Int32)
  9076  		v2.AuxInt = c
  9077  		v0.AddArg(v2)
  9078  		v.AddArg(v0)
  9079  		v3 := b.NewValue0(v.Pos, OpConst32, typ.Int32)
  9080  		v3.AuxInt = 0
  9081  		v.AddArg(v3)
  9082  		return true
  9083  	}
  9084  	return false
  9085  }
// rewriteValuegeneric_OpEq16_10 applies the second batch of generic rewrite
// rules for Eq16 (rules 10..19, generated from gen/generic.rules).
// Each rule is a `for { ... }` block that pattern-matches the operand tree of
// v, checks the rule's side condition, and on success rewrites v in place and
// returns true. A `break` anywhere in a block abandons that rule and falls
// through to the next one; if no rule fires, the function returns false.
//
// The bulk of this batch recognizes the strength-reduced form of
// x == (x/c)*c (i.e. "x is divisible by c") that earlier division rewrites
// produce via magic-number multiplication, and replaces it with the cheaper
// rotate-and-compare divisibility test:
//	(Leq16U (RotateLeft16 (Mul16 (Const16 [udivisible(16,c).m]) x)
//	                      (Const16 [16-udivisible(16,c).k]))
//	        (Const16 [udivisible(16,c).max]))
// The many near-identical blocks are the commutative permutations of the same
// pattern (Mul16/Mul64/Mul32 argument orders and Eq16 operand order).
// NOTE(review): the exact semantics of umagic/udivisible/sdivisibleOK live in
// rewrite helpers outside this file — presumably the Granlund–Montgomery /
// Lemire-style divisibility constants; confirm there if modifying conditions.
func rewriteValuegeneric_OpEq16_10(v *Value) bool {
	b := v.Block
	config := b.Func.Config
	typ := &b.Func.Config.Types
	// Signed-modulo widening: 16-bit Mod has no small-rotate support on this
	// target, so test divisibility in 32 bits instead.
	// match: (Eq16 (Const16 [0]) (Mod16 x (Const16 [c])))
	// cond: x.Op != OpConst16 && sdivisibleOK(16,c) && !hasSmallRotate(config)
	// result: (Eq32 (Mod32 <typ.Int32> (SignExt16to32 <typ.Int32> x) (Const32 <typ.Int32> [c])) (Const32 <typ.Int32> [0]))
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpConst16 {
			break
		}
		if v_0.AuxInt != 0 {
			break
		}
		v_1 := v.Args[1]
		if v_1.Op != OpMod16 {
			break
		}
		_ = v_1.Args[1]
		x := v_1.Args[0]
		v_1_1 := v_1.Args[1]
		if v_1_1.Op != OpConst16 {
			break
		}
		c := v_1_1.AuxInt
		if !(x.Op != OpConst16 && sdivisibleOK(16, c) && !hasSmallRotate(config)) {
			break
		}
		v.reset(OpEq32)
		v0 := b.NewValue0(v.Pos, OpMod32, typ.Int32)
		v1 := b.NewValue0(v.Pos, OpSignExt16to32, typ.Int32)
		v1.AddArg(x)
		v0.AddArg(v1)
		v2 := b.NewValue0(v.Pos, OpConst32, typ.Int32)
		v2.AuxInt = c
		v0.AddArg(v2)
		v.AddArg(v0)
		v3 := b.NewValue0(v.Pos, OpConst32, typ.Int32)
		v3.AuxInt = 0
		v.AddArg(v3)
		return true
	}
	// Divisibility via 64-bit magic multiply: matches x == c*((m*zext(x))>>s)
	// where m/s are the umagic(16,c) constants planted by the Div16u rewrite.
	// The "opt" pass is excluded so the division rewrite gets a chance to run
	// first (see gen/generic.rules for the rationale).
	// match: (Eq16 x (Mul16 (Const16 [c]) (Trunc64to16 (Rsh64Ux64 mul:(Mul64 (Const64 [m]) (ZeroExt16to64 x)) (Const64 [s])))))
	// cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<16+umagic(16,c).m) && s == 16+umagic(16,c).s && x.Op != OpConst16 && udivisibleOK(16,c)
	// result: (Leq16U (RotateLeft16 <typ.UInt16> (Mul16 <typ.UInt16> (Const16 <typ.UInt16> [int64(int16(udivisible(16,c).m))]) x) (Const16 <typ.UInt16> [int64(16-udivisible(16,c).k)]) ) (Const16 <typ.UInt16> [int64(int16(udivisible(16,c).max))]) )
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpMul16 {
			break
		}
		_ = v_1.Args[1]
		v_1_0 := v_1.Args[0]
		if v_1_0.Op != OpConst16 {
			break
		}
		c := v_1_0.AuxInt
		v_1_1 := v_1.Args[1]
		if v_1_1.Op != OpTrunc64to16 {
			break
		}
		v_1_1_0 := v_1_1.Args[0]
		if v_1_1_0.Op != OpRsh64Ux64 {
			break
		}
		_ = v_1_1_0.Args[1]
		mul := v_1_1_0.Args[0]
		if mul.Op != OpMul64 {
			break
		}
		_ = mul.Args[1]
		mul_0 := mul.Args[0]
		if mul_0.Op != OpConst64 {
			break
		}
		m := mul_0.AuxInt
		mul_1 := mul.Args[1]
		if mul_1.Op != OpZeroExt16to64 {
			break
		}
		if x != mul_1.Args[0] {
			break
		}
		v_1_1_0_1 := v_1_1_0.Args[1]
		if v_1_1_0_1.Op != OpConst64 {
			break
		}
		s := v_1_1_0_1.AuxInt
		if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<16+umagic(16, c).m) && s == 16+umagic(16, c).s && x.Op != OpConst16 && udivisibleOK(16, c)) {
			break
		}
		v.reset(OpLeq16U)
		v0 := b.NewValue0(v.Pos, OpRotateLeft16, typ.UInt16)
		v1 := b.NewValue0(v.Pos, OpMul16, typ.UInt16)
		v2 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
		v2.AuxInt = int64(int16(udivisible(16, c).m))
		v1.AddArg(v2)
		v1.AddArg(x)
		v0.AddArg(v1)
		v3 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
		v3.AuxInt = int64(16 - udivisible(16, c).k)
		v0.AddArg(v3)
		v.AddArg(v0)
		v4 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
		v4.AuxInt = int64(int16(udivisible(16, c).max))
		v.AddArg(v4)
		return true
	}
	// Same rule, inner Mul64 operands commuted.
	// match: (Eq16 x (Mul16 (Const16 [c]) (Trunc64to16 (Rsh64Ux64 mul:(Mul64 (ZeroExt16to64 x) (Const64 [m])) (Const64 [s])))))
	// cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<16+umagic(16,c).m) && s == 16+umagic(16,c).s && x.Op != OpConst16 && udivisibleOK(16,c)
	// result: (Leq16U (RotateLeft16 <typ.UInt16> (Mul16 <typ.UInt16> (Const16 <typ.UInt16> [int64(int16(udivisible(16,c).m))]) x) (Const16 <typ.UInt16> [int64(16-udivisible(16,c).k)]) ) (Const16 <typ.UInt16> [int64(int16(udivisible(16,c).max))]) )
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpMul16 {
			break
		}
		_ = v_1.Args[1]
		v_1_0 := v_1.Args[0]
		if v_1_0.Op != OpConst16 {
			break
		}
		c := v_1_0.AuxInt
		v_1_1 := v_1.Args[1]
		if v_1_1.Op != OpTrunc64to16 {
			break
		}
		v_1_1_0 := v_1_1.Args[0]
		if v_1_1_0.Op != OpRsh64Ux64 {
			break
		}
		_ = v_1_1_0.Args[1]
		mul := v_1_1_0.Args[0]
		if mul.Op != OpMul64 {
			break
		}
		_ = mul.Args[1]
		mul_0 := mul.Args[0]
		if mul_0.Op != OpZeroExt16to64 {
			break
		}
		if x != mul_0.Args[0] {
			break
		}
		mul_1 := mul.Args[1]
		if mul_1.Op != OpConst64 {
			break
		}
		m := mul_1.AuxInt
		v_1_1_0_1 := v_1_1_0.Args[1]
		if v_1_1_0_1.Op != OpConst64 {
			break
		}
		s := v_1_1_0_1.AuxInt
		if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<16+umagic(16, c).m) && s == 16+umagic(16, c).s && x.Op != OpConst16 && udivisibleOK(16, c)) {
			break
		}
		v.reset(OpLeq16U)
		v0 := b.NewValue0(v.Pos, OpRotateLeft16, typ.UInt16)
		v1 := b.NewValue0(v.Pos, OpMul16, typ.UInt16)
		v2 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
		v2.AuxInt = int64(int16(udivisible(16, c).m))
		v1.AddArg(v2)
		v1.AddArg(x)
		v0.AddArg(v1)
		v3 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
		v3.AuxInt = int64(16 - udivisible(16, c).k)
		v0.AddArg(v3)
		v.AddArg(v0)
		v4 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
		v4.AuxInt = int64(int16(udivisible(16, c).max))
		v.AddArg(v4)
		return true
	}
	// Same rule, outer Mul16 operands commuted (Const16 second).
	// match: (Eq16 x (Mul16 (Trunc64to16 (Rsh64Ux64 mul:(Mul64 (Const64 [m]) (ZeroExt16to64 x)) (Const64 [s]))) (Const16 [c])))
	// cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<16+umagic(16,c).m) && s == 16+umagic(16,c).s && x.Op != OpConst16 && udivisibleOK(16,c)
	// result: (Leq16U (RotateLeft16 <typ.UInt16> (Mul16 <typ.UInt16> (Const16 <typ.UInt16> [int64(int16(udivisible(16,c).m))]) x) (Const16 <typ.UInt16> [int64(16-udivisible(16,c).k)]) ) (Const16 <typ.UInt16> [int64(int16(udivisible(16,c).max))]) )
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpMul16 {
			break
		}
		_ = v_1.Args[1]
		v_1_0 := v_1.Args[0]
		if v_1_0.Op != OpTrunc64to16 {
			break
		}
		v_1_0_0 := v_1_0.Args[0]
		if v_1_0_0.Op != OpRsh64Ux64 {
			break
		}
		_ = v_1_0_0.Args[1]
		mul := v_1_0_0.Args[0]
		if mul.Op != OpMul64 {
			break
		}
		_ = mul.Args[1]
		mul_0 := mul.Args[0]
		if mul_0.Op != OpConst64 {
			break
		}
		m := mul_0.AuxInt
		mul_1 := mul.Args[1]
		if mul_1.Op != OpZeroExt16to64 {
			break
		}
		if x != mul_1.Args[0] {
			break
		}
		v_1_0_0_1 := v_1_0_0.Args[1]
		if v_1_0_0_1.Op != OpConst64 {
			break
		}
		s := v_1_0_0_1.AuxInt
		v_1_1 := v_1.Args[1]
		if v_1_1.Op != OpConst16 {
			break
		}
		c := v_1_1.AuxInt
		if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<16+umagic(16, c).m) && s == 16+umagic(16, c).s && x.Op != OpConst16 && udivisibleOK(16, c)) {
			break
		}
		v.reset(OpLeq16U)
		v0 := b.NewValue0(v.Pos, OpRotateLeft16, typ.UInt16)
		v1 := b.NewValue0(v.Pos, OpMul16, typ.UInt16)
		v2 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
		v2.AuxInt = int64(int16(udivisible(16, c).m))
		v1.AddArg(v2)
		v1.AddArg(x)
		v0.AddArg(v1)
		v3 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
		v3.AuxInt = int64(16 - udivisible(16, c).k)
		v0.AddArg(v3)
		v.AddArg(v0)
		v4 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
		v4.AuxInt = int64(int16(udivisible(16, c).max))
		v.AddArg(v4)
		return true
	}
	// Same rule, both Mul16 and Mul64 operands commuted.
	// match: (Eq16 x (Mul16 (Trunc64to16 (Rsh64Ux64 mul:(Mul64 (ZeroExt16to64 x) (Const64 [m])) (Const64 [s]))) (Const16 [c])))
	// cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<16+umagic(16,c).m) && s == 16+umagic(16,c).s && x.Op != OpConst16 && udivisibleOK(16,c)
	// result: (Leq16U (RotateLeft16 <typ.UInt16> (Mul16 <typ.UInt16> (Const16 <typ.UInt16> [int64(int16(udivisible(16,c).m))]) x) (Const16 <typ.UInt16> [int64(16-udivisible(16,c).k)]) ) (Const16 <typ.UInt16> [int64(int16(udivisible(16,c).max))]) )
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpMul16 {
			break
		}
		_ = v_1.Args[1]
		v_1_0 := v_1.Args[0]
		if v_1_0.Op != OpTrunc64to16 {
			break
		}
		v_1_0_0 := v_1_0.Args[0]
		if v_1_0_0.Op != OpRsh64Ux64 {
			break
		}
		_ = v_1_0_0.Args[1]
		mul := v_1_0_0.Args[0]
		if mul.Op != OpMul64 {
			break
		}
		_ = mul.Args[1]
		mul_0 := mul.Args[0]
		if mul_0.Op != OpZeroExt16to64 {
			break
		}
		if x != mul_0.Args[0] {
			break
		}
		mul_1 := mul.Args[1]
		if mul_1.Op != OpConst64 {
			break
		}
		m := mul_1.AuxInt
		v_1_0_0_1 := v_1_0_0.Args[1]
		if v_1_0_0_1.Op != OpConst64 {
			break
		}
		s := v_1_0_0_1.AuxInt
		v_1_1 := v_1.Args[1]
		if v_1_1.Op != OpConst16 {
			break
		}
		c := v_1_1.AuxInt
		if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<16+umagic(16, c).m) && s == 16+umagic(16, c).s && x.Op != OpConst16 && udivisibleOK(16, c)) {
			break
		}
		v.reset(OpLeq16U)
		v0 := b.NewValue0(v.Pos, OpRotateLeft16, typ.UInt16)
		v1 := b.NewValue0(v.Pos, OpMul16, typ.UInt16)
		v2 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
		v2.AuxInt = int64(int16(udivisible(16, c).m))
		v1.AddArg(v2)
		v1.AddArg(x)
		v0.AddArg(v1)
		v3 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
		v3.AuxInt = int64(16 - udivisible(16, c).k)
		v0.AddArg(v3)
		v.AddArg(v0)
		v4 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
		v4.AuxInt = int64(int16(udivisible(16, c).max))
		v.AddArg(v4)
		return true
	}
	// Same rule, Eq16 operands commuted (multiply tree on the left).
	// match: (Eq16 (Mul16 (Const16 [c]) (Trunc64to16 (Rsh64Ux64 mul:(Mul64 (Const64 [m]) (ZeroExt16to64 x)) (Const64 [s])))) x)
	// cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<16+umagic(16,c).m) && s == 16+umagic(16,c).s && x.Op != OpConst16 && udivisibleOK(16,c)
	// result: (Leq16U (RotateLeft16 <typ.UInt16> (Mul16 <typ.UInt16> (Const16 <typ.UInt16> [int64(int16(udivisible(16,c).m))]) x) (Const16 <typ.UInt16> [int64(16-udivisible(16,c).k)]) ) (Const16 <typ.UInt16> [int64(int16(udivisible(16,c).max))]) )
	for {
		x := v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpMul16 {
			break
		}
		_ = v_0.Args[1]
		v_0_0 := v_0.Args[0]
		if v_0_0.Op != OpConst16 {
			break
		}
		c := v_0_0.AuxInt
		v_0_1 := v_0.Args[1]
		if v_0_1.Op != OpTrunc64to16 {
			break
		}
		v_0_1_0 := v_0_1.Args[0]
		if v_0_1_0.Op != OpRsh64Ux64 {
			break
		}
		_ = v_0_1_0.Args[1]
		mul := v_0_1_0.Args[0]
		if mul.Op != OpMul64 {
			break
		}
		_ = mul.Args[1]
		mul_0 := mul.Args[0]
		if mul_0.Op != OpConst64 {
			break
		}
		m := mul_0.AuxInt
		mul_1 := mul.Args[1]
		if mul_1.Op != OpZeroExt16to64 {
			break
		}
		if x != mul_1.Args[0] {
			break
		}
		v_0_1_0_1 := v_0_1_0.Args[1]
		if v_0_1_0_1.Op != OpConst64 {
			break
		}
		s := v_0_1_0_1.AuxInt
		if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<16+umagic(16, c).m) && s == 16+umagic(16, c).s && x.Op != OpConst16 && udivisibleOK(16, c)) {
			break
		}
		v.reset(OpLeq16U)
		v0 := b.NewValue0(v.Pos, OpRotateLeft16, typ.UInt16)
		v1 := b.NewValue0(v.Pos, OpMul16, typ.UInt16)
		v2 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
		v2.AuxInt = int64(int16(udivisible(16, c).m))
		v1.AddArg(v2)
		v1.AddArg(x)
		v0.AddArg(v1)
		v3 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
		v3.AuxInt = int64(16 - udivisible(16, c).k)
		v0.AddArg(v3)
		v.AddArg(v0)
		v4 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
		v4.AuxInt = int64(int16(udivisible(16, c).max))
		v.AddArg(v4)
		return true
	}
	// Same rule, Eq16 operands commuted and inner Mul64 operands commuted.
	// match: (Eq16 (Mul16 (Const16 [c]) (Trunc64to16 (Rsh64Ux64 mul:(Mul64 (ZeroExt16to64 x) (Const64 [m])) (Const64 [s])))) x)
	// cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<16+umagic(16,c).m) && s == 16+umagic(16,c).s && x.Op != OpConst16 && udivisibleOK(16,c)
	// result: (Leq16U (RotateLeft16 <typ.UInt16> (Mul16 <typ.UInt16> (Const16 <typ.UInt16> [int64(int16(udivisible(16,c).m))]) x) (Const16 <typ.UInt16> [int64(16-udivisible(16,c).k)]) ) (Const16 <typ.UInt16> [int64(int16(udivisible(16,c).max))]) )
	for {
		x := v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpMul16 {
			break
		}
		_ = v_0.Args[1]
		v_0_0 := v_0.Args[0]
		if v_0_0.Op != OpConst16 {
			break
		}
		c := v_0_0.AuxInt
		v_0_1 := v_0.Args[1]
		if v_0_1.Op != OpTrunc64to16 {
			break
		}
		v_0_1_0 := v_0_1.Args[0]
		if v_0_1_0.Op != OpRsh64Ux64 {
			break
		}
		_ = v_0_1_0.Args[1]
		mul := v_0_1_0.Args[0]
		if mul.Op != OpMul64 {
			break
		}
		_ = mul.Args[1]
		mul_0 := mul.Args[0]
		if mul_0.Op != OpZeroExt16to64 {
			break
		}
		if x != mul_0.Args[0] {
			break
		}
		mul_1 := mul.Args[1]
		if mul_1.Op != OpConst64 {
			break
		}
		m := mul_1.AuxInt
		v_0_1_0_1 := v_0_1_0.Args[1]
		if v_0_1_0_1.Op != OpConst64 {
			break
		}
		s := v_0_1_0_1.AuxInt
		if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<16+umagic(16, c).m) && s == 16+umagic(16, c).s && x.Op != OpConst16 && udivisibleOK(16, c)) {
			break
		}
		v.reset(OpLeq16U)
		v0 := b.NewValue0(v.Pos, OpRotateLeft16, typ.UInt16)
		v1 := b.NewValue0(v.Pos, OpMul16, typ.UInt16)
		v2 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
		v2.AuxInt = int64(int16(udivisible(16, c).m))
		v1.AddArg(v2)
		v1.AddArg(x)
		v0.AddArg(v1)
		v3 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
		v3.AuxInt = int64(16 - udivisible(16, c).k)
		v0.AddArg(v3)
		v.AddArg(v0)
		v4 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
		v4.AuxInt = int64(int16(udivisible(16, c).max))
		v.AddArg(v4)
		return true
	}
	// Same rule, Eq16 operands commuted and outer Mul16 operands commuted.
	// match: (Eq16 (Mul16 (Trunc64to16 (Rsh64Ux64 mul:(Mul64 (Const64 [m]) (ZeroExt16to64 x)) (Const64 [s]))) (Const16 [c])) x)
	// cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<16+umagic(16,c).m) && s == 16+umagic(16,c).s && x.Op != OpConst16 && udivisibleOK(16,c)
	// result: (Leq16U (RotateLeft16 <typ.UInt16> (Mul16 <typ.UInt16> (Const16 <typ.UInt16> [int64(int16(udivisible(16,c).m))]) x) (Const16 <typ.UInt16> [int64(16-udivisible(16,c).k)]) ) (Const16 <typ.UInt16> [int64(int16(udivisible(16,c).max))]) )
	for {
		x := v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpMul16 {
			break
		}
		_ = v_0.Args[1]
		v_0_0 := v_0.Args[0]
		if v_0_0.Op != OpTrunc64to16 {
			break
		}
		v_0_0_0 := v_0_0.Args[0]
		if v_0_0_0.Op != OpRsh64Ux64 {
			break
		}
		_ = v_0_0_0.Args[1]
		mul := v_0_0_0.Args[0]
		if mul.Op != OpMul64 {
			break
		}
		_ = mul.Args[1]
		mul_0 := mul.Args[0]
		if mul_0.Op != OpConst64 {
			break
		}
		m := mul_0.AuxInt
		mul_1 := mul.Args[1]
		if mul_1.Op != OpZeroExt16to64 {
			break
		}
		if x != mul_1.Args[0] {
			break
		}
		v_0_0_0_1 := v_0_0_0.Args[1]
		if v_0_0_0_1.Op != OpConst64 {
			break
		}
		s := v_0_0_0_1.AuxInt
		v_0_1 := v_0.Args[1]
		if v_0_1.Op != OpConst16 {
			break
		}
		c := v_0_1.AuxInt
		if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<16+umagic(16, c).m) && s == 16+umagic(16, c).s && x.Op != OpConst16 && udivisibleOK(16, c)) {
			break
		}
		v.reset(OpLeq16U)
		v0 := b.NewValue0(v.Pos, OpRotateLeft16, typ.UInt16)
		v1 := b.NewValue0(v.Pos, OpMul16, typ.UInt16)
		v2 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
		v2.AuxInt = int64(int16(udivisible(16, c).m))
		v1.AddArg(v2)
		v1.AddArg(x)
		v0.AddArg(v1)
		v3 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
		v3.AuxInt = int64(16 - udivisible(16, c).k)
		v0.AddArg(v3)
		v.AddArg(v0)
		v4 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
		v4.AuxInt = int64(int16(udivisible(16, c).max))
		v.AddArg(v4)
		return true
	}
	// Same rule, all three multiply/operand orders commuted.
	// match: (Eq16 (Mul16 (Trunc64to16 (Rsh64Ux64 mul:(Mul64 (ZeroExt16to64 x) (Const64 [m])) (Const64 [s]))) (Const16 [c])) x)
	// cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<16+umagic(16,c).m) && s == 16+umagic(16,c).s && x.Op != OpConst16 && udivisibleOK(16,c)
	// result: (Leq16U (RotateLeft16 <typ.UInt16> (Mul16 <typ.UInt16> (Const16 <typ.UInt16> [int64(int16(udivisible(16,c).m))]) x) (Const16 <typ.UInt16> [int64(16-udivisible(16,c).k)]) ) (Const16 <typ.UInt16> [int64(int16(udivisible(16,c).max))]) )
	for {
		x := v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpMul16 {
			break
		}
		_ = v_0.Args[1]
		v_0_0 := v_0.Args[0]
		if v_0_0.Op != OpTrunc64to16 {
			break
		}
		v_0_0_0 := v_0_0.Args[0]
		if v_0_0_0.Op != OpRsh64Ux64 {
			break
		}
		_ = v_0_0_0.Args[1]
		mul := v_0_0_0.Args[0]
		if mul.Op != OpMul64 {
			break
		}
		_ = mul.Args[1]
		mul_0 := mul.Args[0]
		if mul_0.Op != OpZeroExt16to64 {
			break
		}
		if x != mul_0.Args[0] {
			break
		}
		mul_1 := mul.Args[1]
		if mul_1.Op != OpConst64 {
			break
		}
		m := mul_1.AuxInt
		v_0_0_0_1 := v_0_0_0.Args[1]
		if v_0_0_0_1.Op != OpConst64 {
			break
		}
		s := v_0_0_0_1.AuxInt
		v_0_1 := v_0.Args[1]
		if v_0_1.Op != OpConst16 {
			break
		}
		c := v_0_1.AuxInt
		if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<16+umagic(16, c).m) && s == 16+umagic(16, c).s && x.Op != OpConst16 && udivisibleOK(16, c)) {
			break
		}
		v.reset(OpLeq16U)
		v0 := b.NewValue0(v.Pos, OpRotateLeft16, typ.UInt16)
		v1 := b.NewValue0(v.Pos, OpMul16, typ.UInt16)
		v2 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
		v2.AuxInt = int64(int16(udivisible(16, c).m))
		v1.AddArg(v2)
		v1.AddArg(x)
		v0.AddArg(v1)
		v3 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
		v3.AuxInt = int64(16 - udivisible(16, c).k)
		v0.AddArg(v3)
		v.AddArg(v0)
		v4 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
		v4.AuxInt = int64(int16(udivisible(16, c).max))
		v.AddArg(v4)
		return true
	}
	// 32-bit variant of the magic-multiply divisibility pattern: targets that
	// compute 16-bit division via a 32-bit multiply use m/2 and s-1 constants
	// (note the halved magic constant and reduced shift in the condition).
	// match: (Eq16 x (Mul16 (Const16 [c]) (Trunc32to16 (Rsh32Ux64 mul:(Mul32 (Const32 [m]) (ZeroExt16to32 x)) (Const64 [s])))))
	// cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<15+umagic(16,c).m/2) && s == 16+umagic(16,c).s-1 && x.Op != OpConst16 && udivisibleOK(16,c)
	// result: (Leq16U (RotateLeft16 <typ.UInt16> (Mul16 <typ.UInt16> (Const16 <typ.UInt16> [int64(int16(udivisible(16,c).m))]) x) (Const16 <typ.UInt16> [int64(16-udivisible(16,c).k)]) ) (Const16 <typ.UInt16> [int64(int16(udivisible(16,c).max))]) )
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpMul16 {
			break
		}
		_ = v_1.Args[1]
		v_1_0 := v_1.Args[0]
		if v_1_0.Op != OpConst16 {
			break
		}
		c := v_1_0.AuxInt
		v_1_1 := v_1.Args[1]
		if v_1_1.Op != OpTrunc32to16 {
			break
		}
		v_1_1_0 := v_1_1.Args[0]
		if v_1_1_0.Op != OpRsh32Ux64 {
			break
		}
		_ = v_1_1_0.Args[1]
		mul := v_1_1_0.Args[0]
		if mul.Op != OpMul32 {
			break
		}
		_ = mul.Args[1]
		mul_0 := mul.Args[0]
		if mul_0.Op != OpConst32 {
			break
		}
		m := mul_0.AuxInt
		mul_1 := mul.Args[1]
		if mul_1.Op != OpZeroExt16to32 {
			break
		}
		if x != mul_1.Args[0] {
			break
		}
		v_1_1_0_1 := v_1_1_0.Args[1]
		if v_1_1_0_1.Op != OpConst64 {
			break
		}
		s := v_1_1_0_1.AuxInt
		if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<15+umagic(16, c).m/2) && s == 16+umagic(16, c).s-1 && x.Op != OpConst16 && udivisibleOK(16, c)) {
			break
		}
		v.reset(OpLeq16U)
		v0 := b.NewValue0(v.Pos, OpRotateLeft16, typ.UInt16)
		v1 := b.NewValue0(v.Pos, OpMul16, typ.UInt16)
		v2 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
		v2.AuxInt = int64(int16(udivisible(16, c).m))
		v1.AddArg(v2)
		v1.AddArg(x)
		v0.AddArg(v1)
		v3 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
		v3.AuxInt = int64(16 - udivisible(16, c).k)
		v0.AddArg(v3)
		v.AddArg(v0)
		v4 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
		v4.AuxInt = int64(int16(udivisible(16, c).max))
		v.AddArg(v4)
		return true
	}
	// No rule matched; other Eq16 batches (OpEq16_20, ...) may still apply.
	return false
}
  9731  func rewriteValuegeneric_OpEq16_20(v *Value) bool {
  9732  	b := v.Block
  9733  	typ := &b.Func.Config.Types
  9734  	// match: (Eq16 x (Mul16 (Const16 [c]) (Trunc32to16 (Rsh32Ux64 mul:(Mul32 (ZeroExt16to32 x) (Const32 [m])) (Const64 [s])))))
  9735  	// cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<15+umagic(16,c).m/2) && s == 16+umagic(16,c).s-1 && x.Op != OpConst16 && udivisibleOK(16,c)
  9736  	// result: (Leq16U (RotateLeft16 <typ.UInt16> (Mul16 <typ.UInt16> (Const16 <typ.UInt16> [int64(int16(udivisible(16,c).m))]) x) (Const16 <typ.UInt16> [int64(16-udivisible(16,c).k)]) ) (Const16 <typ.UInt16> [int64(int16(udivisible(16,c).max))]) )
  9737  	for {
  9738  		_ = v.Args[1]
  9739  		x := v.Args[0]
  9740  		v_1 := v.Args[1]
  9741  		if v_1.Op != OpMul16 {
  9742  			break
  9743  		}
  9744  		_ = v_1.Args[1]
  9745  		v_1_0 := v_1.Args[0]
  9746  		if v_1_0.Op != OpConst16 {
  9747  			break
  9748  		}
  9749  		c := v_1_0.AuxInt
  9750  		v_1_1 := v_1.Args[1]
  9751  		if v_1_1.Op != OpTrunc32to16 {
  9752  			break
  9753  		}
  9754  		v_1_1_0 := v_1_1.Args[0]
  9755  		if v_1_1_0.Op != OpRsh32Ux64 {
  9756  			break
  9757  		}
  9758  		_ = v_1_1_0.Args[1]
  9759  		mul := v_1_1_0.Args[0]
  9760  		if mul.Op != OpMul32 {
  9761  			break
  9762  		}
  9763  		_ = mul.Args[1]
  9764  		mul_0 := mul.Args[0]
  9765  		if mul_0.Op != OpZeroExt16to32 {
  9766  			break
  9767  		}
  9768  		if x != mul_0.Args[0] {
  9769  			break
  9770  		}
  9771  		mul_1 := mul.Args[1]
  9772  		if mul_1.Op != OpConst32 {
  9773  			break
  9774  		}
  9775  		m := mul_1.AuxInt
  9776  		v_1_1_0_1 := v_1_1_0.Args[1]
  9777  		if v_1_1_0_1.Op != OpConst64 {
  9778  			break
  9779  		}
  9780  		s := v_1_1_0_1.AuxInt
  9781  		if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<15+umagic(16, c).m/2) && s == 16+umagic(16, c).s-1 && x.Op != OpConst16 && udivisibleOK(16, c)) {
  9782  			break
  9783  		}
  9784  		v.reset(OpLeq16U)
  9785  		v0 := b.NewValue0(v.Pos, OpRotateLeft16, typ.UInt16)
  9786  		v1 := b.NewValue0(v.Pos, OpMul16, typ.UInt16)
  9787  		v2 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
  9788  		v2.AuxInt = int64(int16(udivisible(16, c).m))
  9789  		v1.AddArg(v2)
  9790  		v1.AddArg(x)
  9791  		v0.AddArg(v1)
  9792  		v3 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
  9793  		v3.AuxInt = int64(16 - udivisible(16, c).k)
  9794  		v0.AddArg(v3)
  9795  		v.AddArg(v0)
  9796  		v4 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
  9797  		v4.AuxInt = int64(int16(udivisible(16, c).max))
  9798  		v.AddArg(v4)
  9799  		return true
  9800  	}
  9801  	// match: (Eq16 x (Mul16 (Trunc32to16 (Rsh32Ux64 mul:(Mul32 (Const32 [m]) (ZeroExt16to32 x)) (Const64 [s]))) (Const16 [c])))
  9802  	// cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<15+umagic(16,c).m/2) && s == 16+umagic(16,c).s-1 && x.Op != OpConst16 && udivisibleOK(16,c)
  9803  	// result: (Leq16U (RotateLeft16 <typ.UInt16> (Mul16 <typ.UInt16> (Const16 <typ.UInt16> [int64(int16(udivisible(16,c).m))]) x) (Const16 <typ.UInt16> [int64(16-udivisible(16,c).k)]) ) (Const16 <typ.UInt16> [int64(int16(udivisible(16,c).max))]) )
  9804  	for {
  9805  		_ = v.Args[1]
  9806  		x := v.Args[0]
  9807  		v_1 := v.Args[1]
  9808  		if v_1.Op != OpMul16 {
  9809  			break
  9810  		}
  9811  		_ = v_1.Args[1]
  9812  		v_1_0 := v_1.Args[0]
  9813  		if v_1_0.Op != OpTrunc32to16 {
  9814  			break
  9815  		}
  9816  		v_1_0_0 := v_1_0.Args[0]
  9817  		if v_1_0_0.Op != OpRsh32Ux64 {
  9818  			break
  9819  		}
  9820  		_ = v_1_0_0.Args[1]
  9821  		mul := v_1_0_0.Args[0]
  9822  		if mul.Op != OpMul32 {
  9823  			break
  9824  		}
  9825  		_ = mul.Args[1]
  9826  		mul_0 := mul.Args[0]
  9827  		if mul_0.Op != OpConst32 {
  9828  			break
  9829  		}
  9830  		m := mul_0.AuxInt
  9831  		mul_1 := mul.Args[1]
  9832  		if mul_1.Op != OpZeroExt16to32 {
  9833  			break
  9834  		}
  9835  		if x != mul_1.Args[0] {
  9836  			break
  9837  		}
  9838  		v_1_0_0_1 := v_1_0_0.Args[1]
  9839  		if v_1_0_0_1.Op != OpConst64 {
  9840  			break
  9841  		}
  9842  		s := v_1_0_0_1.AuxInt
  9843  		v_1_1 := v_1.Args[1]
  9844  		if v_1_1.Op != OpConst16 {
  9845  			break
  9846  		}
  9847  		c := v_1_1.AuxInt
  9848  		if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<15+umagic(16, c).m/2) && s == 16+umagic(16, c).s-1 && x.Op != OpConst16 && udivisibleOK(16, c)) {
  9849  			break
  9850  		}
  9851  		v.reset(OpLeq16U)
  9852  		v0 := b.NewValue0(v.Pos, OpRotateLeft16, typ.UInt16)
  9853  		v1 := b.NewValue0(v.Pos, OpMul16, typ.UInt16)
  9854  		v2 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
  9855  		v2.AuxInt = int64(int16(udivisible(16, c).m))
  9856  		v1.AddArg(v2)
  9857  		v1.AddArg(x)
  9858  		v0.AddArg(v1)
  9859  		v3 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
  9860  		v3.AuxInt = int64(16 - udivisible(16, c).k)
  9861  		v0.AddArg(v3)
  9862  		v.AddArg(v0)
  9863  		v4 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
  9864  		v4.AuxInt = int64(int16(udivisible(16, c).max))
  9865  		v.AddArg(v4)
  9866  		return true
  9867  	}
  9868  	// match: (Eq16 x (Mul16 (Trunc32to16 (Rsh32Ux64 mul:(Mul32 (ZeroExt16to32 x) (Const32 [m])) (Const64 [s]))) (Const16 [c])))
  9869  	// cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<15+umagic(16,c).m/2) && s == 16+umagic(16,c).s-1 && x.Op != OpConst16 && udivisibleOK(16,c)
  9870  	// result: (Leq16U (RotateLeft16 <typ.UInt16> (Mul16 <typ.UInt16> (Const16 <typ.UInt16> [int64(int16(udivisible(16,c).m))]) x) (Const16 <typ.UInt16> [int64(16-udivisible(16,c).k)]) ) (Const16 <typ.UInt16> [int64(int16(udivisible(16,c).max))]) )
  9871  	for {
  9872  		_ = v.Args[1]
  9873  		x := v.Args[0]
  9874  		v_1 := v.Args[1]
  9875  		if v_1.Op != OpMul16 {
  9876  			break
  9877  		}
  9878  		_ = v_1.Args[1]
  9879  		v_1_0 := v_1.Args[0]
  9880  		if v_1_0.Op != OpTrunc32to16 {
  9881  			break
  9882  		}
  9883  		v_1_0_0 := v_1_0.Args[0]
  9884  		if v_1_0_0.Op != OpRsh32Ux64 {
  9885  			break
  9886  		}
  9887  		_ = v_1_0_0.Args[1]
  9888  		mul := v_1_0_0.Args[0]
  9889  		if mul.Op != OpMul32 {
  9890  			break
  9891  		}
  9892  		_ = mul.Args[1]
  9893  		mul_0 := mul.Args[0]
  9894  		if mul_0.Op != OpZeroExt16to32 {
  9895  			break
  9896  		}
  9897  		if x != mul_0.Args[0] {
  9898  			break
  9899  		}
  9900  		mul_1 := mul.Args[1]
  9901  		if mul_1.Op != OpConst32 {
  9902  			break
  9903  		}
  9904  		m := mul_1.AuxInt
  9905  		v_1_0_0_1 := v_1_0_0.Args[1]
  9906  		if v_1_0_0_1.Op != OpConst64 {
  9907  			break
  9908  		}
  9909  		s := v_1_0_0_1.AuxInt
  9910  		v_1_1 := v_1.Args[1]
  9911  		if v_1_1.Op != OpConst16 {
  9912  			break
  9913  		}
  9914  		c := v_1_1.AuxInt
  9915  		if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<15+umagic(16, c).m/2) && s == 16+umagic(16, c).s-1 && x.Op != OpConst16 && udivisibleOK(16, c)) {
  9916  			break
  9917  		}
  9918  		v.reset(OpLeq16U)
  9919  		v0 := b.NewValue0(v.Pos, OpRotateLeft16, typ.UInt16)
  9920  		v1 := b.NewValue0(v.Pos, OpMul16, typ.UInt16)
  9921  		v2 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
  9922  		v2.AuxInt = int64(int16(udivisible(16, c).m))
  9923  		v1.AddArg(v2)
  9924  		v1.AddArg(x)
  9925  		v0.AddArg(v1)
  9926  		v3 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
  9927  		v3.AuxInt = int64(16 - udivisible(16, c).k)
  9928  		v0.AddArg(v3)
  9929  		v.AddArg(v0)
  9930  		v4 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
  9931  		v4.AuxInt = int64(int16(udivisible(16, c).max))
  9932  		v.AddArg(v4)
  9933  		return true
  9934  	}
  9935  	// match: (Eq16 (Mul16 (Const16 [c]) (Trunc32to16 (Rsh32Ux64 mul:(Mul32 (Const32 [m]) (ZeroExt16to32 x)) (Const64 [s])))) x)
  9936  	// cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<15+umagic(16,c).m/2) && s == 16+umagic(16,c).s-1 && x.Op != OpConst16 && udivisibleOK(16,c)
  9937  	// result: (Leq16U (RotateLeft16 <typ.UInt16> (Mul16 <typ.UInt16> (Const16 <typ.UInt16> [int64(int16(udivisible(16,c).m))]) x) (Const16 <typ.UInt16> [int64(16-udivisible(16,c).k)]) ) (Const16 <typ.UInt16> [int64(int16(udivisible(16,c).max))]) )
  9938  	for {
  9939  		x := v.Args[1]
  9940  		v_0 := v.Args[0]
  9941  		if v_0.Op != OpMul16 {
  9942  			break
  9943  		}
  9944  		_ = v_0.Args[1]
  9945  		v_0_0 := v_0.Args[0]
  9946  		if v_0_0.Op != OpConst16 {
  9947  			break
  9948  		}
  9949  		c := v_0_0.AuxInt
  9950  		v_0_1 := v_0.Args[1]
  9951  		if v_0_1.Op != OpTrunc32to16 {
  9952  			break
  9953  		}
  9954  		v_0_1_0 := v_0_1.Args[0]
  9955  		if v_0_1_0.Op != OpRsh32Ux64 {
  9956  			break
  9957  		}
  9958  		_ = v_0_1_0.Args[1]
  9959  		mul := v_0_1_0.Args[0]
  9960  		if mul.Op != OpMul32 {
  9961  			break
  9962  		}
  9963  		_ = mul.Args[1]
  9964  		mul_0 := mul.Args[0]
  9965  		if mul_0.Op != OpConst32 {
  9966  			break
  9967  		}
  9968  		m := mul_0.AuxInt
  9969  		mul_1 := mul.Args[1]
  9970  		if mul_1.Op != OpZeroExt16to32 {
  9971  			break
  9972  		}
  9973  		if x != mul_1.Args[0] {
  9974  			break
  9975  		}
  9976  		v_0_1_0_1 := v_0_1_0.Args[1]
  9977  		if v_0_1_0_1.Op != OpConst64 {
  9978  			break
  9979  		}
  9980  		s := v_0_1_0_1.AuxInt
  9981  		if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<15+umagic(16, c).m/2) && s == 16+umagic(16, c).s-1 && x.Op != OpConst16 && udivisibleOK(16, c)) {
  9982  			break
  9983  		}
  9984  		v.reset(OpLeq16U)
  9985  		v0 := b.NewValue0(v.Pos, OpRotateLeft16, typ.UInt16)
  9986  		v1 := b.NewValue0(v.Pos, OpMul16, typ.UInt16)
  9987  		v2 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
  9988  		v2.AuxInt = int64(int16(udivisible(16, c).m))
  9989  		v1.AddArg(v2)
  9990  		v1.AddArg(x)
  9991  		v0.AddArg(v1)
  9992  		v3 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
  9993  		v3.AuxInt = int64(16 - udivisible(16, c).k)
  9994  		v0.AddArg(v3)
  9995  		v.AddArg(v0)
  9996  		v4 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
  9997  		v4.AuxInt = int64(int16(udivisible(16, c).max))
  9998  		v.AddArg(v4)
  9999  		return true
 10000  	}
 10001  	// match: (Eq16 (Mul16 (Const16 [c]) (Trunc32to16 (Rsh32Ux64 mul:(Mul32 (ZeroExt16to32 x) (Const32 [m])) (Const64 [s])))) x)
 10002  	// cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<15+umagic(16,c).m/2) && s == 16+umagic(16,c).s-1 && x.Op != OpConst16 && udivisibleOK(16,c)
 10003  	// result: (Leq16U (RotateLeft16 <typ.UInt16> (Mul16 <typ.UInt16> (Const16 <typ.UInt16> [int64(int16(udivisible(16,c).m))]) x) (Const16 <typ.UInt16> [int64(16-udivisible(16,c).k)]) ) (Const16 <typ.UInt16> [int64(int16(udivisible(16,c).max))]) )
 10004  	for {
 10005  		x := v.Args[1]
 10006  		v_0 := v.Args[0]
 10007  		if v_0.Op != OpMul16 {
 10008  			break
 10009  		}
 10010  		_ = v_0.Args[1]
 10011  		v_0_0 := v_0.Args[0]
 10012  		if v_0_0.Op != OpConst16 {
 10013  			break
 10014  		}
 10015  		c := v_0_0.AuxInt
 10016  		v_0_1 := v_0.Args[1]
 10017  		if v_0_1.Op != OpTrunc32to16 {
 10018  			break
 10019  		}
 10020  		v_0_1_0 := v_0_1.Args[0]
 10021  		if v_0_1_0.Op != OpRsh32Ux64 {
 10022  			break
 10023  		}
 10024  		_ = v_0_1_0.Args[1]
 10025  		mul := v_0_1_0.Args[0]
 10026  		if mul.Op != OpMul32 {
 10027  			break
 10028  		}
 10029  		_ = mul.Args[1]
 10030  		mul_0 := mul.Args[0]
 10031  		if mul_0.Op != OpZeroExt16to32 {
 10032  			break
 10033  		}
 10034  		if x != mul_0.Args[0] {
 10035  			break
 10036  		}
 10037  		mul_1 := mul.Args[1]
 10038  		if mul_1.Op != OpConst32 {
 10039  			break
 10040  		}
 10041  		m := mul_1.AuxInt
 10042  		v_0_1_0_1 := v_0_1_0.Args[1]
 10043  		if v_0_1_0_1.Op != OpConst64 {
 10044  			break
 10045  		}
 10046  		s := v_0_1_0_1.AuxInt
 10047  		if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<15+umagic(16, c).m/2) && s == 16+umagic(16, c).s-1 && x.Op != OpConst16 && udivisibleOK(16, c)) {
 10048  			break
 10049  		}
 10050  		v.reset(OpLeq16U)
 10051  		v0 := b.NewValue0(v.Pos, OpRotateLeft16, typ.UInt16)
 10052  		v1 := b.NewValue0(v.Pos, OpMul16, typ.UInt16)
 10053  		v2 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
 10054  		v2.AuxInt = int64(int16(udivisible(16, c).m))
 10055  		v1.AddArg(v2)
 10056  		v1.AddArg(x)
 10057  		v0.AddArg(v1)
 10058  		v3 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
 10059  		v3.AuxInt = int64(16 - udivisible(16, c).k)
 10060  		v0.AddArg(v3)
 10061  		v.AddArg(v0)
 10062  		v4 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
 10063  		v4.AuxInt = int64(int16(udivisible(16, c).max))
 10064  		v.AddArg(v4)
 10065  		return true
 10066  	}
 10067  	// match: (Eq16 (Mul16 (Trunc32to16 (Rsh32Ux64 mul:(Mul32 (Const32 [m]) (ZeroExt16to32 x)) (Const64 [s]))) (Const16 [c])) x)
 10068  	// cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<15+umagic(16,c).m/2) && s == 16+umagic(16,c).s-1 && x.Op != OpConst16 && udivisibleOK(16,c)
 10069  	// result: (Leq16U (RotateLeft16 <typ.UInt16> (Mul16 <typ.UInt16> (Const16 <typ.UInt16> [int64(int16(udivisible(16,c).m))]) x) (Const16 <typ.UInt16> [int64(16-udivisible(16,c).k)]) ) (Const16 <typ.UInt16> [int64(int16(udivisible(16,c).max))]) )
 10070  	for {
 10071  		x := v.Args[1]
 10072  		v_0 := v.Args[0]
 10073  		if v_0.Op != OpMul16 {
 10074  			break
 10075  		}
 10076  		_ = v_0.Args[1]
 10077  		v_0_0 := v_0.Args[0]
 10078  		if v_0_0.Op != OpTrunc32to16 {
 10079  			break
 10080  		}
 10081  		v_0_0_0 := v_0_0.Args[0]
 10082  		if v_0_0_0.Op != OpRsh32Ux64 {
 10083  			break
 10084  		}
 10085  		_ = v_0_0_0.Args[1]
 10086  		mul := v_0_0_0.Args[0]
 10087  		if mul.Op != OpMul32 {
 10088  			break
 10089  		}
 10090  		_ = mul.Args[1]
 10091  		mul_0 := mul.Args[0]
 10092  		if mul_0.Op != OpConst32 {
 10093  			break
 10094  		}
 10095  		m := mul_0.AuxInt
 10096  		mul_1 := mul.Args[1]
 10097  		if mul_1.Op != OpZeroExt16to32 {
 10098  			break
 10099  		}
 10100  		if x != mul_1.Args[0] {
 10101  			break
 10102  		}
 10103  		v_0_0_0_1 := v_0_0_0.Args[1]
 10104  		if v_0_0_0_1.Op != OpConst64 {
 10105  			break
 10106  		}
 10107  		s := v_0_0_0_1.AuxInt
 10108  		v_0_1 := v_0.Args[1]
 10109  		if v_0_1.Op != OpConst16 {
 10110  			break
 10111  		}
 10112  		c := v_0_1.AuxInt
 10113  		if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<15+umagic(16, c).m/2) && s == 16+umagic(16, c).s-1 && x.Op != OpConst16 && udivisibleOK(16, c)) {
 10114  			break
 10115  		}
 10116  		v.reset(OpLeq16U)
 10117  		v0 := b.NewValue0(v.Pos, OpRotateLeft16, typ.UInt16)
 10118  		v1 := b.NewValue0(v.Pos, OpMul16, typ.UInt16)
 10119  		v2 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
 10120  		v2.AuxInt = int64(int16(udivisible(16, c).m))
 10121  		v1.AddArg(v2)
 10122  		v1.AddArg(x)
 10123  		v0.AddArg(v1)
 10124  		v3 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
 10125  		v3.AuxInt = int64(16 - udivisible(16, c).k)
 10126  		v0.AddArg(v3)
 10127  		v.AddArg(v0)
 10128  		v4 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
 10129  		v4.AuxInt = int64(int16(udivisible(16, c).max))
 10130  		v.AddArg(v4)
 10131  		return true
 10132  	}
 10133  	// match: (Eq16 (Mul16 (Trunc32to16 (Rsh32Ux64 mul:(Mul32 (ZeroExt16to32 x) (Const32 [m])) (Const64 [s]))) (Const16 [c])) x)
 10134  	// cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<15+umagic(16,c).m/2) && s == 16+umagic(16,c).s-1 && x.Op != OpConst16 && udivisibleOK(16,c)
 10135  	// result: (Leq16U (RotateLeft16 <typ.UInt16> (Mul16 <typ.UInt16> (Const16 <typ.UInt16> [int64(int16(udivisible(16,c).m))]) x) (Const16 <typ.UInt16> [int64(16-udivisible(16,c).k)]) ) (Const16 <typ.UInt16> [int64(int16(udivisible(16,c).max))]) )
 10136  	for {
 10137  		x := v.Args[1]
 10138  		v_0 := v.Args[0]
 10139  		if v_0.Op != OpMul16 {
 10140  			break
 10141  		}
 10142  		_ = v_0.Args[1]
 10143  		v_0_0 := v_0.Args[0]
 10144  		if v_0_0.Op != OpTrunc32to16 {
 10145  			break
 10146  		}
 10147  		v_0_0_0 := v_0_0.Args[0]
 10148  		if v_0_0_0.Op != OpRsh32Ux64 {
 10149  			break
 10150  		}
 10151  		_ = v_0_0_0.Args[1]
 10152  		mul := v_0_0_0.Args[0]
 10153  		if mul.Op != OpMul32 {
 10154  			break
 10155  		}
 10156  		_ = mul.Args[1]
 10157  		mul_0 := mul.Args[0]
 10158  		if mul_0.Op != OpZeroExt16to32 {
 10159  			break
 10160  		}
 10161  		if x != mul_0.Args[0] {
 10162  			break
 10163  		}
 10164  		mul_1 := mul.Args[1]
 10165  		if mul_1.Op != OpConst32 {
 10166  			break
 10167  		}
 10168  		m := mul_1.AuxInt
 10169  		v_0_0_0_1 := v_0_0_0.Args[1]
 10170  		if v_0_0_0_1.Op != OpConst64 {
 10171  			break
 10172  		}
 10173  		s := v_0_0_0_1.AuxInt
 10174  		v_0_1 := v_0.Args[1]
 10175  		if v_0_1.Op != OpConst16 {
 10176  			break
 10177  		}
 10178  		c := v_0_1.AuxInt
 10179  		if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<15+umagic(16, c).m/2) && s == 16+umagic(16, c).s-1 && x.Op != OpConst16 && udivisibleOK(16, c)) {
 10180  			break
 10181  		}
 10182  		v.reset(OpLeq16U)
 10183  		v0 := b.NewValue0(v.Pos, OpRotateLeft16, typ.UInt16)
 10184  		v1 := b.NewValue0(v.Pos, OpMul16, typ.UInt16)
 10185  		v2 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
 10186  		v2.AuxInt = int64(int16(udivisible(16, c).m))
 10187  		v1.AddArg(v2)
 10188  		v1.AddArg(x)
 10189  		v0.AddArg(v1)
 10190  		v3 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
 10191  		v3.AuxInt = int64(16 - udivisible(16, c).k)
 10192  		v0.AddArg(v3)
 10193  		v.AddArg(v0)
 10194  		v4 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
 10195  		v4.AuxInt = int64(int16(udivisible(16, c).max))
 10196  		v.AddArg(v4)
 10197  		return true
 10198  	}
 10199  	// match: (Eq16 x (Mul16 (Const16 [c]) (Trunc32to16 (Rsh32Ux64 mul:(Mul32 (Const32 [m]) (Rsh32Ux64 (ZeroExt16to32 x) (Const64 [1]))) (Const64 [s])))))
 10200  	// cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<15+(umagic(16,c).m+1)/2) && s == 16+umagic(16,c).s-2 && x.Op != OpConst16 && udivisibleOK(16,c)
 10201  	// result: (Leq16U (RotateLeft16 <typ.UInt16> (Mul16 <typ.UInt16> (Const16 <typ.UInt16> [int64(int16(udivisible(16,c).m))]) x) (Const16 <typ.UInt16> [int64(16-udivisible(16,c).k)]) ) (Const16 <typ.UInt16> [int64(int16(udivisible(16,c).max))]) )
 10202  	for {
 10203  		_ = v.Args[1]
 10204  		x := v.Args[0]
 10205  		v_1 := v.Args[1]
 10206  		if v_1.Op != OpMul16 {
 10207  			break
 10208  		}
 10209  		_ = v_1.Args[1]
 10210  		v_1_0 := v_1.Args[0]
 10211  		if v_1_0.Op != OpConst16 {
 10212  			break
 10213  		}
 10214  		c := v_1_0.AuxInt
 10215  		v_1_1 := v_1.Args[1]
 10216  		if v_1_1.Op != OpTrunc32to16 {
 10217  			break
 10218  		}
 10219  		v_1_1_0 := v_1_1.Args[0]
 10220  		if v_1_1_0.Op != OpRsh32Ux64 {
 10221  			break
 10222  		}
 10223  		_ = v_1_1_0.Args[1]
 10224  		mul := v_1_1_0.Args[0]
 10225  		if mul.Op != OpMul32 {
 10226  			break
 10227  		}
 10228  		_ = mul.Args[1]
 10229  		mul_0 := mul.Args[0]
 10230  		if mul_0.Op != OpConst32 {
 10231  			break
 10232  		}
 10233  		m := mul_0.AuxInt
 10234  		mul_1 := mul.Args[1]
 10235  		if mul_1.Op != OpRsh32Ux64 {
 10236  			break
 10237  		}
 10238  		_ = mul_1.Args[1]
 10239  		mul_1_0 := mul_1.Args[0]
 10240  		if mul_1_0.Op != OpZeroExt16to32 {
 10241  			break
 10242  		}
 10243  		if x != mul_1_0.Args[0] {
 10244  			break
 10245  		}
 10246  		mul_1_1 := mul_1.Args[1]
 10247  		if mul_1_1.Op != OpConst64 {
 10248  			break
 10249  		}
 10250  		if mul_1_1.AuxInt != 1 {
 10251  			break
 10252  		}
 10253  		v_1_1_0_1 := v_1_1_0.Args[1]
 10254  		if v_1_1_0_1.Op != OpConst64 {
 10255  			break
 10256  		}
 10257  		s := v_1_1_0_1.AuxInt
 10258  		if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<15+(umagic(16, c).m+1)/2) && s == 16+umagic(16, c).s-2 && x.Op != OpConst16 && udivisibleOK(16, c)) {
 10259  			break
 10260  		}
 10261  		v.reset(OpLeq16U)
 10262  		v0 := b.NewValue0(v.Pos, OpRotateLeft16, typ.UInt16)
 10263  		v1 := b.NewValue0(v.Pos, OpMul16, typ.UInt16)
 10264  		v2 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
 10265  		v2.AuxInt = int64(int16(udivisible(16, c).m))
 10266  		v1.AddArg(v2)
 10267  		v1.AddArg(x)
 10268  		v0.AddArg(v1)
 10269  		v3 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
 10270  		v3.AuxInt = int64(16 - udivisible(16, c).k)
 10271  		v0.AddArg(v3)
 10272  		v.AddArg(v0)
 10273  		v4 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
 10274  		v4.AuxInt = int64(int16(udivisible(16, c).max))
 10275  		v.AddArg(v4)
 10276  		return true
 10277  	}
 10278  	// match: (Eq16 x (Mul16 (Const16 [c]) (Trunc32to16 (Rsh32Ux64 mul:(Mul32 (Rsh32Ux64 (ZeroExt16to32 x) (Const64 [1])) (Const32 [m])) (Const64 [s])))))
 10279  	// cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<15+(umagic(16,c).m+1)/2) && s == 16+umagic(16,c).s-2 && x.Op != OpConst16 && udivisibleOK(16,c)
 10280  	// result: (Leq16U (RotateLeft16 <typ.UInt16> (Mul16 <typ.UInt16> (Const16 <typ.UInt16> [int64(int16(udivisible(16,c).m))]) x) (Const16 <typ.UInt16> [int64(16-udivisible(16,c).k)]) ) (Const16 <typ.UInt16> [int64(int16(udivisible(16,c).max))]) )
 10281  	for {
 10282  		_ = v.Args[1]
 10283  		x := v.Args[0]
 10284  		v_1 := v.Args[1]
 10285  		if v_1.Op != OpMul16 {
 10286  			break
 10287  		}
 10288  		_ = v_1.Args[1]
 10289  		v_1_0 := v_1.Args[0]
 10290  		if v_1_0.Op != OpConst16 {
 10291  			break
 10292  		}
 10293  		c := v_1_0.AuxInt
 10294  		v_1_1 := v_1.Args[1]
 10295  		if v_1_1.Op != OpTrunc32to16 {
 10296  			break
 10297  		}
 10298  		v_1_1_0 := v_1_1.Args[0]
 10299  		if v_1_1_0.Op != OpRsh32Ux64 {
 10300  			break
 10301  		}
 10302  		_ = v_1_1_0.Args[1]
 10303  		mul := v_1_1_0.Args[0]
 10304  		if mul.Op != OpMul32 {
 10305  			break
 10306  		}
 10307  		_ = mul.Args[1]
 10308  		mul_0 := mul.Args[0]
 10309  		if mul_0.Op != OpRsh32Ux64 {
 10310  			break
 10311  		}
 10312  		_ = mul_0.Args[1]
 10313  		mul_0_0 := mul_0.Args[0]
 10314  		if mul_0_0.Op != OpZeroExt16to32 {
 10315  			break
 10316  		}
 10317  		if x != mul_0_0.Args[0] {
 10318  			break
 10319  		}
 10320  		mul_0_1 := mul_0.Args[1]
 10321  		if mul_0_1.Op != OpConst64 {
 10322  			break
 10323  		}
 10324  		if mul_0_1.AuxInt != 1 {
 10325  			break
 10326  		}
 10327  		mul_1 := mul.Args[1]
 10328  		if mul_1.Op != OpConst32 {
 10329  			break
 10330  		}
 10331  		m := mul_1.AuxInt
 10332  		v_1_1_0_1 := v_1_1_0.Args[1]
 10333  		if v_1_1_0_1.Op != OpConst64 {
 10334  			break
 10335  		}
 10336  		s := v_1_1_0_1.AuxInt
 10337  		if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<15+(umagic(16, c).m+1)/2) && s == 16+umagic(16, c).s-2 && x.Op != OpConst16 && udivisibleOK(16, c)) {
 10338  			break
 10339  		}
 10340  		v.reset(OpLeq16U)
 10341  		v0 := b.NewValue0(v.Pos, OpRotateLeft16, typ.UInt16)
 10342  		v1 := b.NewValue0(v.Pos, OpMul16, typ.UInt16)
 10343  		v2 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
 10344  		v2.AuxInt = int64(int16(udivisible(16, c).m))
 10345  		v1.AddArg(v2)
 10346  		v1.AddArg(x)
 10347  		v0.AddArg(v1)
 10348  		v3 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
 10349  		v3.AuxInt = int64(16 - udivisible(16, c).k)
 10350  		v0.AddArg(v3)
 10351  		v.AddArg(v0)
 10352  		v4 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
 10353  		v4.AuxInt = int64(int16(udivisible(16, c).max))
 10354  		v.AddArg(v4)
 10355  		return true
 10356  	}
 10357  	// match: (Eq16 x (Mul16 (Trunc32to16 (Rsh32Ux64 mul:(Mul32 (Const32 [m]) (Rsh32Ux64 (ZeroExt16to32 x) (Const64 [1]))) (Const64 [s]))) (Const16 [c])))
 10358  	// cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<15+(umagic(16,c).m+1)/2) && s == 16+umagic(16,c).s-2 && x.Op != OpConst16 && udivisibleOK(16,c)
 10359  	// result: (Leq16U (RotateLeft16 <typ.UInt16> (Mul16 <typ.UInt16> (Const16 <typ.UInt16> [int64(int16(udivisible(16,c).m))]) x) (Const16 <typ.UInt16> [int64(16-udivisible(16,c).k)]) ) (Const16 <typ.UInt16> [int64(int16(udivisible(16,c).max))]) )
 10360  	for {
 10361  		_ = v.Args[1]
 10362  		x := v.Args[0]
 10363  		v_1 := v.Args[1]
 10364  		if v_1.Op != OpMul16 {
 10365  			break
 10366  		}
 10367  		_ = v_1.Args[1]
 10368  		v_1_0 := v_1.Args[0]
 10369  		if v_1_0.Op != OpTrunc32to16 {
 10370  			break
 10371  		}
 10372  		v_1_0_0 := v_1_0.Args[0]
 10373  		if v_1_0_0.Op != OpRsh32Ux64 {
 10374  			break
 10375  		}
 10376  		_ = v_1_0_0.Args[1]
 10377  		mul := v_1_0_0.Args[0]
 10378  		if mul.Op != OpMul32 {
 10379  			break
 10380  		}
 10381  		_ = mul.Args[1]
 10382  		mul_0 := mul.Args[0]
 10383  		if mul_0.Op != OpConst32 {
 10384  			break
 10385  		}
 10386  		m := mul_0.AuxInt
 10387  		mul_1 := mul.Args[1]
 10388  		if mul_1.Op != OpRsh32Ux64 {
 10389  			break
 10390  		}
 10391  		_ = mul_1.Args[1]
 10392  		mul_1_0 := mul_1.Args[0]
 10393  		if mul_1_0.Op != OpZeroExt16to32 {
 10394  			break
 10395  		}
 10396  		if x != mul_1_0.Args[0] {
 10397  			break
 10398  		}
 10399  		mul_1_1 := mul_1.Args[1]
 10400  		if mul_1_1.Op != OpConst64 {
 10401  			break
 10402  		}
 10403  		if mul_1_1.AuxInt != 1 {
 10404  			break
 10405  		}
 10406  		v_1_0_0_1 := v_1_0_0.Args[1]
 10407  		if v_1_0_0_1.Op != OpConst64 {
 10408  			break
 10409  		}
 10410  		s := v_1_0_0_1.AuxInt
 10411  		v_1_1 := v_1.Args[1]
 10412  		if v_1_1.Op != OpConst16 {
 10413  			break
 10414  		}
 10415  		c := v_1_1.AuxInt
 10416  		if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<15+(umagic(16, c).m+1)/2) && s == 16+umagic(16, c).s-2 && x.Op != OpConst16 && udivisibleOK(16, c)) {
 10417  			break
 10418  		}
 10419  		v.reset(OpLeq16U)
 10420  		v0 := b.NewValue0(v.Pos, OpRotateLeft16, typ.UInt16)
 10421  		v1 := b.NewValue0(v.Pos, OpMul16, typ.UInt16)
 10422  		v2 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
 10423  		v2.AuxInt = int64(int16(udivisible(16, c).m))
 10424  		v1.AddArg(v2)
 10425  		v1.AddArg(x)
 10426  		v0.AddArg(v1)
 10427  		v3 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
 10428  		v3.AuxInt = int64(16 - udivisible(16, c).k)
 10429  		v0.AddArg(v3)
 10430  		v.AddArg(v0)
 10431  		v4 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
 10432  		v4.AuxInt = int64(int16(udivisible(16, c).max))
 10433  		v.AddArg(v4)
 10434  		return true
 10435  	}
 10436  	return false
 10437  }
 10438  func rewriteValuegeneric_OpEq16_30(v *Value) bool {
 10439  	b := v.Block
 10440  	typ := &b.Func.Config.Types
 10441  	// match: (Eq16 x (Mul16 (Trunc32to16 (Rsh32Ux64 mul:(Mul32 (Rsh32Ux64 (ZeroExt16to32 x) (Const64 [1])) (Const32 [m])) (Const64 [s]))) (Const16 [c])))
 10442  	// cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<15+(umagic(16,c).m+1)/2) && s == 16+umagic(16,c).s-2 && x.Op != OpConst16 && udivisibleOK(16,c)
 10443  	// result: (Leq16U (RotateLeft16 <typ.UInt16> (Mul16 <typ.UInt16> (Const16 <typ.UInt16> [int64(int16(udivisible(16,c).m))]) x) (Const16 <typ.UInt16> [int64(16-udivisible(16,c).k)]) ) (Const16 <typ.UInt16> [int64(int16(udivisible(16,c).max))]) )
 10444  	for {
 10445  		_ = v.Args[1]
 10446  		x := v.Args[0]
 10447  		v_1 := v.Args[1]
 10448  		if v_1.Op != OpMul16 {
 10449  			break
 10450  		}
 10451  		_ = v_1.Args[1]
 10452  		v_1_0 := v_1.Args[0]
 10453  		if v_1_0.Op != OpTrunc32to16 {
 10454  			break
 10455  		}
 10456  		v_1_0_0 := v_1_0.Args[0]
 10457  		if v_1_0_0.Op != OpRsh32Ux64 {
 10458  			break
 10459  		}
 10460  		_ = v_1_0_0.Args[1]
 10461  		mul := v_1_0_0.Args[0]
 10462  		if mul.Op != OpMul32 {
 10463  			break
 10464  		}
 10465  		_ = mul.Args[1]
 10466  		mul_0 := mul.Args[0]
 10467  		if mul_0.Op != OpRsh32Ux64 {
 10468  			break
 10469  		}
 10470  		_ = mul_0.Args[1]
 10471  		mul_0_0 := mul_0.Args[0]
 10472  		if mul_0_0.Op != OpZeroExt16to32 {
 10473  			break
 10474  		}
 10475  		if x != mul_0_0.Args[0] {
 10476  			break
 10477  		}
 10478  		mul_0_1 := mul_0.Args[1]
 10479  		if mul_0_1.Op != OpConst64 {
 10480  			break
 10481  		}
 10482  		if mul_0_1.AuxInt != 1 {
 10483  			break
 10484  		}
 10485  		mul_1 := mul.Args[1]
 10486  		if mul_1.Op != OpConst32 {
 10487  			break
 10488  		}
 10489  		m := mul_1.AuxInt
 10490  		v_1_0_0_1 := v_1_0_0.Args[1]
 10491  		if v_1_0_0_1.Op != OpConst64 {
 10492  			break
 10493  		}
 10494  		s := v_1_0_0_1.AuxInt
 10495  		v_1_1 := v_1.Args[1]
 10496  		if v_1_1.Op != OpConst16 {
 10497  			break
 10498  		}
 10499  		c := v_1_1.AuxInt
 10500  		if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<15+(umagic(16, c).m+1)/2) && s == 16+umagic(16, c).s-2 && x.Op != OpConst16 && udivisibleOK(16, c)) {
 10501  			break
 10502  		}
 10503  		v.reset(OpLeq16U)
 10504  		v0 := b.NewValue0(v.Pos, OpRotateLeft16, typ.UInt16)
 10505  		v1 := b.NewValue0(v.Pos, OpMul16, typ.UInt16)
 10506  		v2 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
 10507  		v2.AuxInt = int64(int16(udivisible(16, c).m))
 10508  		v1.AddArg(v2)
 10509  		v1.AddArg(x)
 10510  		v0.AddArg(v1)
 10511  		v3 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
 10512  		v3.AuxInt = int64(16 - udivisible(16, c).k)
 10513  		v0.AddArg(v3)
 10514  		v.AddArg(v0)
 10515  		v4 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
 10516  		v4.AuxInt = int64(int16(udivisible(16, c).max))
 10517  		v.AddArg(v4)
 10518  		return true
 10519  	}
 10520  	// match: (Eq16 (Mul16 (Const16 [c]) (Trunc32to16 (Rsh32Ux64 mul:(Mul32 (Const32 [m]) (Rsh32Ux64 (ZeroExt16to32 x) (Const64 [1]))) (Const64 [s])))) x)
 10521  	// cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<15+(umagic(16,c).m+1)/2) && s == 16+umagic(16,c).s-2 && x.Op != OpConst16 && udivisibleOK(16,c)
 10522  	// result: (Leq16U (RotateLeft16 <typ.UInt16> (Mul16 <typ.UInt16> (Const16 <typ.UInt16> [int64(int16(udivisible(16,c).m))]) x) (Const16 <typ.UInt16> [int64(16-udivisible(16,c).k)]) ) (Const16 <typ.UInt16> [int64(int16(udivisible(16,c).max))]) )
 10523  	for {
 10524  		x := v.Args[1]
 10525  		v_0 := v.Args[0]
 10526  		if v_0.Op != OpMul16 {
 10527  			break
 10528  		}
 10529  		_ = v_0.Args[1]
 10530  		v_0_0 := v_0.Args[0]
 10531  		if v_0_0.Op != OpConst16 {
 10532  			break
 10533  		}
 10534  		c := v_0_0.AuxInt
 10535  		v_0_1 := v_0.Args[1]
 10536  		if v_0_1.Op != OpTrunc32to16 {
 10537  			break
 10538  		}
 10539  		v_0_1_0 := v_0_1.Args[0]
 10540  		if v_0_1_0.Op != OpRsh32Ux64 {
 10541  			break
 10542  		}
 10543  		_ = v_0_1_0.Args[1]
 10544  		mul := v_0_1_0.Args[0]
 10545  		if mul.Op != OpMul32 {
 10546  			break
 10547  		}
 10548  		_ = mul.Args[1]
 10549  		mul_0 := mul.Args[0]
 10550  		if mul_0.Op != OpConst32 {
 10551  			break
 10552  		}
 10553  		m := mul_0.AuxInt
 10554  		mul_1 := mul.Args[1]
 10555  		if mul_1.Op != OpRsh32Ux64 {
 10556  			break
 10557  		}
 10558  		_ = mul_1.Args[1]
 10559  		mul_1_0 := mul_1.Args[0]
 10560  		if mul_1_0.Op != OpZeroExt16to32 {
 10561  			break
 10562  		}
 10563  		if x != mul_1_0.Args[0] {
 10564  			break
 10565  		}
 10566  		mul_1_1 := mul_1.Args[1]
 10567  		if mul_1_1.Op != OpConst64 {
 10568  			break
 10569  		}
 10570  		if mul_1_1.AuxInt != 1 {
 10571  			break
 10572  		}
 10573  		v_0_1_0_1 := v_0_1_0.Args[1]
 10574  		if v_0_1_0_1.Op != OpConst64 {
 10575  			break
 10576  		}
 10577  		s := v_0_1_0_1.AuxInt
 10578  		if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<15+(umagic(16, c).m+1)/2) && s == 16+umagic(16, c).s-2 && x.Op != OpConst16 && udivisibleOK(16, c)) {
 10579  			break
 10580  		}
 10581  		v.reset(OpLeq16U)
 10582  		v0 := b.NewValue0(v.Pos, OpRotateLeft16, typ.UInt16)
 10583  		v1 := b.NewValue0(v.Pos, OpMul16, typ.UInt16)
 10584  		v2 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
 10585  		v2.AuxInt = int64(int16(udivisible(16, c).m))
 10586  		v1.AddArg(v2)
 10587  		v1.AddArg(x)
 10588  		v0.AddArg(v1)
 10589  		v3 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
 10590  		v3.AuxInt = int64(16 - udivisible(16, c).k)
 10591  		v0.AddArg(v3)
 10592  		v.AddArg(v0)
 10593  		v4 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
 10594  		v4.AuxInt = int64(int16(udivisible(16, c).max))
 10595  		v.AddArg(v4)
 10596  		return true
 10597  	}
 10598  	// match: (Eq16 (Mul16 (Const16 [c]) (Trunc32to16 (Rsh32Ux64 mul:(Mul32 (Rsh32Ux64 (ZeroExt16to32 x) (Const64 [1])) (Const32 [m])) (Const64 [s])))) x)
 10599  	// cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<15+(umagic(16,c).m+1)/2) && s == 16+umagic(16,c).s-2 && x.Op != OpConst16 && udivisibleOK(16,c)
 10600  	// result: (Leq16U (RotateLeft16 <typ.UInt16> (Mul16 <typ.UInt16> (Const16 <typ.UInt16> [int64(int16(udivisible(16,c).m))]) x) (Const16 <typ.UInt16> [int64(16-udivisible(16,c).k)]) ) (Const16 <typ.UInt16> [int64(int16(udivisible(16,c).max))]) )
 10601  	for {
 10602  		x := v.Args[1]
 10603  		v_0 := v.Args[0]
 10604  		if v_0.Op != OpMul16 {
 10605  			break
 10606  		}
 10607  		_ = v_0.Args[1]
 10608  		v_0_0 := v_0.Args[0]
 10609  		if v_0_0.Op != OpConst16 {
 10610  			break
 10611  		}
 10612  		c := v_0_0.AuxInt
 10613  		v_0_1 := v_0.Args[1]
 10614  		if v_0_1.Op != OpTrunc32to16 {
 10615  			break
 10616  		}
 10617  		v_0_1_0 := v_0_1.Args[0]
 10618  		if v_0_1_0.Op != OpRsh32Ux64 {
 10619  			break
 10620  		}
 10621  		_ = v_0_1_0.Args[1]
 10622  		mul := v_0_1_0.Args[0]
 10623  		if mul.Op != OpMul32 {
 10624  			break
 10625  		}
 10626  		_ = mul.Args[1]
 10627  		mul_0 := mul.Args[0]
 10628  		if mul_0.Op != OpRsh32Ux64 {
 10629  			break
 10630  		}
 10631  		_ = mul_0.Args[1]
 10632  		mul_0_0 := mul_0.Args[0]
 10633  		if mul_0_0.Op != OpZeroExt16to32 {
 10634  			break
 10635  		}
 10636  		if x != mul_0_0.Args[0] {
 10637  			break
 10638  		}
 10639  		mul_0_1 := mul_0.Args[1]
 10640  		if mul_0_1.Op != OpConst64 {
 10641  			break
 10642  		}
 10643  		if mul_0_1.AuxInt != 1 {
 10644  			break
 10645  		}
 10646  		mul_1 := mul.Args[1]
 10647  		if mul_1.Op != OpConst32 {
 10648  			break
 10649  		}
 10650  		m := mul_1.AuxInt
 10651  		v_0_1_0_1 := v_0_1_0.Args[1]
 10652  		if v_0_1_0_1.Op != OpConst64 {
 10653  			break
 10654  		}
 10655  		s := v_0_1_0_1.AuxInt
 10656  		if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<15+(umagic(16, c).m+1)/2) && s == 16+umagic(16, c).s-2 && x.Op != OpConst16 && udivisibleOK(16, c)) {
 10657  			break
 10658  		}
 10659  		v.reset(OpLeq16U)
 10660  		v0 := b.NewValue0(v.Pos, OpRotateLeft16, typ.UInt16)
 10661  		v1 := b.NewValue0(v.Pos, OpMul16, typ.UInt16)
 10662  		v2 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
 10663  		v2.AuxInt = int64(int16(udivisible(16, c).m))
 10664  		v1.AddArg(v2)
 10665  		v1.AddArg(x)
 10666  		v0.AddArg(v1)
 10667  		v3 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
 10668  		v3.AuxInt = int64(16 - udivisible(16, c).k)
 10669  		v0.AddArg(v3)
 10670  		v.AddArg(v0)
 10671  		v4 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
 10672  		v4.AuxInt = int64(int16(udivisible(16, c).max))
 10673  		v.AddArg(v4)
 10674  		return true
 10675  	}
 10676  	// match: (Eq16 (Mul16 (Trunc32to16 (Rsh32Ux64 mul:(Mul32 (Const32 [m]) (Rsh32Ux64 (ZeroExt16to32 x) (Const64 [1]))) (Const64 [s]))) (Const16 [c])) x)
 10677  	// cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<15+(umagic(16,c).m+1)/2) && s == 16+umagic(16,c).s-2 && x.Op != OpConst16 && udivisibleOK(16,c)
 10678  	// result: (Leq16U (RotateLeft16 <typ.UInt16> (Mul16 <typ.UInt16> (Const16 <typ.UInt16> [int64(int16(udivisible(16,c).m))]) x) (Const16 <typ.UInt16> [int64(16-udivisible(16,c).k)]) ) (Const16 <typ.UInt16> [int64(int16(udivisible(16,c).max))]) )
 10679  	for {
 10680  		x := v.Args[1]
 10681  		v_0 := v.Args[0]
 10682  		if v_0.Op != OpMul16 {
 10683  			break
 10684  		}
 10685  		_ = v_0.Args[1]
 10686  		v_0_0 := v_0.Args[0]
 10687  		if v_0_0.Op != OpTrunc32to16 {
 10688  			break
 10689  		}
 10690  		v_0_0_0 := v_0_0.Args[0]
 10691  		if v_0_0_0.Op != OpRsh32Ux64 {
 10692  			break
 10693  		}
 10694  		_ = v_0_0_0.Args[1]
 10695  		mul := v_0_0_0.Args[0]
 10696  		if mul.Op != OpMul32 {
 10697  			break
 10698  		}
 10699  		_ = mul.Args[1]
 10700  		mul_0 := mul.Args[0]
 10701  		if mul_0.Op != OpConst32 {
 10702  			break
 10703  		}
 10704  		m := mul_0.AuxInt
 10705  		mul_1 := mul.Args[1]
 10706  		if mul_1.Op != OpRsh32Ux64 {
 10707  			break
 10708  		}
 10709  		_ = mul_1.Args[1]
 10710  		mul_1_0 := mul_1.Args[0]
 10711  		if mul_1_0.Op != OpZeroExt16to32 {
 10712  			break
 10713  		}
 10714  		if x != mul_1_0.Args[0] {
 10715  			break
 10716  		}
 10717  		mul_1_1 := mul_1.Args[1]
 10718  		if mul_1_1.Op != OpConst64 {
 10719  			break
 10720  		}
 10721  		if mul_1_1.AuxInt != 1 {
 10722  			break
 10723  		}
 10724  		v_0_0_0_1 := v_0_0_0.Args[1]
 10725  		if v_0_0_0_1.Op != OpConst64 {
 10726  			break
 10727  		}
 10728  		s := v_0_0_0_1.AuxInt
 10729  		v_0_1 := v_0.Args[1]
 10730  		if v_0_1.Op != OpConst16 {
 10731  			break
 10732  		}
 10733  		c := v_0_1.AuxInt
 10734  		if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<15+(umagic(16, c).m+1)/2) && s == 16+umagic(16, c).s-2 && x.Op != OpConst16 && udivisibleOK(16, c)) {
 10735  			break
 10736  		}
 10737  		v.reset(OpLeq16U)
 10738  		v0 := b.NewValue0(v.Pos, OpRotateLeft16, typ.UInt16)
 10739  		v1 := b.NewValue0(v.Pos, OpMul16, typ.UInt16)
 10740  		v2 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
 10741  		v2.AuxInt = int64(int16(udivisible(16, c).m))
 10742  		v1.AddArg(v2)
 10743  		v1.AddArg(x)
 10744  		v0.AddArg(v1)
 10745  		v3 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
 10746  		v3.AuxInt = int64(16 - udivisible(16, c).k)
 10747  		v0.AddArg(v3)
 10748  		v.AddArg(v0)
 10749  		v4 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
 10750  		v4.AuxInt = int64(int16(udivisible(16, c).max))
 10751  		v.AddArg(v4)
 10752  		return true
 10753  	}
 10754  	// match: (Eq16 (Mul16 (Trunc32to16 (Rsh32Ux64 mul:(Mul32 (Rsh32Ux64 (ZeroExt16to32 x) (Const64 [1])) (Const32 [m])) (Const64 [s]))) (Const16 [c])) x)
 10755  	// cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<15+(umagic(16,c).m+1)/2) && s == 16+umagic(16,c).s-2 && x.Op != OpConst16 && udivisibleOK(16,c)
 10756  	// result: (Leq16U (RotateLeft16 <typ.UInt16> (Mul16 <typ.UInt16> (Const16 <typ.UInt16> [int64(int16(udivisible(16,c).m))]) x) (Const16 <typ.UInt16> [int64(16-udivisible(16,c).k)]) ) (Const16 <typ.UInt16> [int64(int16(udivisible(16,c).max))]) )
 10757  	for {
 10758  		x := v.Args[1]
 10759  		v_0 := v.Args[0]
 10760  		if v_0.Op != OpMul16 {
 10761  			break
 10762  		}
 10763  		_ = v_0.Args[1]
 10764  		v_0_0 := v_0.Args[0]
 10765  		if v_0_0.Op != OpTrunc32to16 {
 10766  			break
 10767  		}
 10768  		v_0_0_0 := v_0_0.Args[0]
 10769  		if v_0_0_0.Op != OpRsh32Ux64 {
 10770  			break
 10771  		}
 10772  		_ = v_0_0_0.Args[1]
 10773  		mul := v_0_0_0.Args[0]
 10774  		if mul.Op != OpMul32 {
 10775  			break
 10776  		}
 10777  		_ = mul.Args[1]
 10778  		mul_0 := mul.Args[0]
 10779  		if mul_0.Op != OpRsh32Ux64 {
 10780  			break
 10781  		}
 10782  		_ = mul_0.Args[1]
 10783  		mul_0_0 := mul_0.Args[0]
 10784  		if mul_0_0.Op != OpZeroExt16to32 {
 10785  			break
 10786  		}
 10787  		if x != mul_0_0.Args[0] {
 10788  			break
 10789  		}
 10790  		mul_0_1 := mul_0.Args[1]
 10791  		if mul_0_1.Op != OpConst64 {
 10792  			break
 10793  		}
 10794  		if mul_0_1.AuxInt != 1 {
 10795  			break
 10796  		}
 10797  		mul_1 := mul.Args[1]
 10798  		if mul_1.Op != OpConst32 {
 10799  			break
 10800  		}
 10801  		m := mul_1.AuxInt
 10802  		v_0_0_0_1 := v_0_0_0.Args[1]
 10803  		if v_0_0_0_1.Op != OpConst64 {
 10804  			break
 10805  		}
 10806  		s := v_0_0_0_1.AuxInt
 10807  		v_0_1 := v_0.Args[1]
 10808  		if v_0_1.Op != OpConst16 {
 10809  			break
 10810  		}
 10811  		c := v_0_1.AuxInt
 10812  		if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(1<<15+(umagic(16, c).m+1)/2) && s == 16+umagic(16, c).s-2 && x.Op != OpConst16 && udivisibleOK(16, c)) {
 10813  			break
 10814  		}
 10815  		v.reset(OpLeq16U)
 10816  		v0 := b.NewValue0(v.Pos, OpRotateLeft16, typ.UInt16)
 10817  		v1 := b.NewValue0(v.Pos, OpMul16, typ.UInt16)
 10818  		v2 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
 10819  		v2.AuxInt = int64(int16(udivisible(16, c).m))
 10820  		v1.AddArg(v2)
 10821  		v1.AddArg(x)
 10822  		v0.AddArg(v1)
 10823  		v3 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
 10824  		v3.AuxInt = int64(16 - udivisible(16, c).k)
 10825  		v0.AddArg(v3)
 10826  		v.AddArg(v0)
 10827  		v4 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
 10828  		v4.AuxInt = int64(int16(udivisible(16, c).max))
 10829  		v.AddArg(v4)
 10830  		return true
 10831  	}
 10832  	// match: (Eq16 x (Mul16 (Const16 [c]) (Trunc32to16 (Rsh32Ux64 (Avg32u (Lsh32x64 (ZeroExt16to32 x) (Const64 [16])) mul:(Mul32 (Const32 [m]) (ZeroExt16to32 x))) (Const64 [s])))))
 10833  	// cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(umagic(16,c).m) && s == 16+umagic(16,c).s-1 && x.Op != OpConst16 && udivisibleOK(16,c)
 10834  	// result: (Leq16U (RotateLeft16 <typ.UInt16> (Mul16 <typ.UInt16> (Const16 <typ.UInt16> [int64(int16(udivisible(16,c).m))]) x) (Const16 <typ.UInt16> [int64(16-udivisible(16,c).k)]) ) (Const16 <typ.UInt16> [int64(int16(udivisible(16,c).max))]) )
 10835  	for {
 10836  		_ = v.Args[1]
 10837  		x := v.Args[0]
 10838  		v_1 := v.Args[1]
 10839  		if v_1.Op != OpMul16 {
 10840  			break
 10841  		}
 10842  		_ = v_1.Args[1]
 10843  		v_1_0 := v_1.Args[0]
 10844  		if v_1_0.Op != OpConst16 {
 10845  			break
 10846  		}
 10847  		c := v_1_0.AuxInt
 10848  		v_1_1 := v_1.Args[1]
 10849  		if v_1_1.Op != OpTrunc32to16 {
 10850  			break
 10851  		}
 10852  		v_1_1_0 := v_1_1.Args[0]
 10853  		if v_1_1_0.Op != OpRsh32Ux64 {
 10854  			break
 10855  		}
 10856  		_ = v_1_1_0.Args[1]
 10857  		v_1_1_0_0 := v_1_1_0.Args[0]
 10858  		if v_1_1_0_0.Op != OpAvg32u {
 10859  			break
 10860  		}
 10861  		_ = v_1_1_0_0.Args[1]
 10862  		v_1_1_0_0_0 := v_1_1_0_0.Args[0]
 10863  		if v_1_1_0_0_0.Op != OpLsh32x64 {
 10864  			break
 10865  		}
 10866  		_ = v_1_1_0_0_0.Args[1]
 10867  		v_1_1_0_0_0_0 := v_1_1_0_0_0.Args[0]
 10868  		if v_1_1_0_0_0_0.Op != OpZeroExt16to32 {
 10869  			break
 10870  		}
 10871  		if x != v_1_1_0_0_0_0.Args[0] {
 10872  			break
 10873  		}
 10874  		v_1_1_0_0_0_1 := v_1_1_0_0_0.Args[1]
 10875  		if v_1_1_0_0_0_1.Op != OpConst64 {
 10876  			break
 10877  		}
 10878  		if v_1_1_0_0_0_1.AuxInt != 16 {
 10879  			break
 10880  		}
 10881  		mul := v_1_1_0_0.Args[1]
 10882  		if mul.Op != OpMul32 {
 10883  			break
 10884  		}
 10885  		_ = mul.Args[1]
 10886  		mul_0 := mul.Args[0]
 10887  		if mul_0.Op != OpConst32 {
 10888  			break
 10889  		}
 10890  		m := mul_0.AuxInt
 10891  		mul_1 := mul.Args[1]
 10892  		if mul_1.Op != OpZeroExt16to32 {
 10893  			break
 10894  		}
 10895  		if x != mul_1.Args[0] {
 10896  			break
 10897  		}
 10898  		v_1_1_0_1 := v_1_1_0.Args[1]
 10899  		if v_1_1_0_1.Op != OpConst64 {
 10900  			break
 10901  		}
 10902  		s := v_1_1_0_1.AuxInt
 10903  		if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(umagic(16, c).m) && s == 16+umagic(16, c).s-1 && x.Op != OpConst16 && udivisibleOK(16, c)) {
 10904  			break
 10905  		}
 10906  		v.reset(OpLeq16U)
 10907  		v0 := b.NewValue0(v.Pos, OpRotateLeft16, typ.UInt16)
 10908  		v1 := b.NewValue0(v.Pos, OpMul16, typ.UInt16)
 10909  		v2 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
 10910  		v2.AuxInt = int64(int16(udivisible(16, c).m))
 10911  		v1.AddArg(v2)
 10912  		v1.AddArg(x)
 10913  		v0.AddArg(v1)
 10914  		v3 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
 10915  		v3.AuxInt = int64(16 - udivisible(16, c).k)
 10916  		v0.AddArg(v3)
 10917  		v.AddArg(v0)
 10918  		v4 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
 10919  		v4.AuxInt = int64(int16(udivisible(16, c).max))
 10920  		v.AddArg(v4)
 10921  		return true
 10922  	}
 10923  	// match: (Eq16 x (Mul16 (Const16 [c]) (Trunc32to16 (Rsh32Ux64 (Avg32u (Lsh32x64 (ZeroExt16to32 x) (Const64 [16])) mul:(Mul32 (ZeroExt16to32 x) (Const32 [m]))) (Const64 [s])))))
 10924  	// cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(umagic(16,c).m) && s == 16+umagic(16,c).s-1 && x.Op != OpConst16 && udivisibleOK(16,c)
 10925  	// result: (Leq16U (RotateLeft16 <typ.UInt16> (Mul16 <typ.UInt16> (Const16 <typ.UInt16> [int64(int16(udivisible(16,c).m))]) x) (Const16 <typ.UInt16> [int64(16-udivisible(16,c).k)]) ) (Const16 <typ.UInt16> [int64(int16(udivisible(16,c).max))]) )
 10926  	for {
 10927  		_ = v.Args[1]
 10928  		x := v.Args[0]
 10929  		v_1 := v.Args[1]
 10930  		if v_1.Op != OpMul16 {
 10931  			break
 10932  		}
 10933  		_ = v_1.Args[1]
 10934  		v_1_0 := v_1.Args[0]
 10935  		if v_1_0.Op != OpConst16 {
 10936  			break
 10937  		}
 10938  		c := v_1_0.AuxInt
 10939  		v_1_1 := v_1.Args[1]
 10940  		if v_1_1.Op != OpTrunc32to16 {
 10941  			break
 10942  		}
 10943  		v_1_1_0 := v_1_1.Args[0]
 10944  		if v_1_1_0.Op != OpRsh32Ux64 {
 10945  			break
 10946  		}
 10947  		_ = v_1_1_0.Args[1]
 10948  		v_1_1_0_0 := v_1_1_0.Args[0]
 10949  		if v_1_1_0_0.Op != OpAvg32u {
 10950  			break
 10951  		}
 10952  		_ = v_1_1_0_0.Args[1]
 10953  		v_1_1_0_0_0 := v_1_1_0_0.Args[0]
 10954  		if v_1_1_0_0_0.Op != OpLsh32x64 {
 10955  			break
 10956  		}
 10957  		_ = v_1_1_0_0_0.Args[1]
 10958  		v_1_1_0_0_0_0 := v_1_1_0_0_0.Args[0]
 10959  		if v_1_1_0_0_0_0.Op != OpZeroExt16to32 {
 10960  			break
 10961  		}
 10962  		if x != v_1_1_0_0_0_0.Args[0] {
 10963  			break
 10964  		}
 10965  		v_1_1_0_0_0_1 := v_1_1_0_0_0.Args[1]
 10966  		if v_1_1_0_0_0_1.Op != OpConst64 {
 10967  			break
 10968  		}
 10969  		if v_1_1_0_0_0_1.AuxInt != 16 {
 10970  			break
 10971  		}
 10972  		mul := v_1_1_0_0.Args[1]
 10973  		if mul.Op != OpMul32 {
 10974  			break
 10975  		}
 10976  		_ = mul.Args[1]
 10977  		mul_0 := mul.Args[0]
 10978  		if mul_0.Op != OpZeroExt16to32 {
 10979  			break
 10980  		}
 10981  		if x != mul_0.Args[0] {
 10982  			break
 10983  		}
 10984  		mul_1 := mul.Args[1]
 10985  		if mul_1.Op != OpConst32 {
 10986  			break
 10987  		}
 10988  		m := mul_1.AuxInt
 10989  		v_1_1_0_1 := v_1_1_0.Args[1]
 10990  		if v_1_1_0_1.Op != OpConst64 {
 10991  			break
 10992  		}
 10993  		s := v_1_1_0_1.AuxInt
 10994  		if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(umagic(16, c).m) && s == 16+umagic(16, c).s-1 && x.Op != OpConst16 && udivisibleOK(16, c)) {
 10995  			break
 10996  		}
 10997  		v.reset(OpLeq16U)
 10998  		v0 := b.NewValue0(v.Pos, OpRotateLeft16, typ.UInt16)
 10999  		v1 := b.NewValue0(v.Pos, OpMul16, typ.UInt16)
 11000  		v2 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
 11001  		v2.AuxInt = int64(int16(udivisible(16, c).m))
 11002  		v1.AddArg(v2)
 11003  		v1.AddArg(x)
 11004  		v0.AddArg(v1)
 11005  		v3 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
 11006  		v3.AuxInt = int64(16 - udivisible(16, c).k)
 11007  		v0.AddArg(v3)
 11008  		v.AddArg(v0)
 11009  		v4 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
 11010  		v4.AuxInt = int64(int16(udivisible(16, c).max))
 11011  		v.AddArg(v4)
 11012  		return true
 11013  	}
 11014  	// match: (Eq16 x (Mul16 (Trunc32to16 (Rsh32Ux64 (Avg32u (Lsh32x64 (ZeroExt16to32 x) (Const64 [16])) mul:(Mul32 (Const32 [m]) (ZeroExt16to32 x))) (Const64 [s]))) (Const16 [c])))
 11015  	// cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(umagic(16,c).m) && s == 16+umagic(16,c).s-1 && x.Op != OpConst16 && udivisibleOK(16,c)
 11016  	// result: (Leq16U (RotateLeft16 <typ.UInt16> (Mul16 <typ.UInt16> (Const16 <typ.UInt16> [int64(int16(udivisible(16,c).m))]) x) (Const16 <typ.UInt16> [int64(16-udivisible(16,c).k)]) ) (Const16 <typ.UInt16> [int64(int16(udivisible(16,c).max))]) )
 11017  	for {
 11018  		_ = v.Args[1]
 11019  		x := v.Args[0]
 11020  		v_1 := v.Args[1]
 11021  		if v_1.Op != OpMul16 {
 11022  			break
 11023  		}
 11024  		_ = v_1.Args[1]
 11025  		v_1_0 := v_1.Args[0]
 11026  		if v_1_0.Op != OpTrunc32to16 {
 11027  			break
 11028  		}
 11029  		v_1_0_0 := v_1_0.Args[0]
 11030  		if v_1_0_0.Op != OpRsh32Ux64 {
 11031  			break
 11032  		}
 11033  		_ = v_1_0_0.Args[1]
 11034  		v_1_0_0_0 := v_1_0_0.Args[0]
 11035  		if v_1_0_0_0.Op != OpAvg32u {
 11036  			break
 11037  		}
 11038  		_ = v_1_0_0_0.Args[1]
 11039  		v_1_0_0_0_0 := v_1_0_0_0.Args[0]
 11040  		if v_1_0_0_0_0.Op != OpLsh32x64 {
 11041  			break
 11042  		}
 11043  		_ = v_1_0_0_0_0.Args[1]
 11044  		v_1_0_0_0_0_0 := v_1_0_0_0_0.Args[0]
 11045  		if v_1_0_0_0_0_0.Op != OpZeroExt16to32 {
 11046  			break
 11047  		}
 11048  		if x != v_1_0_0_0_0_0.Args[0] {
 11049  			break
 11050  		}
 11051  		v_1_0_0_0_0_1 := v_1_0_0_0_0.Args[1]
 11052  		if v_1_0_0_0_0_1.Op != OpConst64 {
 11053  			break
 11054  		}
 11055  		if v_1_0_0_0_0_1.AuxInt != 16 {
 11056  			break
 11057  		}
 11058  		mul := v_1_0_0_0.Args[1]
 11059  		if mul.Op != OpMul32 {
 11060  			break
 11061  		}
 11062  		_ = mul.Args[1]
 11063  		mul_0 := mul.Args[0]
 11064  		if mul_0.Op != OpConst32 {
 11065  			break
 11066  		}
 11067  		m := mul_0.AuxInt
 11068  		mul_1 := mul.Args[1]
 11069  		if mul_1.Op != OpZeroExt16to32 {
 11070  			break
 11071  		}
 11072  		if x != mul_1.Args[0] {
 11073  			break
 11074  		}
 11075  		v_1_0_0_1 := v_1_0_0.Args[1]
 11076  		if v_1_0_0_1.Op != OpConst64 {
 11077  			break
 11078  		}
 11079  		s := v_1_0_0_1.AuxInt
 11080  		v_1_1 := v_1.Args[1]
 11081  		if v_1_1.Op != OpConst16 {
 11082  			break
 11083  		}
 11084  		c := v_1_1.AuxInt
 11085  		if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(umagic(16, c).m) && s == 16+umagic(16, c).s-1 && x.Op != OpConst16 && udivisibleOK(16, c)) {
 11086  			break
 11087  		}
 11088  		v.reset(OpLeq16U)
 11089  		v0 := b.NewValue0(v.Pos, OpRotateLeft16, typ.UInt16)
 11090  		v1 := b.NewValue0(v.Pos, OpMul16, typ.UInt16)
 11091  		v2 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
 11092  		v2.AuxInt = int64(int16(udivisible(16, c).m))
 11093  		v1.AddArg(v2)
 11094  		v1.AddArg(x)
 11095  		v0.AddArg(v1)
 11096  		v3 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
 11097  		v3.AuxInt = int64(16 - udivisible(16, c).k)
 11098  		v0.AddArg(v3)
 11099  		v.AddArg(v0)
 11100  		v4 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
 11101  		v4.AuxInt = int64(int16(udivisible(16, c).max))
 11102  		v.AddArg(v4)
 11103  		return true
 11104  	}
 11105  	// match: (Eq16 x (Mul16 (Trunc32to16 (Rsh32Ux64 (Avg32u (Lsh32x64 (ZeroExt16to32 x) (Const64 [16])) mul:(Mul32 (ZeroExt16to32 x) (Const32 [m]))) (Const64 [s]))) (Const16 [c])))
 11106  	// cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(umagic(16,c).m) && s == 16+umagic(16,c).s-1 && x.Op != OpConst16 && udivisibleOK(16,c)
 11107  	// result: (Leq16U (RotateLeft16 <typ.UInt16> (Mul16 <typ.UInt16> (Const16 <typ.UInt16> [int64(int16(udivisible(16,c).m))]) x) (Const16 <typ.UInt16> [int64(16-udivisible(16,c).k)]) ) (Const16 <typ.UInt16> [int64(int16(udivisible(16,c).max))]) )
 11108  	for {
 11109  		_ = v.Args[1]
 11110  		x := v.Args[0]
 11111  		v_1 := v.Args[1]
 11112  		if v_1.Op != OpMul16 {
 11113  			break
 11114  		}
 11115  		_ = v_1.Args[1]
 11116  		v_1_0 := v_1.Args[0]
 11117  		if v_1_0.Op != OpTrunc32to16 {
 11118  			break
 11119  		}
 11120  		v_1_0_0 := v_1_0.Args[0]
 11121  		if v_1_0_0.Op != OpRsh32Ux64 {
 11122  			break
 11123  		}
 11124  		_ = v_1_0_0.Args[1]
 11125  		v_1_0_0_0 := v_1_0_0.Args[0]
 11126  		if v_1_0_0_0.Op != OpAvg32u {
 11127  			break
 11128  		}
 11129  		_ = v_1_0_0_0.Args[1]
 11130  		v_1_0_0_0_0 := v_1_0_0_0.Args[0]
 11131  		if v_1_0_0_0_0.Op != OpLsh32x64 {
 11132  			break
 11133  		}
 11134  		_ = v_1_0_0_0_0.Args[1]
 11135  		v_1_0_0_0_0_0 := v_1_0_0_0_0.Args[0]
 11136  		if v_1_0_0_0_0_0.Op != OpZeroExt16to32 {
 11137  			break
 11138  		}
 11139  		if x != v_1_0_0_0_0_0.Args[0] {
 11140  			break
 11141  		}
 11142  		v_1_0_0_0_0_1 := v_1_0_0_0_0.Args[1]
 11143  		if v_1_0_0_0_0_1.Op != OpConst64 {
 11144  			break
 11145  		}
 11146  		if v_1_0_0_0_0_1.AuxInt != 16 {
 11147  			break
 11148  		}
 11149  		mul := v_1_0_0_0.Args[1]
 11150  		if mul.Op != OpMul32 {
 11151  			break
 11152  		}
 11153  		_ = mul.Args[1]
 11154  		mul_0 := mul.Args[0]
 11155  		if mul_0.Op != OpZeroExt16to32 {
 11156  			break
 11157  		}
 11158  		if x != mul_0.Args[0] {
 11159  			break
 11160  		}
 11161  		mul_1 := mul.Args[1]
 11162  		if mul_1.Op != OpConst32 {
 11163  			break
 11164  		}
 11165  		m := mul_1.AuxInt
 11166  		v_1_0_0_1 := v_1_0_0.Args[1]
 11167  		if v_1_0_0_1.Op != OpConst64 {
 11168  			break
 11169  		}
 11170  		s := v_1_0_0_1.AuxInt
 11171  		v_1_1 := v_1.Args[1]
 11172  		if v_1_1.Op != OpConst16 {
 11173  			break
 11174  		}
 11175  		c := v_1_1.AuxInt
 11176  		if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(umagic(16, c).m) && s == 16+umagic(16, c).s-1 && x.Op != OpConst16 && udivisibleOK(16, c)) {
 11177  			break
 11178  		}
 11179  		v.reset(OpLeq16U)
 11180  		v0 := b.NewValue0(v.Pos, OpRotateLeft16, typ.UInt16)
 11181  		v1 := b.NewValue0(v.Pos, OpMul16, typ.UInt16)
 11182  		v2 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
 11183  		v2.AuxInt = int64(int16(udivisible(16, c).m))
 11184  		v1.AddArg(v2)
 11185  		v1.AddArg(x)
 11186  		v0.AddArg(v1)
 11187  		v3 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
 11188  		v3.AuxInt = int64(16 - udivisible(16, c).k)
 11189  		v0.AddArg(v3)
 11190  		v.AddArg(v0)
 11191  		v4 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
 11192  		v4.AuxInt = int64(int16(udivisible(16, c).max))
 11193  		v.AddArg(v4)
 11194  		return true
 11195  	}
 11196  	// match: (Eq16 (Mul16 (Const16 [c]) (Trunc32to16 (Rsh32Ux64 (Avg32u (Lsh32x64 (ZeroExt16to32 x) (Const64 [16])) mul:(Mul32 (Const32 [m]) (ZeroExt16to32 x))) (Const64 [s])))) x)
 11197  	// cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(umagic(16,c).m) && s == 16+umagic(16,c).s-1 && x.Op != OpConst16 && udivisibleOK(16,c)
 11198  	// result: (Leq16U (RotateLeft16 <typ.UInt16> (Mul16 <typ.UInt16> (Const16 <typ.UInt16> [int64(int16(udivisible(16,c).m))]) x) (Const16 <typ.UInt16> [int64(16-udivisible(16,c).k)]) ) (Const16 <typ.UInt16> [int64(int16(udivisible(16,c).max))]) )
 11199  	for {
 11200  		x := v.Args[1]
 11201  		v_0 := v.Args[0]
 11202  		if v_0.Op != OpMul16 {
 11203  			break
 11204  		}
 11205  		_ = v_0.Args[1]
 11206  		v_0_0 := v_0.Args[0]
 11207  		if v_0_0.Op != OpConst16 {
 11208  			break
 11209  		}
 11210  		c := v_0_0.AuxInt
 11211  		v_0_1 := v_0.Args[1]
 11212  		if v_0_1.Op != OpTrunc32to16 {
 11213  			break
 11214  		}
 11215  		v_0_1_0 := v_0_1.Args[0]
 11216  		if v_0_1_0.Op != OpRsh32Ux64 {
 11217  			break
 11218  		}
 11219  		_ = v_0_1_0.Args[1]
 11220  		v_0_1_0_0 := v_0_1_0.Args[0]
 11221  		if v_0_1_0_0.Op != OpAvg32u {
 11222  			break
 11223  		}
 11224  		_ = v_0_1_0_0.Args[1]
 11225  		v_0_1_0_0_0 := v_0_1_0_0.Args[0]
 11226  		if v_0_1_0_0_0.Op != OpLsh32x64 {
 11227  			break
 11228  		}
 11229  		_ = v_0_1_0_0_0.Args[1]
 11230  		v_0_1_0_0_0_0 := v_0_1_0_0_0.Args[0]
 11231  		if v_0_1_0_0_0_0.Op != OpZeroExt16to32 {
 11232  			break
 11233  		}
 11234  		if x != v_0_1_0_0_0_0.Args[0] {
 11235  			break
 11236  		}
 11237  		v_0_1_0_0_0_1 := v_0_1_0_0_0.Args[1]
 11238  		if v_0_1_0_0_0_1.Op != OpConst64 {
 11239  			break
 11240  		}
 11241  		if v_0_1_0_0_0_1.AuxInt != 16 {
 11242  			break
 11243  		}
 11244  		mul := v_0_1_0_0.Args[1]
 11245  		if mul.Op != OpMul32 {
 11246  			break
 11247  		}
 11248  		_ = mul.Args[1]
 11249  		mul_0 := mul.Args[0]
 11250  		if mul_0.Op != OpConst32 {
 11251  			break
 11252  		}
 11253  		m := mul_0.AuxInt
 11254  		mul_1 := mul.Args[1]
 11255  		if mul_1.Op != OpZeroExt16to32 {
 11256  			break
 11257  		}
 11258  		if x != mul_1.Args[0] {
 11259  			break
 11260  		}
 11261  		v_0_1_0_1 := v_0_1_0.Args[1]
 11262  		if v_0_1_0_1.Op != OpConst64 {
 11263  			break
 11264  		}
 11265  		s := v_0_1_0_1.AuxInt
 11266  		if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(umagic(16, c).m) && s == 16+umagic(16, c).s-1 && x.Op != OpConst16 && udivisibleOK(16, c)) {
 11267  			break
 11268  		}
 11269  		v.reset(OpLeq16U)
 11270  		v0 := b.NewValue0(v.Pos, OpRotateLeft16, typ.UInt16)
 11271  		v1 := b.NewValue0(v.Pos, OpMul16, typ.UInt16)
 11272  		v2 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
 11273  		v2.AuxInt = int64(int16(udivisible(16, c).m))
 11274  		v1.AddArg(v2)
 11275  		v1.AddArg(x)
 11276  		v0.AddArg(v1)
 11277  		v3 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
 11278  		v3.AuxInt = int64(16 - udivisible(16, c).k)
 11279  		v0.AddArg(v3)
 11280  		v.AddArg(v0)
 11281  		v4 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
 11282  		v4.AuxInt = int64(int16(udivisible(16, c).max))
 11283  		v.AddArg(v4)
 11284  		return true
 11285  	}
 11286  	return false
 11287  }
 11288  func rewriteValuegeneric_OpEq16_40(v *Value) bool {
 11289  	b := v.Block
 11290  	typ := &b.Func.Config.Types
 11291  	// match: (Eq16 (Mul16 (Const16 [c]) (Trunc32to16 (Rsh32Ux64 (Avg32u (Lsh32x64 (ZeroExt16to32 x) (Const64 [16])) mul:(Mul32 (ZeroExt16to32 x) (Const32 [m]))) (Const64 [s])))) x)
 11292  	// cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(umagic(16,c).m) && s == 16+umagic(16,c).s-1 && x.Op != OpConst16 && udivisibleOK(16,c)
 11293  	// result: (Leq16U (RotateLeft16 <typ.UInt16> (Mul16 <typ.UInt16> (Const16 <typ.UInt16> [int64(int16(udivisible(16,c).m))]) x) (Const16 <typ.UInt16> [int64(16-udivisible(16,c).k)]) ) (Const16 <typ.UInt16> [int64(int16(udivisible(16,c).max))]) )
 11294  	for {
 11295  		x := v.Args[1]
 11296  		v_0 := v.Args[0]
 11297  		if v_0.Op != OpMul16 {
 11298  			break
 11299  		}
 11300  		_ = v_0.Args[1]
 11301  		v_0_0 := v_0.Args[0]
 11302  		if v_0_0.Op != OpConst16 {
 11303  			break
 11304  		}
 11305  		c := v_0_0.AuxInt
 11306  		v_0_1 := v_0.Args[1]
 11307  		if v_0_1.Op != OpTrunc32to16 {
 11308  			break
 11309  		}
 11310  		v_0_1_0 := v_0_1.Args[0]
 11311  		if v_0_1_0.Op != OpRsh32Ux64 {
 11312  			break
 11313  		}
 11314  		_ = v_0_1_0.Args[1]
 11315  		v_0_1_0_0 := v_0_1_0.Args[0]
 11316  		if v_0_1_0_0.Op != OpAvg32u {
 11317  			break
 11318  		}
 11319  		_ = v_0_1_0_0.Args[1]
 11320  		v_0_1_0_0_0 := v_0_1_0_0.Args[0]
 11321  		if v_0_1_0_0_0.Op != OpLsh32x64 {
 11322  			break
 11323  		}
 11324  		_ = v_0_1_0_0_0.Args[1]
 11325  		v_0_1_0_0_0_0 := v_0_1_0_0_0.Args[0]
 11326  		if v_0_1_0_0_0_0.Op != OpZeroExt16to32 {
 11327  			break
 11328  		}
 11329  		if x != v_0_1_0_0_0_0.Args[0] {
 11330  			break
 11331  		}
 11332  		v_0_1_0_0_0_1 := v_0_1_0_0_0.Args[1]
 11333  		if v_0_1_0_0_0_1.Op != OpConst64 {
 11334  			break
 11335  		}
 11336  		if v_0_1_0_0_0_1.AuxInt != 16 {
 11337  			break
 11338  		}
 11339  		mul := v_0_1_0_0.Args[1]
 11340  		if mul.Op != OpMul32 {
 11341  			break
 11342  		}
 11343  		_ = mul.Args[1]
 11344  		mul_0 := mul.Args[0]
 11345  		if mul_0.Op != OpZeroExt16to32 {
 11346  			break
 11347  		}
 11348  		if x != mul_0.Args[0] {
 11349  			break
 11350  		}
 11351  		mul_1 := mul.Args[1]
 11352  		if mul_1.Op != OpConst32 {
 11353  			break
 11354  		}
 11355  		m := mul_1.AuxInt
 11356  		v_0_1_0_1 := v_0_1_0.Args[1]
 11357  		if v_0_1_0_1.Op != OpConst64 {
 11358  			break
 11359  		}
 11360  		s := v_0_1_0_1.AuxInt
 11361  		if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(umagic(16, c).m) && s == 16+umagic(16, c).s-1 && x.Op != OpConst16 && udivisibleOK(16, c)) {
 11362  			break
 11363  		}
 11364  		v.reset(OpLeq16U)
 11365  		v0 := b.NewValue0(v.Pos, OpRotateLeft16, typ.UInt16)
 11366  		v1 := b.NewValue0(v.Pos, OpMul16, typ.UInt16)
 11367  		v2 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
 11368  		v2.AuxInt = int64(int16(udivisible(16, c).m))
 11369  		v1.AddArg(v2)
 11370  		v1.AddArg(x)
 11371  		v0.AddArg(v1)
 11372  		v3 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
 11373  		v3.AuxInt = int64(16 - udivisible(16, c).k)
 11374  		v0.AddArg(v3)
 11375  		v.AddArg(v0)
 11376  		v4 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
 11377  		v4.AuxInt = int64(int16(udivisible(16, c).max))
 11378  		v.AddArg(v4)
 11379  		return true
 11380  	}
 11381  	// match: (Eq16 (Mul16 (Trunc32to16 (Rsh32Ux64 (Avg32u (Lsh32x64 (ZeroExt16to32 x) (Const64 [16])) mul:(Mul32 (Const32 [m]) (ZeroExt16to32 x))) (Const64 [s]))) (Const16 [c])) x)
 11382  	// cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(umagic(16,c).m) && s == 16+umagic(16,c).s-1 && x.Op != OpConst16 && udivisibleOK(16,c)
 11383  	// result: (Leq16U (RotateLeft16 <typ.UInt16> (Mul16 <typ.UInt16> (Const16 <typ.UInt16> [int64(int16(udivisible(16,c).m))]) x) (Const16 <typ.UInt16> [int64(16-udivisible(16,c).k)]) ) (Const16 <typ.UInt16> [int64(int16(udivisible(16,c).max))]) )
 11384  	for {
 11385  		x := v.Args[1]
 11386  		v_0 := v.Args[0]
 11387  		if v_0.Op != OpMul16 {
 11388  			break
 11389  		}
 11390  		_ = v_0.Args[1]
 11391  		v_0_0 := v_0.Args[0]
 11392  		if v_0_0.Op != OpTrunc32to16 {
 11393  			break
 11394  		}
 11395  		v_0_0_0 := v_0_0.Args[0]
 11396  		if v_0_0_0.Op != OpRsh32Ux64 {
 11397  			break
 11398  		}
 11399  		_ = v_0_0_0.Args[1]
 11400  		v_0_0_0_0 := v_0_0_0.Args[0]
 11401  		if v_0_0_0_0.Op != OpAvg32u {
 11402  			break
 11403  		}
 11404  		_ = v_0_0_0_0.Args[1]
 11405  		v_0_0_0_0_0 := v_0_0_0_0.Args[0]
 11406  		if v_0_0_0_0_0.Op != OpLsh32x64 {
 11407  			break
 11408  		}
 11409  		_ = v_0_0_0_0_0.Args[1]
 11410  		v_0_0_0_0_0_0 := v_0_0_0_0_0.Args[0]
 11411  		if v_0_0_0_0_0_0.Op != OpZeroExt16to32 {
 11412  			break
 11413  		}
 11414  		if x != v_0_0_0_0_0_0.Args[0] {
 11415  			break
 11416  		}
 11417  		v_0_0_0_0_0_1 := v_0_0_0_0_0.Args[1]
 11418  		if v_0_0_0_0_0_1.Op != OpConst64 {
 11419  			break
 11420  		}
 11421  		if v_0_0_0_0_0_1.AuxInt != 16 {
 11422  			break
 11423  		}
 11424  		mul := v_0_0_0_0.Args[1]
 11425  		if mul.Op != OpMul32 {
 11426  			break
 11427  		}
 11428  		_ = mul.Args[1]
 11429  		mul_0 := mul.Args[0]
 11430  		if mul_0.Op != OpConst32 {
 11431  			break
 11432  		}
 11433  		m := mul_0.AuxInt
 11434  		mul_1 := mul.Args[1]
 11435  		if mul_1.Op != OpZeroExt16to32 {
 11436  			break
 11437  		}
 11438  		if x != mul_1.Args[0] {
 11439  			break
 11440  		}
 11441  		v_0_0_0_1 := v_0_0_0.Args[1]
 11442  		if v_0_0_0_1.Op != OpConst64 {
 11443  			break
 11444  		}
 11445  		s := v_0_0_0_1.AuxInt
 11446  		v_0_1 := v_0.Args[1]
 11447  		if v_0_1.Op != OpConst16 {
 11448  			break
 11449  		}
 11450  		c := v_0_1.AuxInt
 11451  		if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(umagic(16, c).m) && s == 16+umagic(16, c).s-1 && x.Op != OpConst16 && udivisibleOK(16, c)) {
 11452  			break
 11453  		}
 11454  		v.reset(OpLeq16U)
 11455  		v0 := b.NewValue0(v.Pos, OpRotateLeft16, typ.UInt16)
 11456  		v1 := b.NewValue0(v.Pos, OpMul16, typ.UInt16)
 11457  		v2 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
 11458  		v2.AuxInt = int64(int16(udivisible(16, c).m))
 11459  		v1.AddArg(v2)
 11460  		v1.AddArg(x)
 11461  		v0.AddArg(v1)
 11462  		v3 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
 11463  		v3.AuxInt = int64(16 - udivisible(16, c).k)
 11464  		v0.AddArg(v3)
 11465  		v.AddArg(v0)
 11466  		v4 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
 11467  		v4.AuxInt = int64(int16(udivisible(16, c).max))
 11468  		v.AddArg(v4)
 11469  		return true
 11470  	}
 11471  	// match: (Eq16 (Mul16 (Trunc32to16 (Rsh32Ux64 (Avg32u (Lsh32x64 (ZeroExt16to32 x) (Const64 [16])) mul:(Mul32 (ZeroExt16to32 x) (Const32 [m]))) (Const64 [s]))) (Const16 [c])) x)
 11472  	// cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(umagic(16,c).m) && s == 16+umagic(16,c).s-1 && x.Op != OpConst16 && udivisibleOK(16,c)
 11473  	// result: (Leq16U (RotateLeft16 <typ.UInt16> (Mul16 <typ.UInt16> (Const16 <typ.UInt16> [int64(int16(udivisible(16,c).m))]) x) (Const16 <typ.UInt16> [int64(16-udivisible(16,c).k)]) ) (Const16 <typ.UInt16> [int64(int16(udivisible(16,c).max))]) )
 11474  	for {
 11475  		x := v.Args[1]
 11476  		v_0 := v.Args[0]
 11477  		if v_0.Op != OpMul16 {
 11478  			break
 11479  		}
 11480  		_ = v_0.Args[1]
 11481  		v_0_0 := v_0.Args[0]
 11482  		if v_0_0.Op != OpTrunc32to16 {
 11483  			break
 11484  		}
 11485  		v_0_0_0 := v_0_0.Args[0]
 11486  		if v_0_0_0.Op != OpRsh32Ux64 {
 11487  			break
 11488  		}
 11489  		_ = v_0_0_0.Args[1]
 11490  		v_0_0_0_0 := v_0_0_0.Args[0]
 11491  		if v_0_0_0_0.Op != OpAvg32u {
 11492  			break
 11493  		}
 11494  		_ = v_0_0_0_0.Args[1]
 11495  		v_0_0_0_0_0 := v_0_0_0_0.Args[0]
 11496  		if v_0_0_0_0_0.Op != OpLsh32x64 {
 11497  			break
 11498  		}
 11499  		_ = v_0_0_0_0_0.Args[1]
 11500  		v_0_0_0_0_0_0 := v_0_0_0_0_0.Args[0]
 11501  		if v_0_0_0_0_0_0.Op != OpZeroExt16to32 {
 11502  			break
 11503  		}
 11504  		if x != v_0_0_0_0_0_0.Args[0] {
 11505  			break
 11506  		}
 11507  		v_0_0_0_0_0_1 := v_0_0_0_0_0.Args[1]
 11508  		if v_0_0_0_0_0_1.Op != OpConst64 {
 11509  			break
 11510  		}
 11511  		if v_0_0_0_0_0_1.AuxInt != 16 {
 11512  			break
 11513  		}
 11514  		mul := v_0_0_0_0.Args[1]
 11515  		if mul.Op != OpMul32 {
 11516  			break
 11517  		}
 11518  		_ = mul.Args[1]
 11519  		mul_0 := mul.Args[0]
 11520  		if mul_0.Op != OpZeroExt16to32 {
 11521  			break
 11522  		}
 11523  		if x != mul_0.Args[0] {
 11524  			break
 11525  		}
 11526  		mul_1 := mul.Args[1]
 11527  		if mul_1.Op != OpConst32 {
 11528  			break
 11529  		}
 11530  		m := mul_1.AuxInt
 11531  		v_0_0_0_1 := v_0_0_0.Args[1]
 11532  		if v_0_0_0_1.Op != OpConst64 {
 11533  			break
 11534  		}
 11535  		s := v_0_0_0_1.AuxInt
 11536  		v_0_1 := v_0.Args[1]
 11537  		if v_0_1.Op != OpConst16 {
 11538  			break
 11539  		}
 11540  		c := v_0_1.AuxInt
 11541  		if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(umagic(16, c).m) && s == 16+umagic(16, c).s-1 && x.Op != OpConst16 && udivisibleOK(16, c)) {
 11542  			break
 11543  		}
 11544  		v.reset(OpLeq16U)
 11545  		v0 := b.NewValue0(v.Pos, OpRotateLeft16, typ.UInt16)
 11546  		v1 := b.NewValue0(v.Pos, OpMul16, typ.UInt16)
 11547  		v2 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
 11548  		v2.AuxInt = int64(int16(udivisible(16, c).m))
 11549  		v1.AddArg(v2)
 11550  		v1.AddArg(x)
 11551  		v0.AddArg(v1)
 11552  		v3 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
 11553  		v3.AuxInt = int64(16 - udivisible(16, c).k)
 11554  		v0.AddArg(v3)
 11555  		v.AddArg(v0)
 11556  		v4 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
 11557  		v4.AuxInt = int64(int16(udivisible(16, c).max))
 11558  		v.AddArg(v4)
 11559  		return true
 11560  	}
 11561  	// match: (Eq16 x (Mul16 (Const16 [c]) (Sub16 (Rsh32x64 mul:(Mul32 (Const32 [m]) (SignExt16to32 x)) (Const64 [s])) (Rsh32x64 (SignExt16to32 x) (Const64 [31])))))
 11562  	// cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(smagic(16,c).m) && s == 16+smagic(16,c).s && x.Op != OpConst16 && sdivisibleOK(16,c)
 11563  	// result: (Leq16U (RotateLeft16 <typ.UInt16> (Add16 <typ.UInt16> (Mul16 <typ.UInt16> (Const16 <typ.UInt16> [int64(int16(sdivisible(16,c).m))]) x) (Const16 <typ.UInt16> [int64(int16(sdivisible(16,c).a))]) ) (Const16 <typ.UInt16> [int64(16-sdivisible(16,c).k)]) ) (Const16 <typ.UInt16> [int64(int16(sdivisible(16,c).max))]) )
 11564  	for {
 11565  		_ = v.Args[1]
 11566  		x := v.Args[0]
 11567  		v_1 := v.Args[1]
 11568  		if v_1.Op != OpMul16 {
 11569  			break
 11570  		}
 11571  		_ = v_1.Args[1]
 11572  		v_1_0 := v_1.Args[0]
 11573  		if v_1_0.Op != OpConst16 {
 11574  			break
 11575  		}
 11576  		c := v_1_0.AuxInt
 11577  		v_1_1 := v_1.Args[1]
 11578  		if v_1_1.Op != OpSub16 {
 11579  			break
 11580  		}
 11581  		_ = v_1_1.Args[1]
 11582  		v_1_1_0 := v_1_1.Args[0]
 11583  		if v_1_1_0.Op != OpRsh32x64 {
 11584  			break
 11585  		}
 11586  		_ = v_1_1_0.Args[1]
 11587  		mul := v_1_1_0.Args[0]
 11588  		if mul.Op != OpMul32 {
 11589  			break
 11590  		}
 11591  		_ = mul.Args[1]
 11592  		mul_0 := mul.Args[0]
 11593  		if mul_0.Op != OpConst32 {
 11594  			break
 11595  		}
 11596  		m := mul_0.AuxInt
 11597  		mul_1 := mul.Args[1]
 11598  		if mul_1.Op != OpSignExt16to32 {
 11599  			break
 11600  		}
 11601  		if x != mul_1.Args[0] {
 11602  			break
 11603  		}
 11604  		v_1_1_0_1 := v_1_1_0.Args[1]
 11605  		if v_1_1_0_1.Op != OpConst64 {
 11606  			break
 11607  		}
 11608  		s := v_1_1_0_1.AuxInt
 11609  		v_1_1_1 := v_1_1.Args[1]
 11610  		if v_1_1_1.Op != OpRsh32x64 {
 11611  			break
 11612  		}
 11613  		_ = v_1_1_1.Args[1]
 11614  		v_1_1_1_0 := v_1_1_1.Args[0]
 11615  		if v_1_1_1_0.Op != OpSignExt16to32 {
 11616  			break
 11617  		}
 11618  		if x != v_1_1_1_0.Args[0] {
 11619  			break
 11620  		}
 11621  		v_1_1_1_1 := v_1_1_1.Args[1]
 11622  		if v_1_1_1_1.Op != OpConst64 {
 11623  			break
 11624  		}
 11625  		if v_1_1_1_1.AuxInt != 31 {
 11626  			break
 11627  		}
 11628  		if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(smagic(16, c).m) && s == 16+smagic(16, c).s && x.Op != OpConst16 && sdivisibleOK(16, c)) {
 11629  			break
 11630  		}
 11631  		v.reset(OpLeq16U)
 11632  		v0 := b.NewValue0(v.Pos, OpRotateLeft16, typ.UInt16)
 11633  		v1 := b.NewValue0(v.Pos, OpAdd16, typ.UInt16)
 11634  		v2 := b.NewValue0(v.Pos, OpMul16, typ.UInt16)
 11635  		v3 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
 11636  		v3.AuxInt = int64(int16(sdivisible(16, c).m))
 11637  		v2.AddArg(v3)
 11638  		v2.AddArg(x)
 11639  		v1.AddArg(v2)
 11640  		v4 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
 11641  		v4.AuxInt = int64(int16(sdivisible(16, c).a))
 11642  		v1.AddArg(v4)
 11643  		v0.AddArg(v1)
 11644  		v5 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
 11645  		v5.AuxInt = int64(16 - sdivisible(16, c).k)
 11646  		v0.AddArg(v5)
 11647  		v.AddArg(v0)
 11648  		v6 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
 11649  		v6.AuxInt = int64(int16(sdivisible(16, c).max))
 11650  		v.AddArg(v6)
 11651  		return true
 11652  	}
 11653  	// match: (Eq16 x (Mul16 (Const16 [c]) (Sub16 (Rsh32x64 mul:(Mul32 (SignExt16to32 x) (Const32 [m])) (Const64 [s])) (Rsh32x64 (SignExt16to32 x) (Const64 [31])))))
 11654  	// cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(smagic(16,c).m) && s == 16+smagic(16,c).s && x.Op != OpConst16 && sdivisibleOK(16,c)
 11655  	// result: (Leq16U (RotateLeft16 <typ.UInt16> (Add16 <typ.UInt16> (Mul16 <typ.UInt16> (Const16 <typ.UInt16> [int64(int16(sdivisible(16,c).m))]) x) (Const16 <typ.UInt16> [int64(int16(sdivisible(16,c).a))]) ) (Const16 <typ.UInt16> [int64(16-sdivisible(16,c).k)]) ) (Const16 <typ.UInt16> [int64(int16(sdivisible(16,c).max))]) )
 11656  	for {
 11657  		_ = v.Args[1]
 11658  		x := v.Args[0]
 11659  		v_1 := v.Args[1]
 11660  		if v_1.Op != OpMul16 {
 11661  			break
 11662  		}
 11663  		_ = v_1.Args[1]
 11664  		v_1_0 := v_1.Args[0]
 11665  		if v_1_0.Op != OpConst16 {
 11666  			break
 11667  		}
 11668  		c := v_1_0.AuxInt
 11669  		v_1_1 := v_1.Args[1]
 11670  		if v_1_1.Op != OpSub16 {
 11671  			break
 11672  		}
 11673  		_ = v_1_1.Args[1]
 11674  		v_1_1_0 := v_1_1.Args[0]
 11675  		if v_1_1_0.Op != OpRsh32x64 {
 11676  			break
 11677  		}
 11678  		_ = v_1_1_0.Args[1]
 11679  		mul := v_1_1_0.Args[0]
 11680  		if mul.Op != OpMul32 {
 11681  			break
 11682  		}
 11683  		_ = mul.Args[1]
 11684  		mul_0 := mul.Args[0]
 11685  		if mul_0.Op != OpSignExt16to32 {
 11686  			break
 11687  		}
 11688  		if x != mul_0.Args[0] {
 11689  			break
 11690  		}
 11691  		mul_1 := mul.Args[1]
 11692  		if mul_1.Op != OpConst32 {
 11693  			break
 11694  		}
 11695  		m := mul_1.AuxInt
 11696  		v_1_1_0_1 := v_1_1_0.Args[1]
 11697  		if v_1_1_0_1.Op != OpConst64 {
 11698  			break
 11699  		}
 11700  		s := v_1_1_0_1.AuxInt
 11701  		v_1_1_1 := v_1_1.Args[1]
 11702  		if v_1_1_1.Op != OpRsh32x64 {
 11703  			break
 11704  		}
 11705  		_ = v_1_1_1.Args[1]
 11706  		v_1_1_1_0 := v_1_1_1.Args[0]
 11707  		if v_1_1_1_0.Op != OpSignExt16to32 {
 11708  			break
 11709  		}
 11710  		if x != v_1_1_1_0.Args[0] {
 11711  			break
 11712  		}
 11713  		v_1_1_1_1 := v_1_1_1.Args[1]
 11714  		if v_1_1_1_1.Op != OpConst64 {
 11715  			break
 11716  		}
 11717  		if v_1_1_1_1.AuxInt != 31 {
 11718  			break
 11719  		}
 11720  		if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(smagic(16, c).m) && s == 16+smagic(16, c).s && x.Op != OpConst16 && sdivisibleOK(16, c)) {
 11721  			break
 11722  		}
 11723  		v.reset(OpLeq16U)
 11724  		v0 := b.NewValue0(v.Pos, OpRotateLeft16, typ.UInt16)
 11725  		v1 := b.NewValue0(v.Pos, OpAdd16, typ.UInt16)
 11726  		v2 := b.NewValue0(v.Pos, OpMul16, typ.UInt16)
 11727  		v3 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
 11728  		v3.AuxInt = int64(int16(sdivisible(16, c).m))
 11729  		v2.AddArg(v3)
 11730  		v2.AddArg(x)
 11731  		v1.AddArg(v2)
 11732  		v4 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
 11733  		v4.AuxInt = int64(int16(sdivisible(16, c).a))
 11734  		v1.AddArg(v4)
 11735  		v0.AddArg(v1)
 11736  		v5 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
 11737  		v5.AuxInt = int64(16 - sdivisible(16, c).k)
 11738  		v0.AddArg(v5)
 11739  		v.AddArg(v0)
 11740  		v6 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
 11741  		v6.AuxInt = int64(int16(sdivisible(16, c).max))
 11742  		v.AddArg(v6)
 11743  		return true
 11744  	}
 11745  	// match: (Eq16 x (Mul16 (Sub16 (Rsh32x64 mul:(Mul32 (Const32 [m]) (SignExt16to32 x)) (Const64 [s])) (Rsh32x64 (SignExt16to32 x) (Const64 [31]))) (Const16 [c])))
 11746  	// cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(smagic(16,c).m) && s == 16+smagic(16,c).s && x.Op != OpConst16 && sdivisibleOK(16,c)
 11747  	// result: (Leq16U (RotateLeft16 <typ.UInt16> (Add16 <typ.UInt16> (Mul16 <typ.UInt16> (Const16 <typ.UInt16> [int64(int16(sdivisible(16,c).m))]) x) (Const16 <typ.UInt16> [int64(int16(sdivisible(16,c).a))]) ) (Const16 <typ.UInt16> [int64(16-sdivisible(16,c).k)]) ) (Const16 <typ.UInt16> [int64(int16(sdivisible(16,c).max))]) )
 11748  	for {
 11749  		_ = v.Args[1]
 11750  		x := v.Args[0]
 11751  		v_1 := v.Args[1]
 11752  		if v_1.Op != OpMul16 {
 11753  			break
 11754  		}
 11755  		_ = v_1.Args[1]
 11756  		v_1_0 := v_1.Args[0]
 11757  		if v_1_0.Op != OpSub16 {
 11758  			break
 11759  		}
 11760  		_ = v_1_0.Args[1]
 11761  		v_1_0_0 := v_1_0.Args[0]
 11762  		if v_1_0_0.Op != OpRsh32x64 {
 11763  			break
 11764  		}
 11765  		_ = v_1_0_0.Args[1]
 11766  		mul := v_1_0_0.Args[0]
 11767  		if mul.Op != OpMul32 {
 11768  			break
 11769  		}
 11770  		_ = mul.Args[1]
 11771  		mul_0 := mul.Args[0]
 11772  		if mul_0.Op != OpConst32 {
 11773  			break
 11774  		}
 11775  		m := mul_0.AuxInt
 11776  		mul_1 := mul.Args[1]
 11777  		if mul_1.Op != OpSignExt16to32 {
 11778  			break
 11779  		}
 11780  		if x != mul_1.Args[0] {
 11781  			break
 11782  		}
 11783  		v_1_0_0_1 := v_1_0_0.Args[1]
 11784  		if v_1_0_0_1.Op != OpConst64 {
 11785  			break
 11786  		}
 11787  		s := v_1_0_0_1.AuxInt
 11788  		v_1_0_1 := v_1_0.Args[1]
 11789  		if v_1_0_1.Op != OpRsh32x64 {
 11790  			break
 11791  		}
 11792  		_ = v_1_0_1.Args[1]
 11793  		v_1_0_1_0 := v_1_0_1.Args[0]
 11794  		if v_1_0_1_0.Op != OpSignExt16to32 {
 11795  			break
 11796  		}
 11797  		if x != v_1_0_1_0.Args[0] {
 11798  			break
 11799  		}
 11800  		v_1_0_1_1 := v_1_0_1.Args[1]
 11801  		if v_1_0_1_1.Op != OpConst64 {
 11802  			break
 11803  		}
 11804  		if v_1_0_1_1.AuxInt != 31 {
 11805  			break
 11806  		}
 11807  		v_1_1 := v_1.Args[1]
 11808  		if v_1_1.Op != OpConst16 {
 11809  			break
 11810  		}
 11811  		c := v_1_1.AuxInt
 11812  		if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(smagic(16, c).m) && s == 16+smagic(16, c).s && x.Op != OpConst16 && sdivisibleOK(16, c)) {
 11813  			break
 11814  		}
 11815  		v.reset(OpLeq16U)
 11816  		v0 := b.NewValue0(v.Pos, OpRotateLeft16, typ.UInt16)
 11817  		v1 := b.NewValue0(v.Pos, OpAdd16, typ.UInt16)
 11818  		v2 := b.NewValue0(v.Pos, OpMul16, typ.UInt16)
 11819  		v3 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
 11820  		v3.AuxInt = int64(int16(sdivisible(16, c).m))
 11821  		v2.AddArg(v3)
 11822  		v2.AddArg(x)
 11823  		v1.AddArg(v2)
 11824  		v4 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
 11825  		v4.AuxInt = int64(int16(sdivisible(16, c).a))
 11826  		v1.AddArg(v4)
 11827  		v0.AddArg(v1)
 11828  		v5 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
 11829  		v5.AuxInt = int64(16 - sdivisible(16, c).k)
 11830  		v0.AddArg(v5)
 11831  		v.AddArg(v0)
 11832  		v6 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
 11833  		v6.AuxInt = int64(int16(sdivisible(16, c).max))
 11834  		v.AddArg(v6)
 11835  		return true
 11836  	}
 11837  	// match: (Eq16 x (Mul16 (Sub16 (Rsh32x64 mul:(Mul32 (SignExt16to32 x) (Const32 [m])) (Const64 [s])) (Rsh32x64 (SignExt16to32 x) (Const64 [31]))) (Const16 [c])))
 11838  	// cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(smagic(16,c).m) && s == 16+smagic(16,c).s && x.Op != OpConst16 && sdivisibleOK(16,c)
 11839  	// result: (Leq16U (RotateLeft16 <typ.UInt16> (Add16 <typ.UInt16> (Mul16 <typ.UInt16> (Const16 <typ.UInt16> [int64(int16(sdivisible(16,c).m))]) x) (Const16 <typ.UInt16> [int64(int16(sdivisible(16,c).a))]) ) (Const16 <typ.UInt16> [int64(16-sdivisible(16,c).k)]) ) (Const16 <typ.UInt16> [int64(int16(sdivisible(16,c).max))]) )
 11840  	for {
 11841  		_ = v.Args[1]
 11842  		x := v.Args[0]
 11843  		v_1 := v.Args[1]
 11844  		if v_1.Op != OpMul16 {
 11845  			break
 11846  		}
 11847  		_ = v_1.Args[1]
 11848  		v_1_0 := v_1.Args[0]
 11849  		if v_1_0.Op != OpSub16 {
 11850  			break
 11851  		}
 11852  		_ = v_1_0.Args[1]
 11853  		v_1_0_0 := v_1_0.Args[0]
 11854  		if v_1_0_0.Op != OpRsh32x64 {
 11855  			break
 11856  		}
 11857  		_ = v_1_0_0.Args[1]
 11858  		mul := v_1_0_0.Args[0]
 11859  		if mul.Op != OpMul32 {
 11860  			break
 11861  		}
 11862  		_ = mul.Args[1]
 11863  		mul_0 := mul.Args[0]
 11864  		if mul_0.Op != OpSignExt16to32 {
 11865  			break
 11866  		}
 11867  		if x != mul_0.Args[0] {
 11868  			break
 11869  		}
 11870  		mul_1 := mul.Args[1]
 11871  		if mul_1.Op != OpConst32 {
 11872  			break
 11873  		}
 11874  		m := mul_1.AuxInt
 11875  		v_1_0_0_1 := v_1_0_0.Args[1]
 11876  		if v_1_0_0_1.Op != OpConst64 {
 11877  			break
 11878  		}
 11879  		s := v_1_0_0_1.AuxInt
 11880  		v_1_0_1 := v_1_0.Args[1]
 11881  		if v_1_0_1.Op != OpRsh32x64 {
 11882  			break
 11883  		}
 11884  		_ = v_1_0_1.Args[1]
 11885  		v_1_0_1_0 := v_1_0_1.Args[0]
 11886  		if v_1_0_1_0.Op != OpSignExt16to32 {
 11887  			break
 11888  		}
 11889  		if x != v_1_0_1_0.Args[0] {
 11890  			break
 11891  		}
 11892  		v_1_0_1_1 := v_1_0_1.Args[1]
 11893  		if v_1_0_1_1.Op != OpConst64 {
 11894  			break
 11895  		}
 11896  		if v_1_0_1_1.AuxInt != 31 {
 11897  			break
 11898  		}
 11899  		v_1_1 := v_1.Args[1]
 11900  		if v_1_1.Op != OpConst16 {
 11901  			break
 11902  		}
 11903  		c := v_1_1.AuxInt
 11904  		if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(smagic(16, c).m) && s == 16+smagic(16, c).s && x.Op != OpConst16 && sdivisibleOK(16, c)) {
 11905  			break
 11906  		}
 11907  		v.reset(OpLeq16U)
 11908  		v0 := b.NewValue0(v.Pos, OpRotateLeft16, typ.UInt16)
 11909  		v1 := b.NewValue0(v.Pos, OpAdd16, typ.UInt16)
 11910  		v2 := b.NewValue0(v.Pos, OpMul16, typ.UInt16)
 11911  		v3 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
 11912  		v3.AuxInt = int64(int16(sdivisible(16, c).m))
 11913  		v2.AddArg(v3)
 11914  		v2.AddArg(x)
 11915  		v1.AddArg(v2)
 11916  		v4 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
 11917  		v4.AuxInt = int64(int16(sdivisible(16, c).a))
 11918  		v1.AddArg(v4)
 11919  		v0.AddArg(v1)
 11920  		v5 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
 11921  		v5.AuxInt = int64(16 - sdivisible(16, c).k)
 11922  		v0.AddArg(v5)
 11923  		v.AddArg(v0)
 11924  		v6 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
 11925  		v6.AuxInt = int64(int16(sdivisible(16, c).max))
 11926  		v.AddArg(v6)
 11927  		return true
 11928  	}
 11929  	// match: (Eq16 (Mul16 (Const16 [c]) (Sub16 (Rsh32x64 mul:(Mul32 (Const32 [m]) (SignExt16to32 x)) (Const64 [s])) (Rsh32x64 (SignExt16to32 x) (Const64 [31])))) x)
 11930  	// cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(smagic(16,c).m) && s == 16+smagic(16,c).s && x.Op != OpConst16 && sdivisibleOK(16,c)
 11931  	// result: (Leq16U (RotateLeft16 <typ.UInt16> (Add16 <typ.UInt16> (Mul16 <typ.UInt16> (Const16 <typ.UInt16> [int64(int16(sdivisible(16,c).m))]) x) (Const16 <typ.UInt16> [int64(int16(sdivisible(16,c).a))]) ) (Const16 <typ.UInt16> [int64(16-sdivisible(16,c).k)]) ) (Const16 <typ.UInt16> [int64(int16(sdivisible(16,c).max))]) )
 11932  	for {
 11933  		x := v.Args[1]
 11934  		v_0 := v.Args[0]
 11935  		if v_0.Op != OpMul16 {
 11936  			break
 11937  		}
 11938  		_ = v_0.Args[1]
 11939  		v_0_0 := v_0.Args[0]
 11940  		if v_0_0.Op != OpConst16 {
 11941  			break
 11942  		}
 11943  		c := v_0_0.AuxInt
 11944  		v_0_1 := v_0.Args[1]
 11945  		if v_0_1.Op != OpSub16 {
 11946  			break
 11947  		}
 11948  		_ = v_0_1.Args[1]
 11949  		v_0_1_0 := v_0_1.Args[0]
 11950  		if v_0_1_0.Op != OpRsh32x64 {
 11951  			break
 11952  		}
 11953  		_ = v_0_1_0.Args[1]
 11954  		mul := v_0_1_0.Args[0]
 11955  		if mul.Op != OpMul32 {
 11956  			break
 11957  		}
 11958  		_ = mul.Args[1]
 11959  		mul_0 := mul.Args[0]
 11960  		if mul_0.Op != OpConst32 {
 11961  			break
 11962  		}
 11963  		m := mul_0.AuxInt
 11964  		mul_1 := mul.Args[1]
 11965  		if mul_1.Op != OpSignExt16to32 {
 11966  			break
 11967  		}
 11968  		if x != mul_1.Args[0] {
 11969  			break
 11970  		}
 11971  		v_0_1_0_1 := v_0_1_0.Args[1]
 11972  		if v_0_1_0_1.Op != OpConst64 {
 11973  			break
 11974  		}
 11975  		s := v_0_1_0_1.AuxInt
 11976  		v_0_1_1 := v_0_1.Args[1]
 11977  		if v_0_1_1.Op != OpRsh32x64 {
 11978  			break
 11979  		}
 11980  		_ = v_0_1_1.Args[1]
 11981  		v_0_1_1_0 := v_0_1_1.Args[0]
 11982  		if v_0_1_1_0.Op != OpSignExt16to32 {
 11983  			break
 11984  		}
 11985  		if x != v_0_1_1_0.Args[0] {
 11986  			break
 11987  		}
 11988  		v_0_1_1_1 := v_0_1_1.Args[1]
 11989  		if v_0_1_1_1.Op != OpConst64 {
 11990  			break
 11991  		}
 11992  		if v_0_1_1_1.AuxInt != 31 {
 11993  			break
 11994  		}
 11995  		if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(smagic(16, c).m) && s == 16+smagic(16, c).s && x.Op != OpConst16 && sdivisibleOK(16, c)) {
 11996  			break
 11997  		}
 11998  		v.reset(OpLeq16U)
 11999  		v0 := b.NewValue0(v.Pos, OpRotateLeft16, typ.UInt16)
 12000  		v1 := b.NewValue0(v.Pos, OpAdd16, typ.UInt16)
 12001  		v2 := b.NewValue0(v.Pos, OpMul16, typ.UInt16)
 12002  		v3 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
 12003  		v3.AuxInt = int64(int16(sdivisible(16, c).m))
 12004  		v2.AddArg(v3)
 12005  		v2.AddArg(x)
 12006  		v1.AddArg(v2)
 12007  		v4 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
 12008  		v4.AuxInt = int64(int16(sdivisible(16, c).a))
 12009  		v1.AddArg(v4)
 12010  		v0.AddArg(v1)
 12011  		v5 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
 12012  		v5.AuxInt = int64(16 - sdivisible(16, c).k)
 12013  		v0.AddArg(v5)
 12014  		v.AddArg(v0)
 12015  		v6 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
 12016  		v6.AuxInt = int64(int16(sdivisible(16, c).max))
 12017  		v.AddArg(v6)
 12018  		return true
 12019  	}
 12020  	// match: (Eq16 (Mul16 (Const16 [c]) (Sub16 (Rsh32x64 mul:(Mul32 (SignExt16to32 x) (Const32 [m])) (Const64 [s])) (Rsh32x64 (SignExt16to32 x) (Const64 [31])))) x)
 12021  	// cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(smagic(16,c).m) && s == 16+smagic(16,c).s && x.Op != OpConst16 && sdivisibleOK(16,c)
 12022  	// result: (Leq16U (RotateLeft16 <typ.UInt16> (Add16 <typ.UInt16> (Mul16 <typ.UInt16> (Const16 <typ.UInt16> [int64(int16(sdivisible(16,c).m))]) x) (Const16 <typ.UInt16> [int64(int16(sdivisible(16,c).a))]) ) (Const16 <typ.UInt16> [int64(16-sdivisible(16,c).k)]) ) (Const16 <typ.UInt16> [int64(int16(sdivisible(16,c).max))]) )
 12023  	for {
 12024  		x := v.Args[1]
 12025  		v_0 := v.Args[0]
 12026  		if v_0.Op != OpMul16 {
 12027  			break
 12028  		}
 12029  		_ = v_0.Args[1]
 12030  		v_0_0 := v_0.Args[0]
 12031  		if v_0_0.Op != OpConst16 {
 12032  			break
 12033  		}
 12034  		c := v_0_0.AuxInt
 12035  		v_0_1 := v_0.Args[1]
 12036  		if v_0_1.Op != OpSub16 {
 12037  			break
 12038  		}
 12039  		_ = v_0_1.Args[1]
 12040  		v_0_1_0 := v_0_1.Args[0]
 12041  		if v_0_1_0.Op != OpRsh32x64 {
 12042  			break
 12043  		}
 12044  		_ = v_0_1_0.Args[1]
 12045  		mul := v_0_1_0.Args[0]
 12046  		if mul.Op != OpMul32 {
 12047  			break
 12048  		}
 12049  		_ = mul.Args[1]
 12050  		mul_0 := mul.Args[0]
 12051  		if mul_0.Op != OpSignExt16to32 {
 12052  			break
 12053  		}
 12054  		if x != mul_0.Args[0] {
 12055  			break
 12056  		}
 12057  		mul_1 := mul.Args[1]
 12058  		if mul_1.Op != OpConst32 {
 12059  			break
 12060  		}
 12061  		m := mul_1.AuxInt
 12062  		v_0_1_0_1 := v_0_1_0.Args[1]
 12063  		if v_0_1_0_1.Op != OpConst64 {
 12064  			break
 12065  		}
 12066  		s := v_0_1_0_1.AuxInt
 12067  		v_0_1_1 := v_0_1.Args[1]
 12068  		if v_0_1_1.Op != OpRsh32x64 {
 12069  			break
 12070  		}
 12071  		_ = v_0_1_1.Args[1]
 12072  		v_0_1_1_0 := v_0_1_1.Args[0]
 12073  		if v_0_1_1_0.Op != OpSignExt16to32 {
 12074  			break
 12075  		}
 12076  		if x != v_0_1_1_0.Args[0] {
 12077  			break
 12078  		}
 12079  		v_0_1_1_1 := v_0_1_1.Args[1]
 12080  		if v_0_1_1_1.Op != OpConst64 {
 12081  			break
 12082  		}
 12083  		if v_0_1_1_1.AuxInt != 31 {
 12084  			break
 12085  		}
 12086  		if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(smagic(16, c).m) && s == 16+smagic(16, c).s && x.Op != OpConst16 && sdivisibleOK(16, c)) {
 12087  			break
 12088  		}
 12089  		v.reset(OpLeq16U)
 12090  		v0 := b.NewValue0(v.Pos, OpRotateLeft16, typ.UInt16)
 12091  		v1 := b.NewValue0(v.Pos, OpAdd16, typ.UInt16)
 12092  		v2 := b.NewValue0(v.Pos, OpMul16, typ.UInt16)
 12093  		v3 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
 12094  		v3.AuxInt = int64(int16(sdivisible(16, c).m))
 12095  		v2.AddArg(v3)
 12096  		v2.AddArg(x)
 12097  		v1.AddArg(v2)
 12098  		v4 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
 12099  		v4.AuxInt = int64(int16(sdivisible(16, c).a))
 12100  		v1.AddArg(v4)
 12101  		v0.AddArg(v1)
 12102  		v5 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
 12103  		v5.AuxInt = int64(16 - sdivisible(16, c).k)
 12104  		v0.AddArg(v5)
 12105  		v.AddArg(v0)
 12106  		v6 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
 12107  		v6.AuxInt = int64(int16(sdivisible(16, c).max))
 12108  		v.AddArg(v6)
 12109  		return true
 12110  	}
 12111  	// match: (Eq16 (Mul16 (Sub16 (Rsh32x64 mul:(Mul32 (Const32 [m]) (SignExt16to32 x)) (Const64 [s])) (Rsh32x64 (SignExt16to32 x) (Const64 [31]))) (Const16 [c])) x)
 12112  	// cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(smagic(16,c).m) && s == 16+smagic(16,c).s && x.Op != OpConst16 && sdivisibleOK(16,c)
 12113  	// result: (Leq16U (RotateLeft16 <typ.UInt16> (Add16 <typ.UInt16> (Mul16 <typ.UInt16> (Const16 <typ.UInt16> [int64(int16(sdivisible(16,c).m))]) x) (Const16 <typ.UInt16> [int64(int16(sdivisible(16,c).a))]) ) (Const16 <typ.UInt16> [int64(16-sdivisible(16,c).k)]) ) (Const16 <typ.UInt16> [int64(int16(sdivisible(16,c).max))]) )
 12114  	for {
 12115  		x := v.Args[1]
 12116  		v_0 := v.Args[0]
 12117  		if v_0.Op != OpMul16 {
 12118  			break
 12119  		}
 12120  		_ = v_0.Args[1]
 12121  		v_0_0 := v_0.Args[0]
 12122  		if v_0_0.Op != OpSub16 {
 12123  			break
 12124  		}
 12125  		_ = v_0_0.Args[1]
 12126  		v_0_0_0 := v_0_0.Args[0]
 12127  		if v_0_0_0.Op != OpRsh32x64 {
 12128  			break
 12129  		}
 12130  		_ = v_0_0_0.Args[1]
 12131  		mul := v_0_0_0.Args[0]
 12132  		if mul.Op != OpMul32 {
 12133  			break
 12134  		}
 12135  		_ = mul.Args[1]
 12136  		mul_0 := mul.Args[0]
 12137  		if mul_0.Op != OpConst32 {
 12138  			break
 12139  		}
 12140  		m := mul_0.AuxInt
 12141  		mul_1 := mul.Args[1]
 12142  		if mul_1.Op != OpSignExt16to32 {
 12143  			break
 12144  		}
 12145  		if x != mul_1.Args[0] {
 12146  			break
 12147  		}
 12148  		v_0_0_0_1 := v_0_0_0.Args[1]
 12149  		if v_0_0_0_1.Op != OpConst64 {
 12150  			break
 12151  		}
 12152  		s := v_0_0_0_1.AuxInt
 12153  		v_0_0_1 := v_0_0.Args[1]
 12154  		if v_0_0_1.Op != OpRsh32x64 {
 12155  			break
 12156  		}
 12157  		_ = v_0_0_1.Args[1]
 12158  		v_0_0_1_0 := v_0_0_1.Args[0]
 12159  		if v_0_0_1_0.Op != OpSignExt16to32 {
 12160  			break
 12161  		}
 12162  		if x != v_0_0_1_0.Args[0] {
 12163  			break
 12164  		}
 12165  		v_0_0_1_1 := v_0_0_1.Args[1]
 12166  		if v_0_0_1_1.Op != OpConst64 {
 12167  			break
 12168  		}
 12169  		if v_0_0_1_1.AuxInt != 31 {
 12170  			break
 12171  		}
 12172  		v_0_1 := v_0.Args[1]
 12173  		if v_0_1.Op != OpConst16 {
 12174  			break
 12175  		}
 12176  		c := v_0_1.AuxInt
 12177  		if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(smagic(16, c).m) && s == 16+smagic(16, c).s && x.Op != OpConst16 && sdivisibleOK(16, c)) {
 12178  			break
 12179  		}
 12180  		v.reset(OpLeq16U)
 12181  		v0 := b.NewValue0(v.Pos, OpRotateLeft16, typ.UInt16)
 12182  		v1 := b.NewValue0(v.Pos, OpAdd16, typ.UInt16)
 12183  		v2 := b.NewValue0(v.Pos, OpMul16, typ.UInt16)
 12184  		v3 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
 12185  		v3.AuxInt = int64(int16(sdivisible(16, c).m))
 12186  		v2.AddArg(v3)
 12187  		v2.AddArg(x)
 12188  		v1.AddArg(v2)
 12189  		v4 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
 12190  		v4.AuxInt = int64(int16(sdivisible(16, c).a))
 12191  		v1.AddArg(v4)
 12192  		v0.AddArg(v1)
 12193  		v5 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
 12194  		v5.AuxInt = int64(16 - sdivisible(16, c).k)
 12195  		v0.AddArg(v5)
 12196  		v.AddArg(v0)
 12197  		v6 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
 12198  		v6.AuxInt = int64(int16(sdivisible(16, c).max))
 12199  		v.AddArg(v6)
 12200  		return true
 12201  	}
 12202  	return false
 12203  }
 12204  func rewriteValuegeneric_OpEq16_50(v *Value) bool {
 12205  	b := v.Block
 12206  	typ := &b.Func.Config.Types
 12207  	// match: (Eq16 (Mul16 (Sub16 (Rsh32x64 mul:(Mul32 (SignExt16to32 x) (Const32 [m])) (Const64 [s])) (Rsh32x64 (SignExt16to32 x) (Const64 [31]))) (Const16 [c])) x)
 12208  	// cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(smagic(16,c).m) && s == 16+smagic(16,c).s && x.Op != OpConst16 && sdivisibleOK(16,c)
 12209  	// result: (Leq16U (RotateLeft16 <typ.UInt16> (Add16 <typ.UInt16> (Mul16 <typ.UInt16> (Const16 <typ.UInt16> [int64(int16(sdivisible(16,c).m))]) x) (Const16 <typ.UInt16> [int64(int16(sdivisible(16,c).a))]) ) (Const16 <typ.UInt16> [int64(16-sdivisible(16,c).k)]) ) (Const16 <typ.UInt16> [int64(int16(sdivisible(16,c).max))]) )
 12210  	for {
 12211  		x := v.Args[1]
 12212  		v_0 := v.Args[0]
 12213  		if v_0.Op != OpMul16 {
 12214  			break
 12215  		}
 12216  		_ = v_0.Args[1]
 12217  		v_0_0 := v_0.Args[0]
 12218  		if v_0_0.Op != OpSub16 {
 12219  			break
 12220  		}
 12221  		_ = v_0_0.Args[1]
 12222  		v_0_0_0 := v_0_0.Args[0]
 12223  		if v_0_0_0.Op != OpRsh32x64 {
 12224  			break
 12225  		}
 12226  		_ = v_0_0_0.Args[1]
 12227  		mul := v_0_0_0.Args[0]
 12228  		if mul.Op != OpMul32 {
 12229  			break
 12230  		}
 12231  		_ = mul.Args[1]
 12232  		mul_0 := mul.Args[0]
 12233  		if mul_0.Op != OpSignExt16to32 {
 12234  			break
 12235  		}
 12236  		if x != mul_0.Args[0] {
 12237  			break
 12238  		}
 12239  		mul_1 := mul.Args[1]
 12240  		if mul_1.Op != OpConst32 {
 12241  			break
 12242  		}
 12243  		m := mul_1.AuxInt
 12244  		v_0_0_0_1 := v_0_0_0.Args[1]
 12245  		if v_0_0_0_1.Op != OpConst64 {
 12246  			break
 12247  		}
 12248  		s := v_0_0_0_1.AuxInt
 12249  		v_0_0_1 := v_0_0.Args[1]
 12250  		if v_0_0_1.Op != OpRsh32x64 {
 12251  			break
 12252  		}
 12253  		_ = v_0_0_1.Args[1]
 12254  		v_0_0_1_0 := v_0_0_1.Args[0]
 12255  		if v_0_0_1_0.Op != OpSignExt16to32 {
 12256  			break
 12257  		}
 12258  		if x != v_0_0_1_0.Args[0] {
 12259  			break
 12260  		}
 12261  		v_0_0_1_1 := v_0_0_1.Args[1]
 12262  		if v_0_0_1_1.Op != OpConst64 {
 12263  			break
 12264  		}
 12265  		if v_0_0_1_1.AuxInt != 31 {
 12266  			break
 12267  		}
 12268  		v_0_1 := v_0.Args[1]
 12269  		if v_0_1.Op != OpConst16 {
 12270  			break
 12271  		}
 12272  		c := v_0_1.AuxInt
 12273  		if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(smagic(16, c).m) && s == 16+smagic(16, c).s && x.Op != OpConst16 && sdivisibleOK(16, c)) {
 12274  			break
 12275  		}
 12276  		v.reset(OpLeq16U)
 12277  		v0 := b.NewValue0(v.Pos, OpRotateLeft16, typ.UInt16)
 12278  		v1 := b.NewValue0(v.Pos, OpAdd16, typ.UInt16)
 12279  		v2 := b.NewValue0(v.Pos, OpMul16, typ.UInt16)
 12280  		v3 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
 12281  		v3.AuxInt = int64(int16(sdivisible(16, c).m))
 12282  		v2.AddArg(v3)
 12283  		v2.AddArg(x)
 12284  		v1.AddArg(v2)
 12285  		v4 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
 12286  		v4.AuxInt = int64(int16(sdivisible(16, c).a))
 12287  		v1.AddArg(v4)
 12288  		v0.AddArg(v1)
 12289  		v5 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
 12290  		v5.AuxInt = int64(16 - sdivisible(16, c).k)
 12291  		v0.AddArg(v5)
 12292  		v.AddArg(v0)
 12293  		v6 := b.NewValue0(v.Pos, OpConst16, typ.UInt16)
 12294  		v6.AuxInt = int64(int16(sdivisible(16, c).max))
 12295  		v.AddArg(v6)
 12296  		return true
 12297  	}
 12298  	// match: (Eq16 n (Lsh16x64 (Rsh16x64 (Add16 <t> n (Rsh16Ux64 <t> (Rsh16x64 <t> n (Const64 <typ.UInt64> [15])) (Const64 <typ.UInt64> [kbar]))) (Const64 <typ.UInt64> [k])) (Const64 <typ.UInt64> [k])))
 12299  	// cond: k > 0 && k < 15 && kbar == 16 - k
 12300  	// result: (Eq16 (And16 <t> n (Const16 <t> [int64(1<<uint(k)-1)])) (Const16 <t> [0]))
 12301  	for {
 12302  		_ = v.Args[1]
 12303  		n := v.Args[0]
 12304  		v_1 := v.Args[1]
 12305  		if v_1.Op != OpLsh16x64 {
 12306  			break
 12307  		}
 12308  		_ = v_1.Args[1]
 12309  		v_1_0 := v_1.Args[0]
 12310  		if v_1_0.Op != OpRsh16x64 {
 12311  			break
 12312  		}
 12313  		_ = v_1_0.Args[1]
 12314  		v_1_0_0 := v_1_0.Args[0]
 12315  		if v_1_0_0.Op != OpAdd16 {
 12316  			break
 12317  		}
 12318  		t := v_1_0_0.Type
 12319  		_ = v_1_0_0.Args[1]
 12320  		if n != v_1_0_0.Args[0] {
 12321  			break
 12322  		}
 12323  		v_1_0_0_1 := v_1_0_0.Args[1]
 12324  		if v_1_0_0_1.Op != OpRsh16Ux64 {
 12325  			break
 12326  		}
 12327  		if v_1_0_0_1.Type != t {
 12328  			break
 12329  		}
 12330  		_ = v_1_0_0_1.Args[1]
 12331  		v_1_0_0_1_0 := v_1_0_0_1.Args[0]
 12332  		if v_1_0_0_1_0.Op != OpRsh16x64 {
 12333  			break
 12334  		}
 12335  		if v_1_0_0_1_0.Type != t {
 12336  			break
 12337  		}
 12338  		_ = v_1_0_0_1_0.Args[1]
 12339  		if n != v_1_0_0_1_0.Args[0] {
 12340  			break
 12341  		}
 12342  		v_1_0_0_1_0_1 := v_1_0_0_1_0.Args[1]
 12343  		if v_1_0_0_1_0_1.Op != OpConst64 {
 12344  			break
 12345  		}
 12346  		if v_1_0_0_1_0_1.Type != typ.UInt64 {
 12347  			break
 12348  		}
 12349  		if v_1_0_0_1_0_1.AuxInt != 15 {
 12350  			break
 12351  		}
 12352  		v_1_0_0_1_1 := v_1_0_0_1.Args[1]
 12353  		if v_1_0_0_1_1.Op != OpConst64 {
 12354  			break
 12355  		}
 12356  		if v_1_0_0_1_1.Type != typ.UInt64 {
 12357  			break
 12358  		}
 12359  		kbar := v_1_0_0_1_1.AuxInt
 12360  		v_1_0_1 := v_1_0.Args[1]
 12361  		if v_1_0_1.Op != OpConst64 {
 12362  			break
 12363  		}
 12364  		if v_1_0_1.Type != typ.UInt64 {
 12365  			break
 12366  		}
 12367  		k := v_1_0_1.AuxInt
 12368  		v_1_1 := v_1.Args[1]
 12369  		if v_1_1.Op != OpConst64 {
 12370  			break
 12371  		}
 12372  		if v_1_1.Type != typ.UInt64 {
 12373  			break
 12374  		}
 12375  		if v_1_1.AuxInt != k {
 12376  			break
 12377  		}
 12378  		if !(k > 0 && k < 15 && kbar == 16-k) {
 12379  			break
 12380  		}
 12381  		v.reset(OpEq16)
 12382  		v0 := b.NewValue0(v.Pos, OpAnd16, t)
 12383  		v0.AddArg(n)
 12384  		v1 := b.NewValue0(v.Pos, OpConst16, t)
 12385  		v1.AuxInt = int64(1<<uint(k) - 1)
 12386  		v0.AddArg(v1)
 12387  		v.AddArg(v0)
 12388  		v2 := b.NewValue0(v.Pos, OpConst16, t)
 12389  		v2.AuxInt = 0
 12390  		v.AddArg(v2)
 12391  		return true
 12392  	}
 12393  	// match: (Eq16 n (Lsh16x64 (Rsh16x64 (Add16 <t> (Rsh16Ux64 <t> (Rsh16x64 <t> n (Const64 <typ.UInt64> [15])) (Const64 <typ.UInt64> [kbar])) n) (Const64 <typ.UInt64> [k])) (Const64 <typ.UInt64> [k])))
 12394  	// cond: k > 0 && k < 15 && kbar == 16 - k
 12395  	// result: (Eq16 (And16 <t> n (Const16 <t> [int64(1<<uint(k)-1)])) (Const16 <t> [0]))
 12396  	for {
 12397  		_ = v.Args[1]
 12398  		n := v.Args[0]
 12399  		v_1 := v.Args[1]
 12400  		if v_1.Op != OpLsh16x64 {
 12401  			break
 12402  		}
 12403  		_ = v_1.Args[1]
 12404  		v_1_0 := v_1.Args[0]
 12405  		if v_1_0.Op != OpRsh16x64 {
 12406  			break
 12407  		}
 12408  		_ = v_1_0.Args[1]
 12409  		v_1_0_0 := v_1_0.Args[0]
 12410  		if v_1_0_0.Op != OpAdd16 {
 12411  			break
 12412  		}
 12413  		t := v_1_0_0.Type
 12414  		_ = v_1_0_0.Args[1]
 12415  		v_1_0_0_0 := v_1_0_0.Args[0]
 12416  		if v_1_0_0_0.Op != OpRsh16Ux64 {
 12417  			break
 12418  		}
 12419  		if v_1_0_0_0.Type != t {
 12420  			break
 12421  		}
 12422  		_ = v_1_0_0_0.Args[1]
 12423  		v_1_0_0_0_0 := v_1_0_0_0.Args[0]
 12424  		if v_1_0_0_0_0.Op != OpRsh16x64 {
 12425  			break
 12426  		}
 12427  		if v_1_0_0_0_0.Type != t {
 12428  			break
 12429  		}
 12430  		_ = v_1_0_0_0_0.Args[1]
 12431  		if n != v_1_0_0_0_0.Args[0] {
 12432  			break
 12433  		}
 12434  		v_1_0_0_0_0_1 := v_1_0_0_0_0.Args[1]
 12435  		if v_1_0_0_0_0_1.Op != OpConst64 {
 12436  			break
 12437  		}
 12438  		if v_1_0_0_0_0_1.Type != typ.UInt64 {
 12439  			break
 12440  		}
 12441  		if v_1_0_0_0_0_1.AuxInt != 15 {
 12442  			break
 12443  		}
 12444  		v_1_0_0_0_1 := v_1_0_0_0.Args[1]
 12445  		if v_1_0_0_0_1.Op != OpConst64 {
 12446  			break
 12447  		}
 12448  		if v_1_0_0_0_1.Type != typ.UInt64 {
 12449  			break
 12450  		}
 12451  		kbar := v_1_0_0_0_1.AuxInt
 12452  		if n != v_1_0_0.Args[1] {
 12453  			break
 12454  		}
 12455  		v_1_0_1 := v_1_0.Args[1]
 12456  		if v_1_0_1.Op != OpConst64 {
 12457  			break
 12458  		}
 12459  		if v_1_0_1.Type != typ.UInt64 {
 12460  			break
 12461  		}
 12462  		k := v_1_0_1.AuxInt
 12463  		v_1_1 := v_1.Args[1]
 12464  		if v_1_1.Op != OpConst64 {
 12465  			break
 12466  		}
 12467  		if v_1_1.Type != typ.UInt64 {
 12468  			break
 12469  		}
 12470  		if v_1_1.AuxInt != k {
 12471  			break
 12472  		}
 12473  		if !(k > 0 && k < 15 && kbar == 16-k) {
 12474  			break
 12475  		}
 12476  		v.reset(OpEq16)
 12477  		v0 := b.NewValue0(v.Pos, OpAnd16, t)
 12478  		v0.AddArg(n)
 12479  		v1 := b.NewValue0(v.Pos, OpConst16, t)
 12480  		v1.AuxInt = int64(1<<uint(k) - 1)
 12481  		v0.AddArg(v1)
 12482  		v.AddArg(v0)
 12483  		v2 := b.NewValue0(v.Pos, OpConst16, t)
 12484  		v2.AuxInt = 0
 12485  		v.AddArg(v2)
 12486  		return true
 12487  	}
 12488  	// match: (Eq16 (Lsh16x64 (Rsh16x64 (Add16 <t> n (Rsh16Ux64 <t> (Rsh16x64 <t> n (Const64 <typ.UInt64> [15])) (Const64 <typ.UInt64> [kbar]))) (Const64 <typ.UInt64> [k])) (Const64 <typ.UInt64> [k])) n)
 12489  	// cond: k > 0 && k < 15 && kbar == 16 - k
 12490  	// result: (Eq16 (And16 <t> n (Const16 <t> [int64(1<<uint(k)-1)])) (Const16 <t> [0]))
 12491  	for {
 12492  		n := v.Args[1]
 12493  		v_0 := v.Args[0]
 12494  		if v_0.Op != OpLsh16x64 {
 12495  			break
 12496  		}
 12497  		_ = v_0.Args[1]
 12498  		v_0_0 := v_0.Args[0]
 12499  		if v_0_0.Op != OpRsh16x64 {
 12500  			break
 12501  		}
 12502  		_ = v_0_0.Args[1]
 12503  		v_0_0_0 := v_0_0.Args[0]
 12504  		if v_0_0_0.Op != OpAdd16 {
 12505  			break
 12506  		}
 12507  		t := v_0_0_0.Type
 12508  		_ = v_0_0_0.Args[1]
 12509  		if n != v_0_0_0.Args[0] {
 12510  			break
 12511  		}
 12512  		v_0_0_0_1 := v_0_0_0.Args[1]
 12513  		if v_0_0_0_1.Op != OpRsh16Ux64 {
 12514  			break
 12515  		}
 12516  		if v_0_0_0_1.Type != t {
 12517  			break
 12518  		}
 12519  		_ = v_0_0_0_1.Args[1]
 12520  		v_0_0_0_1_0 := v_0_0_0_1.Args[0]
 12521  		if v_0_0_0_1_0.Op != OpRsh16x64 {
 12522  			break
 12523  		}
 12524  		if v_0_0_0_1_0.Type != t {
 12525  			break
 12526  		}
 12527  		_ = v_0_0_0_1_0.Args[1]
 12528  		if n != v_0_0_0_1_0.Args[0] {
 12529  			break
 12530  		}
 12531  		v_0_0_0_1_0_1 := v_0_0_0_1_0.Args[1]
 12532  		if v_0_0_0_1_0_1.Op != OpConst64 {
 12533  			break
 12534  		}
 12535  		if v_0_0_0_1_0_1.Type != typ.UInt64 {
 12536  			break
 12537  		}
 12538  		if v_0_0_0_1_0_1.AuxInt != 15 {
 12539  			break
 12540  		}
 12541  		v_0_0_0_1_1 := v_0_0_0_1.Args[1]
 12542  		if v_0_0_0_1_1.Op != OpConst64 {
 12543  			break
 12544  		}
 12545  		if v_0_0_0_1_1.Type != typ.UInt64 {
 12546  			break
 12547  		}
 12548  		kbar := v_0_0_0_1_1.AuxInt
 12549  		v_0_0_1 := v_0_0.Args[1]
 12550  		if v_0_0_1.Op != OpConst64 {
 12551  			break
 12552  		}
 12553  		if v_0_0_1.Type != typ.UInt64 {
 12554  			break
 12555  		}
 12556  		k := v_0_0_1.AuxInt
 12557  		v_0_1 := v_0.Args[1]
 12558  		if v_0_1.Op != OpConst64 {
 12559  			break
 12560  		}
 12561  		if v_0_1.Type != typ.UInt64 {
 12562  			break
 12563  		}
 12564  		if v_0_1.AuxInt != k {
 12565  			break
 12566  		}
 12567  		if !(k > 0 && k < 15 && kbar == 16-k) {
 12568  			break
 12569  		}
 12570  		v.reset(OpEq16)
 12571  		v0 := b.NewValue0(v.Pos, OpAnd16, t)
 12572  		v0.AddArg(n)
 12573  		v1 := b.NewValue0(v.Pos, OpConst16, t)
 12574  		v1.AuxInt = int64(1<<uint(k) - 1)
 12575  		v0.AddArg(v1)
 12576  		v.AddArg(v0)
 12577  		v2 := b.NewValue0(v.Pos, OpConst16, t)
 12578  		v2.AuxInt = 0
 12579  		v.AddArg(v2)
 12580  		return true
 12581  	}
 12582  	// match: (Eq16 (Lsh16x64 (Rsh16x64 (Add16 <t> (Rsh16Ux64 <t> (Rsh16x64 <t> n (Const64 <typ.UInt64> [15])) (Const64 <typ.UInt64> [kbar])) n) (Const64 <typ.UInt64> [k])) (Const64 <typ.UInt64> [k])) n)
 12583  	// cond: k > 0 && k < 15 && kbar == 16 - k
 12584  	// result: (Eq16 (And16 <t> n (Const16 <t> [int64(1<<uint(k)-1)])) (Const16 <t> [0]))
 12585  	for {
 12586  		n := v.Args[1]
 12587  		v_0 := v.Args[0]
 12588  		if v_0.Op != OpLsh16x64 {
 12589  			break
 12590  		}
 12591  		_ = v_0.Args[1]
 12592  		v_0_0 := v_0.Args[0]
 12593  		if v_0_0.Op != OpRsh16x64 {
 12594  			break
 12595  		}
 12596  		_ = v_0_0.Args[1]
 12597  		v_0_0_0 := v_0_0.Args[0]
 12598  		if v_0_0_0.Op != OpAdd16 {
 12599  			break
 12600  		}
 12601  		t := v_0_0_0.Type
 12602  		_ = v_0_0_0.Args[1]
 12603  		v_0_0_0_0 := v_0_0_0.Args[0]
 12604  		if v_0_0_0_0.Op != OpRsh16Ux64 {
 12605  			break
 12606  		}
 12607  		if v_0_0_0_0.Type != t {
 12608  			break
 12609  		}
 12610  		_ = v_0_0_0_0.Args[1]
 12611  		v_0_0_0_0_0 := v_0_0_0_0.Args[0]
 12612  		if v_0_0_0_0_0.Op != OpRsh16x64 {
 12613  			break
 12614  		}
 12615  		if v_0_0_0_0_0.Type != t {
 12616  			break
 12617  		}
 12618  		_ = v_0_0_0_0_0.Args[1]
 12619  		if n != v_0_0_0_0_0.Args[0] {
 12620  			break
 12621  		}
 12622  		v_0_0_0_0_0_1 := v_0_0_0_0_0.Args[1]
 12623  		if v_0_0_0_0_0_1.Op != OpConst64 {
 12624  			break
 12625  		}
 12626  		if v_0_0_0_0_0_1.Type != typ.UInt64 {
 12627  			break
 12628  		}
 12629  		if v_0_0_0_0_0_1.AuxInt != 15 {
 12630  			break
 12631  		}
 12632  		v_0_0_0_0_1 := v_0_0_0_0.Args[1]
 12633  		if v_0_0_0_0_1.Op != OpConst64 {
 12634  			break
 12635  		}
 12636  		if v_0_0_0_0_1.Type != typ.UInt64 {
 12637  			break
 12638  		}
 12639  		kbar := v_0_0_0_0_1.AuxInt
 12640  		if n != v_0_0_0.Args[1] {
 12641  			break
 12642  		}
 12643  		v_0_0_1 := v_0_0.Args[1]
 12644  		if v_0_0_1.Op != OpConst64 {
 12645  			break
 12646  		}
 12647  		if v_0_0_1.Type != typ.UInt64 {
 12648  			break
 12649  		}
 12650  		k := v_0_0_1.AuxInt
 12651  		v_0_1 := v_0.Args[1]
 12652  		if v_0_1.Op != OpConst64 {
 12653  			break
 12654  		}
 12655  		if v_0_1.Type != typ.UInt64 {
 12656  			break
 12657  		}
 12658  		if v_0_1.AuxInt != k {
 12659  			break
 12660  		}
 12661  		if !(k > 0 && k < 15 && kbar == 16-k) {
 12662  			break
 12663  		}
 12664  		v.reset(OpEq16)
 12665  		v0 := b.NewValue0(v.Pos, OpAnd16, t)
 12666  		v0.AddArg(n)
 12667  		v1 := b.NewValue0(v.Pos, OpConst16, t)
 12668  		v1.AuxInt = int64(1<<uint(k) - 1)
 12669  		v0.AddArg(v1)
 12670  		v.AddArg(v0)
 12671  		v2 := b.NewValue0(v.Pos, OpConst16, t)
 12672  		v2.AuxInt = 0
 12673  		v.AddArg(v2)
 12674  		return true
 12675  	}
 12676  	// match: (Eq16 s:(Sub16 x y) (Const16 [0]))
 12677  	// cond: s.Uses == 1
 12678  	// result: (Eq16 x y)
 12679  	for {
 12680  		_ = v.Args[1]
 12681  		s := v.Args[0]
 12682  		if s.Op != OpSub16 {
 12683  			break
 12684  		}
 12685  		y := s.Args[1]
 12686  		x := s.Args[0]
 12687  		v_1 := v.Args[1]
 12688  		if v_1.Op != OpConst16 {
 12689  			break
 12690  		}
 12691  		if v_1.AuxInt != 0 {
 12692  			break
 12693  		}
 12694  		if !(s.Uses == 1) {
 12695  			break
 12696  		}
 12697  		v.reset(OpEq16)
 12698  		v.AddArg(x)
 12699  		v.AddArg(y)
 12700  		return true
 12701  	}
 12702  	// match: (Eq16 (Const16 [0]) s:(Sub16 x y))
 12703  	// cond: s.Uses == 1
 12704  	// result: (Eq16 x y)
 12705  	for {
 12706  		_ = v.Args[1]
 12707  		v_0 := v.Args[0]
 12708  		if v_0.Op != OpConst16 {
 12709  			break
 12710  		}
 12711  		if v_0.AuxInt != 0 {
 12712  			break
 12713  		}
 12714  		s := v.Args[1]
 12715  		if s.Op != OpSub16 {
 12716  			break
 12717  		}
 12718  		y := s.Args[1]
 12719  		x := s.Args[0]
 12720  		if !(s.Uses == 1) {
 12721  			break
 12722  		}
 12723  		v.reset(OpEq16)
 12724  		v.AddArg(x)
 12725  		v.AddArg(y)
 12726  		return true
 12727  	}
 12728  	return false
 12729  }
// rewriteValuegeneric_OpEq32_0 applies the first batch of generic rewrite
// rules for Eq32 values, mutating v in place and reporting whether a rule
// fired. Each rule is a straight-line pattern match over v's argument tree;
// on any mismatch it breaks out of its one-iteration for-loop and the next
// rule is tried. Commuted variants of the same rule appear as separate
// matches. Machine-generated from gen/generic.rules; fix the rules file,
// not this code.
func rewriteValuegeneric_OpEq32_0(v *Value) bool {
	b := v.Block
	typ := &b.Func.Config.Types
	// Reflexive equality: x == x is always true.
	// match: (Eq32 x x)
	// cond:
	// result: (ConstBool [1])
	for {
		x := v.Args[1]
		if x != v.Args[0] {
			break
		}
		v.reset(OpConstBool)
		v.AuxInt = 1
		return true
	}
	// The next four rules fold a constant across an addition,
	// c == d+x  =>  c-d == x, in all four operand orderings.
	// match: (Eq32 (Const32 <t> [c]) (Add32 (Const32 <t> [d]) x))
	// cond:
	// result: (Eq32 (Const32 <t> [int64(int32(c-d))]) x)
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpConst32 {
			break
		}
		t := v_0.Type
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpAdd32 {
			break
		}
		x := v_1.Args[1]
		v_1_0 := v_1.Args[0]
		if v_1_0.Op != OpConst32 {
			break
		}
		if v_1_0.Type != t {
			break
		}
		d := v_1_0.AuxInt
		v.reset(OpEq32)
		v0 := b.NewValue0(v.Pos, OpConst32, t)
		// int32 truncation keeps the 32-bit wraparound semantics of c-d.
		v0.AuxInt = int64(int32(c - d))
		v.AddArg(v0)
		v.AddArg(x)
		return true
	}
	// match: (Eq32 (Const32 <t> [c]) (Add32 x (Const32 <t> [d])))
	// cond:
	// result: (Eq32 (Const32 <t> [int64(int32(c-d))]) x)
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpConst32 {
			break
		}
		t := v_0.Type
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpAdd32 {
			break
		}
		_ = v_1.Args[1]
		x := v_1.Args[0]
		v_1_1 := v_1.Args[1]
		if v_1_1.Op != OpConst32 {
			break
		}
		if v_1_1.Type != t {
			break
		}
		d := v_1_1.AuxInt
		v.reset(OpEq32)
		v0 := b.NewValue0(v.Pos, OpConst32, t)
		v0.AuxInt = int64(int32(c - d))
		v.AddArg(v0)
		v.AddArg(x)
		return true
	}
	// match: (Eq32 (Add32 (Const32 <t> [d]) x) (Const32 <t> [c]))
	// cond:
	// result: (Eq32 (Const32 <t> [int64(int32(c-d))]) x)
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpAdd32 {
			break
		}
		x := v_0.Args[1]
		v_0_0 := v_0.Args[0]
		if v_0_0.Op != OpConst32 {
			break
		}
		t := v_0_0.Type
		d := v_0_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst32 {
			break
		}
		if v_1.Type != t {
			break
		}
		c := v_1.AuxInt
		v.reset(OpEq32)
		v0 := b.NewValue0(v.Pos, OpConst32, t)
		v0.AuxInt = int64(int32(c - d))
		v.AddArg(v0)
		v.AddArg(x)
		return true
	}
	// match: (Eq32 (Add32 x (Const32 <t> [d])) (Const32 <t> [c]))
	// cond:
	// result: (Eq32 (Const32 <t> [int64(int32(c-d))]) x)
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpAdd32 {
			break
		}
		_ = v_0.Args[1]
		x := v_0.Args[0]
		v_0_1 := v_0.Args[1]
		if v_0_1.Op != OpConst32 {
			break
		}
		t := v_0_1.Type
		d := v_0_1.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst32 {
			break
		}
		if v_1.Type != t {
			break
		}
		c := v_1.AuxInt
		v.reset(OpEq32)
		v0 := b.NewValue0(v.Pos, OpConst32, t)
		v0.AuxInt = int64(int32(c - d))
		v.AddArg(v0)
		v.AddArg(x)
		return true
	}
	// Constant folding: comparison of two constants, both orderings.
	// match: (Eq32 (Const32 [c]) (Const32 [d]))
	// cond:
	// result: (ConstBool [b2i(c == d)])
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpConst32 {
			break
		}
		c := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst32 {
			break
		}
		d := v_1.AuxInt
		v.reset(OpConstBool)
		v.AuxInt = b2i(c == d)
		return true
	}
	// match: (Eq32 (Const32 [d]) (Const32 [c]))
	// cond:
	// result: (ConstBool [b2i(c == d)])
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpConst32 {
			break
		}
		d := v_0.AuxInt
		v_1 := v.Args[1]
		if v_1.Op != OpConst32 {
			break
		}
		c := v_1.AuxInt
		v.reset(OpConstBool)
		v.AuxInt = b2i(c == d)
		return true
	}
	// The remaining rules recognize x == c*(x/c) where x/c has already been
	// strength-reduced to an unsigned magic-number multiply (Hmul32u/shift,
	// per the umagic condition), and replace the whole comparison with a
	// rotate-and-compare divisibility test (udivisible). Skipped during the
	// "opt" pass so the division pattern can first be formed there.
	// match: (Eq32 x (Mul32 (Const32 [c]) (Rsh32Ux64 mul:(Hmul32u (Const32 [m]) x) (Const64 [s]))))
	// cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(1<<31+umagic(32,c).m/2)) && s == umagic(32,c).s-1 && x.Op != OpConst32 && udivisibleOK(32,c)
	// result: (Leq32U (RotateLeft32 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).m))]) x) (Const32 <typ.UInt32> [int64(32-udivisible(32,c).k)]) ) (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).max))]) )
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpMul32 {
			break
		}
		_ = v_1.Args[1]
		v_1_0 := v_1.Args[0]
		if v_1_0.Op != OpConst32 {
			break
		}
		c := v_1_0.AuxInt
		v_1_1 := v_1.Args[1]
		if v_1_1.Op != OpRsh32Ux64 {
			break
		}
		_ = v_1_1.Args[1]
		mul := v_1_1.Args[0]
		if mul.Op != OpHmul32u {
			break
		}
		_ = mul.Args[1]
		mul_0 := mul.Args[0]
		if mul_0.Op != OpConst32 {
			break
		}
		m := mul_0.AuxInt
		if x != mul.Args[1] {
			break
		}
		v_1_1_1 := v_1_1.Args[1]
		if v_1_1_1.Op != OpConst64 {
			break
		}
		s := v_1_1_1.AuxInt
		if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(1<<31+umagic(32, c).m/2)) && s == umagic(32, c).s-1 && x.Op != OpConst32 && udivisibleOK(32, c)) {
			break
		}
		v.reset(OpLeq32U)
		v0 := b.NewValue0(v.Pos, OpRotateLeft32, typ.UInt32)
		v1 := b.NewValue0(v.Pos, OpMul32, typ.UInt32)
		v2 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
		v2.AuxInt = int64(int32(udivisible(32, c).m))
		v1.AddArg(v2)
		v1.AddArg(x)
		v0.AddArg(v1)
		v3 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
		v3.AuxInt = int64(32 - udivisible(32, c).k)
		v0.AddArg(v3)
		v.AddArg(v0)
		v4 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
		v4.AuxInt = int64(int32(udivisible(32, c).max))
		v.AddArg(v4)
		return true
	}
	// Same rule, Hmul32u operands commuted.
	// match: (Eq32 x (Mul32 (Const32 [c]) (Rsh32Ux64 mul:(Hmul32u x (Const32 [m])) (Const64 [s]))))
	// cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(1<<31+umagic(32,c).m/2)) && s == umagic(32,c).s-1 && x.Op != OpConst32 && udivisibleOK(32,c)
	// result: (Leq32U (RotateLeft32 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).m))]) x) (Const32 <typ.UInt32> [int64(32-udivisible(32,c).k)]) ) (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).max))]) )
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpMul32 {
			break
		}
		_ = v_1.Args[1]
		v_1_0 := v_1.Args[0]
		if v_1_0.Op != OpConst32 {
			break
		}
		c := v_1_0.AuxInt
		v_1_1 := v_1.Args[1]
		if v_1_1.Op != OpRsh32Ux64 {
			break
		}
		_ = v_1_1.Args[1]
		mul := v_1_1.Args[0]
		if mul.Op != OpHmul32u {
			break
		}
		_ = mul.Args[1]
		if x != mul.Args[0] {
			break
		}
		mul_1 := mul.Args[1]
		if mul_1.Op != OpConst32 {
			break
		}
		m := mul_1.AuxInt
		v_1_1_1 := v_1_1.Args[1]
		if v_1_1_1.Op != OpConst64 {
			break
		}
		s := v_1_1_1.AuxInt
		if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(1<<31+umagic(32, c).m/2)) && s == umagic(32, c).s-1 && x.Op != OpConst32 && udivisibleOK(32, c)) {
			break
		}
		v.reset(OpLeq32U)
		v0 := b.NewValue0(v.Pos, OpRotateLeft32, typ.UInt32)
		v1 := b.NewValue0(v.Pos, OpMul32, typ.UInt32)
		v2 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
		v2.AuxInt = int64(int32(udivisible(32, c).m))
		v1.AddArg(v2)
		v1.AddArg(x)
		v0.AddArg(v1)
		v3 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
		v3.AuxInt = int64(32 - udivisible(32, c).k)
		v0.AddArg(v3)
		v.AddArg(v0)
		v4 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
		v4.AuxInt = int64(int32(udivisible(32, c).max))
		v.AddArg(v4)
		return true
	}
	// Same rule, Mul32 operands commuted.
	// match: (Eq32 x (Mul32 (Rsh32Ux64 mul:(Hmul32u (Const32 [m]) x) (Const64 [s])) (Const32 [c])))
	// cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(1<<31+umagic(32,c).m/2)) && s == umagic(32,c).s-1 && x.Op != OpConst32 && udivisibleOK(32,c)
	// result: (Leq32U (RotateLeft32 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).m))]) x) (Const32 <typ.UInt32> [int64(32-udivisible(32,c).k)]) ) (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).max))]) )
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpMul32 {
			break
		}
		_ = v_1.Args[1]
		v_1_0 := v_1.Args[0]
		if v_1_0.Op != OpRsh32Ux64 {
			break
		}
		_ = v_1_0.Args[1]
		mul := v_1_0.Args[0]
		if mul.Op != OpHmul32u {
			break
		}
		_ = mul.Args[1]
		mul_0 := mul.Args[0]
		if mul_0.Op != OpConst32 {
			break
		}
		m := mul_0.AuxInt
		if x != mul.Args[1] {
			break
		}
		v_1_0_1 := v_1_0.Args[1]
		if v_1_0_1.Op != OpConst64 {
			break
		}
		s := v_1_0_1.AuxInt
		v_1_1 := v_1.Args[1]
		if v_1_1.Op != OpConst32 {
			break
		}
		c := v_1_1.AuxInt
		if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(1<<31+umagic(32, c).m/2)) && s == umagic(32, c).s-1 && x.Op != OpConst32 && udivisibleOK(32, c)) {
			break
		}
		v.reset(OpLeq32U)
		v0 := b.NewValue0(v.Pos, OpRotateLeft32, typ.UInt32)
		v1 := b.NewValue0(v.Pos, OpMul32, typ.UInt32)
		v2 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
		v2.AuxInt = int64(int32(udivisible(32, c).m))
		v1.AddArg(v2)
		v1.AddArg(x)
		v0.AddArg(v1)
		v3 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
		v3.AuxInt = int64(32 - udivisible(32, c).k)
		v0.AddArg(v3)
		v.AddArg(v0)
		v4 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
		v4.AuxInt = int64(int32(udivisible(32, c).max))
		v.AddArg(v4)
		return true
	}
	// No rule in this batch matched; the dispatcher tries the next batch.
	return false
}
 13088  func rewriteValuegeneric_OpEq32_10(v *Value) bool {
 13089  	b := v.Block
 13090  	typ := &b.Func.Config.Types
 13091  	// match: (Eq32 x (Mul32 (Rsh32Ux64 mul:(Hmul32u x (Const32 [m])) (Const64 [s])) (Const32 [c])))
 13092  	// cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(1<<31+umagic(32,c).m/2)) && s == umagic(32,c).s-1 && x.Op != OpConst32 && udivisibleOK(32,c)
 13093  	// result: (Leq32U (RotateLeft32 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).m))]) x) (Const32 <typ.UInt32> [int64(32-udivisible(32,c).k)]) ) (Const32 <typ.UInt32> [int64(int32(udivisible(32,c).max))]) )
 13094  	for {
 13095  		_ = v.Args[1]
 13096  		x := v.Args[0]
 13097  		v_1 := v.Args[1]
 13098  		if v_1.Op != OpMul32 {
 13099  			break
 13100  		}
 13101  		_ = v_1.Args[1]
 13102  		v_1_0 := v_1.Args[0]
 13103  		if v_1_0.Op != OpRsh32Ux64 {
 13104  			break
 13105  		}
 13106  		_ = v_1_0.Args[1]
 13107  		mul := v_1_0.Args[0]
 13108  		if mul.Op != OpHmul32u {
 13109  			break
 13110  		}
 13111  		_ = mul.Args[1]
 13112  		if x != mul.Args[0] {
 13113  			break
 13114  		}
 13115  		mul_1 := mul.Args[1]
 13116  		if mul_1.Op != OpConst32 {
 13117  			break
 13118  		}
 13119  		m := mul_1.AuxInt
 13120  		v_1_0_1 := v_1_0.Args[1]
 13121  		if v_1_0_1.Op != OpConst64 {
 13122  			break
 13123  		}
 13124  		s := v_1_0_1.AuxInt
 13125  		v_1_1 := v_1.Args[1]
 13126  		if v_1_1.Op != OpConst32 {
 13127  			break
 13128  		}
 13129  		c := v_1_1.AuxInt
 13130  		if !(v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(1<<31+umagic(32, c).m/2)) && s == umagic(32, c).s-1 && x.Op != OpConst32 && udivisibleOK(32, c)) {
 13131  			break
 13132  		}
 13133  		v.reset(OpLeq32U)
 13134  		v0 := b.NewValue0(v.Pos, OpRotateLeft32, typ.UInt32)
 13135  		v1 := b.NewValue0(v.Pos, OpMul32, typ.UInt32)
 13136  		v2 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
 13137  		v2.AuxInt = int64(int32(udivisible(32, c).m))
 13138  		v1.AddArg(v2)
 13139  		v1.AddArg(x)
 13140  		v0.AddArg(v1)
 13141  		v3 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
 13142  		v3.AuxInt = int64(32 - udivisible(32, c).k)
 13143  		v0.AddArg(v3)
 13144  		v.AddArg(v0)
 13145  		v4 := b.NewValue0(v.Pos, OpConst32, typ.UInt32)
 13146  		v4.AuxInt = int64(int32(udivisible(32, c).max))
 13147  		v.AddArg(v4)
 13148  		return true
 13149  	}
 13150  	// match: (Eq32 (Mul32 (Const32 [c]) (Rsh32Ux64 mul:(Hmul32u (Const32 [m]) x) (Const64 [s]))) x)
 13151  	// cond: v.Block.Func.pass.name != "opt" && mul.Uses == 1 && m == int64(int32(1<<31+umagic(32,c).m/2)) && s == umagic(32,c).s-1 && x.Op != OpConst32 && udivisibleOK(32,c)
 13152  	// result: (Leq32U (RotateLeft32 <typ.UInt32> (Mul32 <typ.UInt32> (Const32 <typ.