...
Run Format

Source file src/cmd/compile/internal/ssa/rewriteARM64.go

Documentation: cmd/compile/internal/ssa

     1  // Code generated from gen/ARM64.rules; DO NOT EDIT.
     2  // generated with: cd gen; go run *.go
     3  
     4  package ssa
     5  
     6  import "fmt"
     7  import "math"
     8  import "cmd/internal/obj"
     9  import "cmd/internal/objabi"
    10  import "cmd/compile/internal/types"
    11  
    12  var _ = fmt.Println   // in case not otherwise used
    13  var _ = math.MinInt8  // in case not otherwise used
    14  var _ = obj.ANOP      // in case not otherwise used
    15  var _ = objabi.GOROOT // in case not otherwise used
    16  var _ = types.TypeMem // in case not otherwise used
    17  
    18  func rewriteValueARM64(v *Value) bool {
    19  	switch v.Op {
    20  	case OpARM64ADD:
    21  		return rewriteValueARM64_OpARM64ADD_0(v) || rewriteValueARM64_OpARM64ADD_10(v) || rewriteValueARM64_OpARM64ADD_20(v)
    22  	case OpARM64ADDconst:
    23  		return rewriteValueARM64_OpARM64ADDconst_0(v)
    24  	case OpARM64ADDshiftLL:
    25  		return rewriteValueARM64_OpARM64ADDshiftLL_0(v)
    26  	case OpARM64ADDshiftRA:
    27  		return rewriteValueARM64_OpARM64ADDshiftRA_0(v)
    28  	case OpARM64ADDshiftRL:
    29  		return rewriteValueARM64_OpARM64ADDshiftRL_0(v)
    30  	case OpARM64AND:
    31  		return rewriteValueARM64_OpARM64AND_0(v) || rewriteValueARM64_OpARM64AND_10(v)
    32  	case OpARM64ANDconst:
    33  		return rewriteValueARM64_OpARM64ANDconst_0(v)
    34  	case OpARM64ANDshiftLL:
    35  		return rewriteValueARM64_OpARM64ANDshiftLL_0(v)
    36  	case OpARM64ANDshiftRA:
    37  		return rewriteValueARM64_OpARM64ANDshiftRA_0(v)
    38  	case OpARM64ANDshiftRL:
    39  		return rewriteValueARM64_OpARM64ANDshiftRL_0(v)
    40  	case OpARM64BIC:
    41  		return rewriteValueARM64_OpARM64BIC_0(v)
    42  	case OpARM64BICshiftLL:
    43  		return rewriteValueARM64_OpARM64BICshiftLL_0(v)
    44  	case OpARM64BICshiftRA:
    45  		return rewriteValueARM64_OpARM64BICshiftRA_0(v)
    46  	case OpARM64BICshiftRL:
    47  		return rewriteValueARM64_OpARM64BICshiftRL_0(v)
    48  	case OpARM64CMN:
    49  		return rewriteValueARM64_OpARM64CMN_0(v)
    50  	case OpARM64CMNW:
    51  		return rewriteValueARM64_OpARM64CMNW_0(v)
    52  	case OpARM64CMNWconst:
    53  		return rewriteValueARM64_OpARM64CMNWconst_0(v)
    54  	case OpARM64CMNconst:
    55  		return rewriteValueARM64_OpARM64CMNconst_0(v)
    56  	case OpARM64CMNshiftLL:
    57  		return rewriteValueARM64_OpARM64CMNshiftLL_0(v)
    58  	case OpARM64CMNshiftRA:
    59  		return rewriteValueARM64_OpARM64CMNshiftRA_0(v)
    60  	case OpARM64CMNshiftRL:
    61  		return rewriteValueARM64_OpARM64CMNshiftRL_0(v)
    62  	case OpARM64CMP:
    63  		return rewriteValueARM64_OpARM64CMP_0(v)
    64  	case OpARM64CMPW:
    65  		return rewriteValueARM64_OpARM64CMPW_0(v)
    66  	case OpARM64CMPWconst:
    67  		return rewriteValueARM64_OpARM64CMPWconst_0(v)
    68  	case OpARM64CMPconst:
    69  		return rewriteValueARM64_OpARM64CMPconst_0(v)
    70  	case OpARM64CMPshiftLL:
    71  		return rewriteValueARM64_OpARM64CMPshiftLL_0(v)
    72  	case OpARM64CMPshiftRA:
    73  		return rewriteValueARM64_OpARM64CMPshiftRA_0(v)
    74  	case OpARM64CMPshiftRL:
    75  		return rewriteValueARM64_OpARM64CMPshiftRL_0(v)
    76  	case OpARM64CSEL:
    77  		return rewriteValueARM64_OpARM64CSEL_0(v)
    78  	case OpARM64CSEL0:
    79  		return rewriteValueARM64_OpARM64CSEL0_0(v)
    80  	case OpARM64DIV:
    81  		return rewriteValueARM64_OpARM64DIV_0(v)
    82  	case OpARM64DIVW:
    83  		return rewriteValueARM64_OpARM64DIVW_0(v)
    84  	case OpARM64EON:
    85  		return rewriteValueARM64_OpARM64EON_0(v)
    86  	case OpARM64EONshiftLL:
    87  		return rewriteValueARM64_OpARM64EONshiftLL_0(v)
    88  	case OpARM64EONshiftRA:
    89  		return rewriteValueARM64_OpARM64EONshiftRA_0(v)
    90  	case OpARM64EONshiftRL:
    91  		return rewriteValueARM64_OpARM64EONshiftRL_0(v)
    92  	case OpARM64Equal:
    93  		return rewriteValueARM64_OpARM64Equal_0(v)
    94  	case OpARM64FADDD:
    95  		return rewriteValueARM64_OpARM64FADDD_0(v)
    96  	case OpARM64FADDS:
    97  		return rewriteValueARM64_OpARM64FADDS_0(v)
    98  	case OpARM64FMOVDfpgp:
    99  		return rewriteValueARM64_OpARM64FMOVDfpgp_0(v)
   100  	case OpARM64FMOVDgpfp:
   101  		return rewriteValueARM64_OpARM64FMOVDgpfp_0(v)
   102  	case OpARM64FMOVDload:
   103  		return rewriteValueARM64_OpARM64FMOVDload_0(v)
   104  	case OpARM64FMOVDloadidx:
   105  		return rewriteValueARM64_OpARM64FMOVDloadidx_0(v)
   106  	case OpARM64FMOVDstore:
   107  		return rewriteValueARM64_OpARM64FMOVDstore_0(v)
   108  	case OpARM64FMOVDstoreidx:
   109  		return rewriteValueARM64_OpARM64FMOVDstoreidx_0(v)
   110  	case OpARM64FMOVSload:
   111  		return rewriteValueARM64_OpARM64FMOVSload_0(v)
   112  	case OpARM64FMOVSloadidx:
   113  		return rewriteValueARM64_OpARM64FMOVSloadidx_0(v)
   114  	case OpARM64FMOVSstore:
   115  		return rewriteValueARM64_OpARM64FMOVSstore_0(v)
   116  	case OpARM64FMOVSstoreidx:
   117  		return rewriteValueARM64_OpARM64FMOVSstoreidx_0(v)
   118  	case OpARM64FMULD:
   119  		return rewriteValueARM64_OpARM64FMULD_0(v)
   120  	case OpARM64FMULS:
   121  		return rewriteValueARM64_OpARM64FMULS_0(v)
   122  	case OpARM64FNEGD:
   123  		return rewriteValueARM64_OpARM64FNEGD_0(v)
   124  	case OpARM64FNEGS:
   125  		return rewriteValueARM64_OpARM64FNEGS_0(v)
   126  	case OpARM64FNMULD:
   127  		return rewriteValueARM64_OpARM64FNMULD_0(v)
   128  	case OpARM64FNMULS:
   129  		return rewriteValueARM64_OpARM64FNMULS_0(v)
   130  	case OpARM64FSUBD:
   131  		return rewriteValueARM64_OpARM64FSUBD_0(v)
   132  	case OpARM64FSUBS:
   133  		return rewriteValueARM64_OpARM64FSUBS_0(v)
   134  	case OpARM64GreaterEqual:
   135  		return rewriteValueARM64_OpARM64GreaterEqual_0(v)
   136  	case OpARM64GreaterEqualU:
   137  		return rewriteValueARM64_OpARM64GreaterEqualU_0(v)
   138  	case OpARM64GreaterThan:
   139  		return rewriteValueARM64_OpARM64GreaterThan_0(v)
   140  	case OpARM64GreaterThanU:
   141  		return rewriteValueARM64_OpARM64GreaterThanU_0(v)
   142  	case OpARM64LessEqual:
   143  		return rewriteValueARM64_OpARM64LessEqual_0(v)
   144  	case OpARM64LessEqualU:
   145  		return rewriteValueARM64_OpARM64LessEqualU_0(v)
   146  	case OpARM64LessThan:
   147  		return rewriteValueARM64_OpARM64LessThan_0(v)
   148  	case OpARM64LessThanU:
   149  		return rewriteValueARM64_OpARM64LessThanU_0(v)
   150  	case OpARM64MADD:
   151  		return rewriteValueARM64_OpARM64MADD_0(v) || rewriteValueARM64_OpARM64MADD_10(v) || rewriteValueARM64_OpARM64MADD_20(v)
   152  	case OpARM64MADDW:
   153  		return rewriteValueARM64_OpARM64MADDW_0(v) || rewriteValueARM64_OpARM64MADDW_10(v) || rewriteValueARM64_OpARM64MADDW_20(v)
   154  	case OpARM64MNEG:
   155  		return rewriteValueARM64_OpARM64MNEG_0(v) || rewriteValueARM64_OpARM64MNEG_10(v) || rewriteValueARM64_OpARM64MNEG_20(v)
   156  	case OpARM64MNEGW:
   157  		return rewriteValueARM64_OpARM64MNEGW_0(v) || rewriteValueARM64_OpARM64MNEGW_10(v) || rewriteValueARM64_OpARM64MNEGW_20(v)
   158  	case OpARM64MOD:
   159  		return rewriteValueARM64_OpARM64MOD_0(v)
   160  	case OpARM64MODW:
   161  		return rewriteValueARM64_OpARM64MODW_0(v)
   162  	case OpARM64MOVBUload:
   163  		return rewriteValueARM64_OpARM64MOVBUload_0(v)
   164  	case OpARM64MOVBUloadidx:
   165  		return rewriteValueARM64_OpARM64MOVBUloadidx_0(v)
   166  	case OpARM64MOVBUreg:
   167  		return rewriteValueARM64_OpARM64MOVBUreg_0(v)
   168  	case OpARM64MOVBload:
   169  		return rewriteValueARM64_OpARM64MOVBload_0(v)
   170  	case OpARM64MOVBloadidx:
   171  		return rewriteValueARM64_OpARM64MOVBloadidx_0(v)
   172  	case OpARM64MOVBreg:
   173  		return rewriteValueARM64_OpARM64MOVBreg_0(v)
   174  	case OpARM64MOVBstore:
   175  		return rewriteValueARM64_OpARM64MOVBstore_0(v) || rewriteValueARM64_OpARM64MOVBstore_10(v) || rewriteValueARM64_OpARM64MOVBstore_20(v) || rewriteValueARM64_OpARM64MOVBstore_30(v) || rewriteValueARM64_OpARM64MOVBstore_40(v)
   176  	case OpARM64MOVBstoreidx:
   177  		return rewriteValueARM64_OpARM64MOVBstoreidx_0(v) || rewriteValueARM64_OpARM64MOVBstoreidx_10(v)
   178  	case OpARM64MOVBstorezero:
   179  		return rewriteValueARM64_OpARM64MOVBstorezero_0(v)
   180  	case OpARM64MOVBstorezeroidx:
   181  		return rewriteValueARM64_OpARM64MOVBstorezeroidx_0(v)
   182  	case OpARM64MOVDload:
   183  		return rewriteValueARM64_OpARM64MOVDload_0(v)
   184  	case OpARM64MOVDloadidx:
   185  		return rewriteValueARM64_OpARM64MOVDloadidx_0(v)
   186  	case OpARM64MOVDloadidx8:
   187  		return rewriteValueARM64_OpARM64MOVDloadidx8_0(v)
   188  	case OpARM64MOVDreg:
   189  		return rewriteValueARM64_OpARM64MOVDreg_0(v)
   190  	case OpARM64MOVDstore:
   191  		return rewriteValueARM64_OpARM64MOVDstore_0(v)
   192  	case OpARM64MOVDstoreidx:
   193  		return rewriteValueARM64_OpARM64MOVDstoreidx_0(v)
   194  	case OpARM64MOVDstoreidx8:
   195  		return rewriteValueARM64_OpARM64MOVDstoreidx8_0(v)
   196  	case OpARM64MOVDstorezero:
   197  		return rewriteValueARM64_OpARM64MOVDstorezero_0(v)
   198  	case OpARM64MOVDstorezeroidx:
   199  		return rewriteValueARM64_OpARM64MOVDstorezeroidx_0(v)
   200  	case OpARM64MOVDstorezeroidx8:
   201  		return rewriteValueARM64_OpARM64MOVDstorezeroidx8_0(v)
   202  	case OpARM64MOVHUload:
   203  		return rewriteValueARM64_OpARM64MOVHUload_0(v)
   204  	case OpARM64MOVHUloadidx:
   205  		return rewriteValueARM64_OpARM64MOVHUloadidx_0(v)
   206  	case OpARM64MOVHUloadidx2:
   207  		return rewriteValueARM64_OpARM64MOVHUloadidx2_0(v)
   208  	case OpARM64MOVHUreg:
   209  		return rewriteValueARM64_OpARM64MOVHUreg_0(v) || rewriteValueARM64_OpARM64MOVHUreg_10(v)
   210  	case OpARM64MOVHload:
   211  		return rewriteValueARM64_OpARM64MOVHload_0(v)
   212  	case OpARM64MOVHloadidx:
   213  		return rewriteValueARM64_OpARM64MOVHloadidx_0(v)
   214  	case OpARM64MOVHloadidx2:
   215  		return rewriteValueARM64_OpARM64MOVHloadidx2_0(v)
   216  	case OpARM64MOVHreg:
   217  		return rewriteValueARM64_OpARM64MOVHreg_0(v) || rewriteValueARM64_OpARM64MOVHreg_10(v)
   218  	case OpARM64MOVHstore:
   219  		return rewriteValueARM64_OpARM64MOVHstore_0(v) || rewriteValueARM64_OpARM64MOVHstore_10(v) || rewriteValueARM64_OpARM64MOVHstore_20(v)
   220  	case OpARM64MOVHstoreidx:
   221  		return rewriteValueARM64_OpARM64MOVHstoreidx_0(v) || rewriteValueARM64_OpARM64MOVHstoreidx_10(v)
   222  	case OpARM64MOVHstoreidx2:
   223  		return rewriteValueARM64_OpARM64MOVHstoreidx2_0(v)
   224  	case OpARM64MOVHstorezero:
   225  		return rewriteValueARM64_OpARM64MOVHstorezero_0(v)
   226  	case OpARM64MOVHstorezeroidx:
   227  		return rewriteValueARM64_OpARM64MOVHstorezeroidx_0(v)
   228  	case OpARM64MOVHstorezeroidx2:
   229  		return rewriteValueARM64_OpARM64MOVHstorezeroidx2_0(v)
   230  	case OpARM64MOVQstorezero:
   231  		return rewriteValueARM64_OpARM64MOVQstorezero_0(v)
   232  	case OpARM64MOVWUload:
   233  		return rewriteValueARM64_OpARM64MOVWUload_0(v)
   234  	case OpARM64MOVWUloadidx:
   235  		return rewriteValueARM64_OpARM64MOVWUloadidx_0(v)
   236  	case OpARM64MOVWUloadidx4:
   237  		return rewriteValueARM64_OpARM64MOVWUloadidx4_0(v)
   238  	case OpARM64MOVWUreg:
   239  		return rewriteValueARM64_OpARM64MOVWUreg_0(v) || rewriteValueARM64_OpARM64MOVWUreg_10(v)
   240  	case OpARM64MOVWload:
   241  		return rewriteValueARM64_OpARM64MOVWload_0(v)
   242  	case OpARM64MOVWloadidx:
   243  		return rewriteValueARM64_OpARM64MOVWloadidx_0(v)
   244  	case OpARM64MOVWloadidx4:
   245  		return rewriteValueARM64_OpARM64MOVWloadidx4_0(v)
   246  	case OpARM64MOVWreg:
   247  		return rewriteValueARM64_OpARM64MOVWreg_0(v) || rewriteValueARM64_OpARM64MOVWreg_10(v)
   248  	case OpARM64MOVWstore:
   249  		return rewriteValueARM64_OpARM64MOVWstore_0(v) || rewriteValueARM64_OpARM64MOVWstore_10(v)
   250  	case OpARM64MOVWstoreidx:
   251  		return rewriteValueARM64_OpARM64MOVWstoreidx_0(v)
   252  	case OpARM64MOVWstoreidx4:
   253  		return rewriteValueARM64_OpARM64MOVWstoreidx4_0(v)
   254  	case OpARM64MOVWstorezero:
   255  		return rewriteValueARM64_OpARM64MOVWstorezero_0(v)
   256  	case OpARM64MOVWstorezeroidx:
   257  		return rewriteValueARM64_OpARM64MOVWstorezeroidx_0(v)
   258  	case OpARM64MOVWstorezeroidx4:
   259  		return rewriteValueARM64_OpARM64MOVWstorezeroidx4_0(v)
   260  	case OpARM64MSUB:
   261  		return rewriteValueARM64_OpARM64MSUB_0(v) || rewriteValueARM64_OpARM64MSUB_10(v) || rewriteValueARM64_OpARM64MSUB_20(v)
   262  	case OpARM64MSUBW:
   263  		return rewriteValueARM64_OpARM64MSUBW_0(v) || rewriteValueARM64_OpARM64MSUBW_10(v) || rewriteValueARM64_OpARM64MSUBW_20(v)
   264  	case OpARM64MUL:
   265  		return rewriteValueARM64_OpARM64MUL_0(v) || rewriteValueARM64_OpARM64MUL_10(v) || rewriteValueARM64_OpARM64MUL_20(v)
   266  	case OpARM64MULW:
   267  		return rewriteValueARM64_OpARM64MULW_0(v) || rewriteValueARM64_OpARM64MULW_10(v) || rewriteValueARM64_OpARM64MULW_20(v)
   268  	case OpARM64MVN:
   269  		return rewriteValueARM64_OpARM64MVN_0(v)
   270  	case OpARM64MVNshiftLL:
   271  		return rewriteValueARM64_OpARM64MVNshiftLL_0(v)
   272  	case OpARM64MVNshiftRA:
   273  		return rewriteValueARM64_OpARM64MVNshiftRA_0(v)
   274  	case OpARM64MVNshiftRL:
   275  		return rewriteValueARM64_OpARM64MVNshiftRL_0(v)
   276  	case OpARM64NEG:
   277  		return rewriteValueARM64_OpARM64NEG_0(v)
   278  	case OpARM64NEGshiftLL:
   279  		return rewriteValueARM64_OpARM64NEGshiftLL_0(v)
   280  	case OpARM64NEGshiftRA:
   281  		return rewriteValueARM64_OpARM64NEGshiftRA_0(v)
   282  	case OpARM64NEGshiftRL:
   283  		return rewriteValueARM64_OpARM64NEGshiftRL_0(v)
   284  	case OpARM64NotEqual:
   285  		return rewriteValueARM64_OpARM64NotEqual_0(v)
   286  	case OpARM64OR:
   287  		return rewriteValueARM64_OpARM64OR_0(v) || rewriteValueARM64_OpARM64OR_10(v) || rewriteValueARM64_OpARM64OR_20(v) || rewriteValueARM64_OpARM64OR_30(v) || rewriteValueARM64_OpARM64OR_40(v)
   288  	case OpARM64ORN:
   289  		return rewriteValueARM64_OpARM64ORN_0(v)
   290  	case OpARM64ORNshiftLL:
   291  		return rewriteValueARM64_OpARM64ORNshiftLL_0(v)
   292  	case OpARM64ORNshiftRA:
   293  		return rewriteValueARM64_OpARM64ORNshiftRA_0(v)
   294  	case OpARM64ORNshiftRL:
   295  		return rewriteValueARM64_OpARM64ORNshiftRL_0(v)
   296  	case OpARM64ORconst:
   297  		return rewriteValueARM64_OpARM64ORconst_0(v)
   298  	case OpARM64ORshiftLL:
   299  		return rewriteValueARM64_OpARM64ORshiftLL_0(v) || rewriteValueARM64_OpARM64ORshiftLL_10(v) || rewriteValueARM64_OpARM64ORshiftLL_20(v)
   300  	case OpARM64ORshiftRA:
   301  		return rewriteValueARM64_OpARM64ORshiftRA_0(v)
   302  	case OpARM64ORshiftRL:
   303  		return rewriteValueARM64_OpARM64ORshiftRL_0(v)
   304  	case OpARM64RORWconst:
   305  		return rewriteValueARM64_OpARM64RORWconst_0(v)
   306  	case OpARM64RORconst:
   307  		return rewriteValueARM64_OpARM64RORconst_0(v)
   308  	case OpARM64SLL:
   309  		return rewriteValueARM64_OpARM64SLL_0(v)
   310  	case OpARM64SLLconst:
   311  		return rewriteValueARM64_OpARM64SLLconst_0(v)
   312  	case OpARM64SRA:
   313  		return rewriteValueARM64_OpARM64SRA_0(v)
   314  	case OpARM64SRAconst:
   315  		return rewriteValueARM64_OpARM64SRAconst_0(v)
   316  	case OpARM64SRL:
   317  		return rewriteValueARM64_OpARM64SRL_0(v)
   318  	case OpARM64SRLconst:
   319  		return rewriteValueARM64_OpARM64SRLconst_0(v) || rewriteValueARM64_OpARM64SRLconst_10(v)
   320  	case OpARM64STP:
   321  		return rewriteValueARM64_OpARM64STP_0(v)
   322  	case OpARM64SUB:
   323  		return rewriteValueARM64_OpARM64SUB_0(v) || rewriteValueARM64_OpARM64SUB_10(v)
   324  	case OpARM64SUBconst:
   325  		return rewriteValueARM64_OpARM64SUBconst_0(v)
   326  	case OpARM64SUBshiftLL:
   327  		return rewriteValueARM64_OpARM64SUBshiftLL_0(v)
   328  	case OpARM64SUBshiftRA:
   329  		return rewriteValueARM64_OpARM64SUBshiftRA_0(v)
   330  	case OpARM64SUBshiftRL:
   331  		return rewriteValueARM64_OpARM64SUBshiftRL_0(v)
   332  	case OpARM64TST:
   333  		return rewriteValueARM64_OpARM64TST_0(v)
   334  	case OpARM64TSTW:
   335  		return rewriteValueARM64_OpARM64TSTW_0(v)
   336  	case OpARM64TSTWconst:
   337  		return rewriteValueARM64_OpARM64TSTWconst_0(v)
   338  	case OpARM64TSTconst:
   339  		return rewriteValueARM64_OpARM64TSTconst_0(v)
   340  	case OpARM64TSTshiftLL:
   341  		return rewriteValueARM64_OpARM64TSTshiftLL_0(v)
   342  	case OpARM64TSTshiftRA:
   343  		return rewriteValueARM64_OpARM64TSTshiftRA_0(v)
   344  	case OpARM64TSTshiftRL:
   345  		return rewriteValueARM64_OpARM64TSTshiftRL_0(v)
   346  	case OpARM64UBFIZ:
   347  		return rewriteValueARM64_OpARM64UBFIZ_0(v)
   348  	case OpARM64UBFX:
   349  		return rewriteValueARM64_OpARM64UBFX_0(v)
   350  	case OpARM64UDIV:
   351  		return rewriteValueARM64_OpARM64UDIV_0(v)
   352  	case OpARM64UDIVW:
   353  		return rewriteValueARM64_OpARM64UDIVW_0(v)
   354  	case OpARM64UMOD:
   355  		return rewriteValueARM64_OpARM64UMOD_0(v)
   356  	case OpARM64UMODW:
   357  		return rewriteValueARM64_OpARM64UMODW_0(v)
   358  	case OpARM64XOR:
   359  		return rewriteValueARM64_OpARM64XOR_0(v) || rewriteValueARM64_OpARM64XOR_10(v)
   360  	case OpARM64XORconst:
   361  		return rewriteValueARM64_OpARM64XORconst_0(v)
   362  	case OpARM64XORshiftLL:
   363  		return rewriteValueARM64_OpARM64XORshiftLL_0(v)
   364  	case OpARM64XORshiftRA:
   365  		return rewriteValueARM64_OpARM64XORshiftRA_0(v)
   366  	case OpARM64XORshiftRL:
   367  		return rewriteValueARM64_OpARM64XORshiftRL_0(v)
   368  	case OpAbs:
   369  		return rewriteValueARM64_OpAbs_0(v)
   370  	case OpAdd16:
   371  		return rewriteValueARM64_OpAdd16_0(v)
   372  	case OpAdd32:
   373  		return rewriteValueARM64_OpAdd32_0(v)
   374  	case OpAdd32F:
   375  		return rewriteValueARM64_OpAdd32F_0(v)
   376  	case OpAdd64:
   377  		return rewriteValueARM64_OpAdd64_0(v)
   378  	case OpAdd64F:
   379  		return rewriteValueARM64_OpAdd64F_0(v)
   380  	case OpAdd8:
   381  		return rewriteValueARM64_OpAdd8_0(v)
   382  	case OpAddPtr:
   383  		return rewriteValueARM64_OpAddPtr_0(v)
   384  	case OpAddr:
   385  		return rewriteValueARM64_OpAddr_0(v)
   386  	case OpAnd16:
   387  		return rewriteValueARM64_OpAnd16_0(v)
   388  	case OpAnd32:
   389  		return rewriteValueARM64_OpAnd32_0(v)
   390  	case OpAnd64:
   391  		return rewriteValueARM64_OpAnd64_0(v)
   392  	case OpAnd8:
   393  		return rewriteValueARM64_OpAnd8_0(v)
   394  	case OpAndB:
   395  		return rewriteValueARM64_OpAndB_0(v)
   396  	case OpAtomicAdd32:
   397  		return rewriteValueARM64_OpAtomicAdd32_0(v)
   398  	case OpAtomicAdd32Variant:
   399  		return rewriteValueARM64_OpAtomicAdd32Variant_0(v)
   400  	case OpAtomicAdd64:
   401  		return rewriteValueARM64_OpAtomicAdd64_0(v)
   402  	case OpAtomicAdd64Variant:
   403  		return rewriteValueARM64_OpAtomicAdd64Variant_0(v)
   404  	case OpAtomicAnd8:
   405  		return rewriteValueARM64_OpAtomicAnd8_0(v)
   406  	case OpAtomicCompareAndSwap32:
   407  		return rewriteValueARM64_OpAtomicCompareAndSwap32_0(v)
   408  	case OpAtomicCompareAndSwap64:
   409  		return rewriteValueARM64_OpAtomicCompareAndSwap64_0(v)
   410  	case OpAtomicExchange32:
   411  		return rewriteValueARM64_OpAtomicExchange32_0(v)
   412  	case OpAtomicExchange64:
   413  		return rewriteValueARM64_OpAtomicExchange64_0(v)
   414  	case OpAtomicLoad32:
   415  		return rewriteValueARM64_OpAtomicLoad32_0(v)
   416  	case OpAtomicLoad64:
   417  		return rewriteValueARM64_OpAtomicLoad64_0(v)
   418  	case OpAtomicLoadPtr:
   419  		return rewriteValueARM64_OpAtomicLoadPtr_0(v)
   420  	case OpAtomicOr8:
   421  		return rewriteValueARM64_OpAtomicOr8_0(v)
   422  	case OpAtomicStore32:
   423  		return rewriteValueARM64_OpAtomicStore32_0(v)
   424  	case OpAtomicStore64:
   425  		return rewriteValueARM64_OpAtomicStore64_0(v)
   426  	case OpAtomicStorePtrNoWB:
   427  		return rewriteValueARM64_OpAtomicStorePtrNoWB_0(v)
   428  	case OpAvg64u:
   429  		return rewriteValueARM64_OpAvg64u_0(v)
   430  	case OpBitLen64:
   431  		return rewriteValueARM64_OpBitLen64_0(v)
   432  	case OpBitRev16:
   433  		return rewriteValueARM64_OpBitRev16_0(v)
   434  	case OpBitRev32:
   435  		return rewriteValueARM64_OpBitRev32_0(v)
   436  	case OpBitRev64:
   437  		return rewriteValueARM64_OpBitRev64_0(v)
   438  	case OpBitRev8:
   439  		return rewriteValueARM64_OpBitRev8_0(v)
   440  	case OpBswap32:
   441  		return rewriteValueARM64_OpBswap32_0(v)
   442  	case OpBswap64:
   443  		return rewriteValueARM64_OpBswap64_0(v)
   444  	case OpCeil:
   445  		return rewriteValueARM64_OpCeil_0(v)
   446  	case OpClosureCall:
   447  		return rewriteValueARM64_OpClosureCall_0(v)
   448  	case OpCom16:
   449  		return rewriteValueARM64_OpCom16_0(v)
   450  	case OpCom32:
   451  		return rewriteValueARM64_OpCom32_0(v)
   452  	case OpCom64:
   453  		return rewriteValueARM64_OpCom64_0(v)
   454  	case OpCom8:
   455  		return rewriteValueARM64_OpCom8_0(v)
   456  	case OpCondSelect:
   457  		return rewriteValueARM64_OpCondSelect_0(v)
   458  	case OpConst16:
   459  		return rewriteValueARM64_OpConst16_0(v)
   460  	case OpConst32:
   461  		return rewriteValueARM64_OpConst32_0(v)
   462  	case OpConst32F:
   463  		return rewriteValueARM64_OpConst32F_0(v)
   464  	case OpConst64:
   465  		return rewriteValueARM64_OpConst64_0(v)
   466  	case OpConst64F:
   467  		return rewriteValueARM64_OpConst64F_0(v)
   468  	case OpConst8:
   469  		return rewriteValueARM64_OpConst8_0(v)
   470  	case OpConstBool:
   471  		return rewriteValueARM64_OpConstBool_0(v)
   472  	case OpConstNil:
   473  		return rewriteValueARM64_OpConstNil_0(v)
   474  	case OpCtz32:
   475  		return rewriteValueARM64_OpCtz32_0(v)
   476  	case OpCtz32NonZero:
   477  		return rewriteValueARM64_OpCtz32NonZero_0(v)
   478  	case OpCtz64:
   479  		return rewriteValueARM64_OpCtz64_0(v)
   480  	case OpCtz64NonZero:
   481  		return rewriteValueARM64_OpCtz64NonZero_0(v)
   482  	case OpCvt32Fto32:
   483  		return rewriteValueARM64_OpCvt32Fto32_0(v)
   484  	case OpCvt32Fto32U:
   485  		return rewriteValueARM64_OpCvt32Fto32U_0(v)
   486  	case OpCvt32Fto64:
   487  		return rewriteValueARM64_OpCvt32Fto64_0(v)
   488  	case OpCvt32Fto64F:
   489  		return rewriteValueARM64_OpCvt32Fto64F_0(v)
   490  	case OpCvt32Fto64U:
   491  		return rewriteValueARM64_OpCvt32Fto64U_0(v)
   492  	case OpCvt32Uto32F:
   493  		return rewriteValueARM64_OpCvt32Uto32F_0(v)
   494  	case OpCvt32Uto64F:
   495  		return rewriteValueARM64_OpCvt32Uto64F_0(v)
   496  	case OpCvt32to32F:
   497  		return rewriteValueARM64_OpCvt32to32F_0(v)
   498  	case OpCvt32to64F:
   499  		return rewriteValueARM64_OpCvt32to64F_0(v)
   500  	case OpCvt64Fto32:
   501  		return rewriteValueARM64_OpCvt64Fto32_0(v)
   502  	case OpCvt64Fto32F:
   503  		return rewriteValueARM64_OpCvt64Fto32F_0(v)
   504  	case OpCvt64Fto32U:
   505  		return rewriteValueARM64_OpCvt64Fto32U_0(v)
   506  	case OpCvt64Fto64:
   507  		return rewriteValueARM64_OpCvt64Fto64_0(v)
   508  	case OpCvt64Fto64U:
   509  		return rewriteValueARM64_OpCvt64Fto64U_0(v)
   510  	case OpCvt64Uto32F:
   511  		return rewriteValueARM64_OpCvt64Uto32F_0(v)
   512  	case OpCvt64Uto64F:
   513  		return rewriteValueARM64_OpCvt64Uto64F_0(v)
   514  	case OpCvt64to32F:
   515  		return rewriteValueARM64_OpCvt64to32F_0(v)
   516  	case OpCvt64to64F:
   517  		return rewriteValueARM64_OpCvt64to64F_0(v)
   518  	case OpDiv16:
   519  		return rewriteValueARM64_OpDiv16_0(v)
   520  	case OpDiv16u:
   521  		return rewriteValueARM64_OpDiv16u_0(v)
   522  	case OpDiv32:
   523  		return rewriteValueARM64_OpDiv32_0(v)
   524  	case OpDiv32F:
   525  		return rewriteValueARM64_OpDiv32F_0(v)
   526  	case OpDiv32u:
   527  		return rewriteValueARM64_OpDiv32u_0(v)
   528  	case OpDiv64:
   529  		return rewriteValueARM64_OpDiv64_0(v)
   530  	case OpDiv64F:
   531  		return rewriteValueARM64_OpDiv64F_0(v)
   532  	case OpDiv64u:
   533  		return rewriteValueARM64_OpDiv64u_0(v)
   534  	case OpDiv8:
   535  		return rewriteValueARM64_OpDiv8_0(v)
   536  	case OpDiv8u:
   537  		return rewriteValueARM64_OpDiv8u_0(v)
   538  	case OpEq16:
   539  		return rewriteValueARM64_OpEq16_0(v)
   540  	case OpEq32:
   541  		return rewriteValueARM64_OpEq32_0(v)
   542  	case OpEq32F:
   543  		return rewriteValueARM64_OpEq32F_0(v)
   544  	case OpEq64:
   545  		return rewriteValueARM64_OpEq64_0(v)
   546  	case OpEq64F:
   547  		return rewriteValueARM64_OpEq64F_0(v)
   548  	case OpEq8:
   549  		return rewriteValueARM64_OpEq8_0(v)
   550  	case OpEqB:
   551  		return rewriteValueARM64_OpEqB_0(v)
   552  	case OpEqPtr:
   553  		return rewriteValueARM64_OpEqPtr_0(v)
   554  	case OpFloor:
   555  		return rewriteValueARM64_OpFloor_0(v)
   556  	case OpGeq16:
   557  		return rewriteValueARM64_OpGeq16_0(v)
   558  	case OpGeq16U:
   559  		return rewriteValueARM64_OpGeq16U_0(v)
   560  	case OpGeq32:
   561  		return rewriteValueARM64_OpGeq32_0(v)
   562  	case OpGeq32F:
   563  		return rewriteValueARM64_OpGeq32F_0(v)
   564  	case OpGeq32U:
   565  		return rewriteValueARM64_OpGeq32U_0(v)
   566  	case OpGeq64:
   567  		return rewriteValueARM64_OpGeq64_0(v)
   568  	case OpGeq64F:
   569  		return rewriteValueARM64_OpGeq64F_0(v)
   570  	case OpGeq64U:
   571  		return rewriteValueARM64_OpGeq64U_0(v)
   572  	case OpGeq8:
   573  		return rewriteValueARM64_OpGeq8_0(v)
   574  	case OpGeq8U:
   575  		return rewriteValueARM64_OpGeq8U_0(v)
   576  	case OpGetCallerPC:
   577  		return rewriteValueARM64_OpGetCallerPC_0(v)
   578  	case OpGetCallerSP:
   579  		return rewriteValueARM64_OpGetCallerSP_0(v)
   580  	case OpGetClosurePtr:
   581  		return rewriteValueARM64_OpGetClosurePtr_0(v)
   582  	case OpGreater16:
   583  		return rewriteValueARM64_OpGreater16_0(v)
   584  	case OpGreater16U:
   585  		return rewriteValueARM64_OpGreater16U_0(v)
   586  	case OpGreater32:
   587  		return rewriteValueARM64_OpGreater32_0(v)
   588  	case OpGreater32F:
   589  		return rewriteValueARM64_OpGreater32F_0(v)
   590  	case OpGreater32U:
   591  		return rewriteValueARM64_OpGreater32U_0(v)
   592  	case OpGreater64:
   593  		return rewriteValueARM64_OpGreater64_0(v)
   594  	case OpGreater64F:
   595  		return rewriteValueARM64_OpGreater64F_0(v)
   596  	case OpGreater64U:
   597  		return rewriteValueARM64_OpGreater64U_0(v)
   598  	case OpGreater8:
   599  		return rewriteValueARM64_OpGreater8_0(v)
   600  	case OpGreater8U:
   601  		return rewriteValueARM64_OpGreater8U_0(v)
   602  	case OpHmul32:
   603  		return rewriteValueARM64_OpHmul32_0(v)
   604  	case OpHmul32u:
   605  		return rewriteValueARM64_OpHmul32u_0(v)
   606  	case OpHmul64:
   607  		return rewriteValueARM64_OpHmul64_0(v)
   608  	case OpHmul64u:
   609  		return rewriteValueARM64_OpHmul64u_0(v)
   610  	case OpInterCall:
   611  		return rewriteValueARM64_OpInterCall_0(v)
   612  	case OpIsInBounds:
   613  		return rewriteValueARM64_OpIsInBounds_0(v)
   614  	case OpIsNonNil:
   615  		return rewriteValueARM64_OpIsNonNil_0(v)
   616  	case OpIsSliceInBounds:
   617  		return rewriteValueARM64_OpIsSliceInBounds_0(v)
   618  	case OpLeq16:
   619  		return rewriteValueARM64_OpLeq16_0(v)
   620  	case OpLeq16U:
   621  		return rewriteValueARM64_OpLeq16U_0(v)
   622  	case OpLeq32:
   623  		return rewriteValueARM64_OpLeq32_0(v)
   624  	case OpLeq32F:
   625  		return rewriteValueARM64_OpLeq32F_0(v)
   626  	case OpLeq32U:
   627  		return rewriteValueARM64_OpLeq32U_0(v)
   628  	case OpLeq64:
   629  		return rewriteValueARM64_OpLeq64_0(v)
   630  	case OpLeq64F:
   631  		return rewriteValueARM64_OpLeq64F_0(v)
   632  	case OpLeq64U:
   633  		return rewriteValueARM64_OpLeq64U_0(v)
   634  	case OpLeq8:
   635  		return rewriteValueARM64_OpLeq8_0(v)
   636  	case OpLeq8U:
   637  		return rewriteValueARM64_OpLeq8U_0(v)
   638  	case OpLess16:
   639  		return rewriteValueARM64_OpLess16_0(v)
   640  	case OpLess16U:
   641  		return rewriteValueARM64_OpLess16U_0(v)
   642  	case OpLess32:
   643  		return rewriteValueARM64_OpLess32_0(v)
   644  	case OpLess32F:
   645  		return rewriteValueARM64_OpLess32F_0(v)
   646  	case OpLess32U:
   647  		return rewriteValueARM64_OpLess32U_0(v)
   648  	case OpLess64:
   649  		return rewriteValueARM64_OpLess64_0(v)
   650  	case OpLess64F:
   651  		return rewriteValueARM64_OpLess64F_0(v)
   652  	case OpLess64U:
   653  		return rewriteValueARM64_OpLess64U_0(v)
   654  	case OpLess8:
   655  		return rewriteValueARM64_OpLess8_0(v)
   656  	case OpLess8U:
   657  		return rewriteValueARM64_OpLess8U_0(v)
   658  	case OpLoad:
   659  		return rewriteValueARM64_OpLoad_0(v)
   660  	case OpLocalAddr:
   661  		return rewriteValueARM64_OpLocalAddr_0(v)
   662  	case OpLsh16x16:
   663  		return rewriteValueARM64_OpLsh16x16_0(v)
   664  	case OpLsh16x32:
   665  		return rewriteValueARM64_OpLsh16x32_0(v)
   666  	case OpLsh16x64:
   667  		return rewriteValueARM64_OpLsh16x64_0(v)
   668  	case OpLsh16x8:
   669  		return rewriteValueARM64_OpLsh16x8_0(v)
   670  	case OpLsh32x16:
   671  		return rewriteValueARM64_OpLsh32x16_0(v)
   672  	case OpLsh32x32:
   673  		return rewriteValueARM64_OpLsh32x32_0(v)
   674  	case OpLsh32x64:
   675  		return rewriteValueARM64_OpLsh32x64_0(v)
   676  	case OpLsh32x8:
   677  		return rewriteValueARM64_OpLsh32x8_0(v)
   678  	case OpLsh64x16:
   679  		return rewriteValueARM64_OpLsh64x16_0(v)
   680  	case OpLsh64x32:
   681  		return rewriteValueARM64_OpLsh64x32_0(v)
   682  	case OpLsh64x64:
   683  		return rewriteValueARM64_OpLsh64x64_0(v)
   684  	case OpLsh64x8:
   685  		return rewriteValueARM64_OpLsh64x8_0(v)
   686  	case OpLsh8x16:
   687  		return rewriteValueARM64_OpLsh8x16_0(v)
   688  	case OpLsh8x32:
   689  		return rewriteValueARM64_OpLsh8x32_0(v)
   690  	case OpLsh8x64:
   691  		return rewriteValueARM64_OpLsh8x64_0(v)
   692  	case OpLsh8x8:
   693  		return rewriteValueARM64_OpLsh8x8_0(v)
   694  	case OpMod16:
   695  		return rewriteValueARM64_OpMod16_0(v)
   696  	case OpMod16u:
   697  		return rewriteValueARM64_OpMod16u_0(v)
   698  	case OpMod32:
   699  		return rewriteValueARM64_OpMod32_0(v)
   700  	case OpMod32u:
   701  		return rewriteValueARM64_OpMod32u_0(v)
   702  	case OpMod64:
   703  		return rewriteValueARM64_OpMod64_0(v)
   704  	case OpMod64u:
   705  		return rewriteValueARM64_OpMod64u_0(v)
   706  	case OpMod8:
   707  		return rewriteValueARM64_OpMod8_0(v)
   708  	case OpMod8u:
   709  		return rewriteValueARM64_OpMod8u_0(v)
   710  	case OpMove:
   711  		return rewriteValueARM64_OpMove_0(v) || rewriteValueARM64_OpMove_10(v)
   712  	case OpMul16:
   713  		return rewriteValueARM64_OpMul16_0(v)
   714  	case OpMul32:
   715  		return rewriteValueARM64_OpMul32_0(v)
   716  	case OpMul32F:
   717  		return rewriteValueARM64_OpMul32F_0(v)
   718  	case OpMul64:
   719  		return rewriteValueARM64_OpMul64_0(v)
   720  	case OpMul64F:
   721  		return rewriteValueARM64_OpMul64F_0(v)
   722  	case OpMul64uhilo:
   723  		return rewriteValueARM64_OpMul64uhilo_0(v)
   724  	case OpMul8:
   725  		return rewriteValueARM64_OpMul8_0(v)
   726  	case OpNeg16:
   727  		return rewriteValueARM64_OpNeg16_0(v)
   728  	case OpNeg32:
   729  		return rewriteValueARM64_OpNeg32_0(v)
   730  	case OpNeg32F:
   731  		return rewriteValueARM64_OpNeg32F_0(v)
   732  	case OpNeg64:
   733  		return rewriteValueARM64_OpNeg64_0(v)
   734  	case OpNeg64F:
   735  		return rewriteValueARM64_OpNeg64F_0(v)
   736  	case OpNeg8:
   737  		return rewriteValueARM64_OpNeg8_0(v)
   738  	case OpNeq16:
   739  		return rewriteValueARM64_OpNeq16_0(v)
   740  	case OpNeq32:
   741  		return rewriteValueARM64_OpNeq32_0(v)
   742  	case OpNeq32F:
   743  		return rewriteValueARM64_OpNeq32F_0(v)
   744  	case OpNeq64:
   745  		return rewriteValueARM64_OpNeq64_0(v)
   746  	case OpNeq64F:
   747  		return rewriteValueARM64_OpNeq64F_0(v)
   748  	case OpNeq8:
   749  		return rewriteValueARM64_OpNeq8_0(v)
   750  	case OpNeqB:
   751  		return rewriteValueARM64_OpNeqB_0(v)
   752  	case OpNeqPtr:
   753  		return rewriteValueARM64_OpNeqPtr_0(v)
   754  	case OpNilCheck:
   755  		return rewriteValueARM64_OpNilCheck_0(v)
   756  	case OpNot:
   757  		return rewriteValueARM64_OpNot_0(v)
   758  	case OpOffPtr:
   759  		return rewriteValueARM64_OpOffPtr_0(v)
   760  	case OpOr16:
   761  		return rewriteValueARM64_OpOr16_0(v)
   762  	case OpOr32:
   763  		return rewriteValueARM64_OpOr32_0(v)
   764  	case OpOr64:
   765  		return rewriteValueARM64_OpOr64_0(v)
   766  	case OpOr8:
   767  		return rewriteValueARM64_OpOr8_0(v)
   768  	case OpOrB:
   769  		return rewriteValueARM64_OpOrB_0(v)
   770  	case OpPopCount16:
   771  		return rewriteValueARM64_OpPopCount16_0(v)
   772  	case OpPopCount32:
   773  		return rewriteValueARM64_OpPopCount32_0(v)
   774  	case OpPopCount64:
   775  		return rewriteValueARM64_OpPopCount64_0(v)
   776  	case OpRotateLeft32:
   777  		return rewriteValueARM64_OpRotateLeft32_0(v)
   778  	case OpRotateLeft64:
   779  		return rewriteValueARM64_OpRotateLeft64_0(v)
   780  	case OpRound:
   781  		return rewriteValueARM64_OpRound_0(v)
   782  	case OpRound32F:
   783  		return rewriteValueARM64_OpRound32F_0(v)
   784  	case OpRound64F:
   785  		return rewriteValueARM64_OpRound64F_0(v)
   786  	case OpRoundToEven:
   787  		return rewriteValueARM64_OpRoundToEven_0(v)
   788  	case OpRsh16Ux16:
   789  		return rewriteValueARM64_OpRsh16Ux16_0(v)
   790  	case OpRsh16Ux32:
   791  		return rewriteValueARM64_OpRsh16Ux32_0(v)
   792  	case OpRsh16Ux64:
   793  		return rewriteValueARM64_OpRsh16Ux64_0(v)
   794  	case OpRsh16Ux8:
   795  		return rewriteValueARM64_OpRsh16Ux8_0(v)
   796  	case OpRsh16x16:
   797  		return rewriteValueARM64_OpRsh16x16_0(v)
   798  	case OpRsh16x32:
   799  		return rewriteValueARM64_OpRsh16x32_0(v)
   800  	case OpRsh16x64:
   801  		return rewriteValueARM64_OpRsh16x64_0(v)
   802  	case OpRsh16x8:
   803  		return rewriteValueARM64_OpRsh16x8_0(v)
   804  	case OpRsh32Ux16:
   805  		return rewriteValueARM64_OpRsh32Ux16_0(v)
   806  	case OpRsh32Ux32:
   807  		return rewriteValueARM64_OpRsh32Ux32_0(v)
   808  	case OpRsh32Ux64:
   809  		return rewriteValueARM64_OpRsh32Ux64_0(v)
   810  	case OpRsh32Ux8:
   811  		return rewriteValueARM64_OpRsh32Ux8_0(v)
   812  	case OpRsh32x16:
   813  		return rewriteValueARM64_OpRsh32x16_0(v)
   814  	case OpRsh32x32:
   815  		return rewriteValueARM64_OpRsh32x32_0(v)
   816  	case OpRsh32x64:
   817  		return rewriteValueARM64_OpRsh32x64_0(v)
   818  	case OpRsh32x8:
   819  		return rewriteValueARM64_OpRsh32x8_0(v)
   820  	case OpRsh64Ux16:
   821  		return rewriteValueARM64_OpRsh64Ux16_0(v)
   822  	case OpRsh64Ux32:
   823  		return rewriteValueARM64_OpRsh64Ux32_0(v)
   824  	case OpRsh64Ux64:
   825  		return rewriteValueARM64_OpRsh64Ux64_0(v)
   826  	case OpRsh64Ux8:
   827  		return rewriteValueARM64_OpRsh64Ux8_0(v)
   828  	case OpRsh64x16:
   829  		return rewriteValueARM64_OpRsh64x16_0(v)
   830  	case OpRsh64x32:
   831  		return rewriteValueARM64_OpRsh64x32_0(v)
   832  	case OpRsh64x64:
   833  		return rewriteValueARM64_OpRsh64x64_0(v)
   834  	case OpRsh64x8:
   835  		return rewriteValueARM64_OpRsh64x8_0(v)
   836  	case OpRsh8Ux16:
   837  		return rewriteValueARM64_OpRsh8Ux16_0(v)
   838  	case OpRsh8Ux32:
   839  		return rewriteValueARM64_OpRsh8Ux32_0(v)
   840  	case OpRsh8Ux64:
   841  		return rewriteValueARM64_OpRsh8Ux64_0(v)
   842  	case OpRsh8Ux8:
   843  		return rewriteValueARM64_OpRsh8Ux8_0(v)
   844  	case OpRsh8x16:
   845  		return rewriteValueARM64_OpRsh8x16_0(v)
   846  	case OpRsh8x32:
   847  		return rewriteValueARM64_OpRsh8x32_0(v)
   848  	case OpRsh8x64:
   849  		return rewriteValueARM64_OpRsh8x64_0(v)
   850  	case OpRsh8x8:
   851  		return rewriteValueARM64_OpRsh8x8_0(v)
   852  	case OpSignExt16to32:
   853  		return rewriteValueARM64_OpSignExt16to32_0(v)
   854  	case OpSignExt16to64:
   855  		return rewriteValueARM64_OpSignExt16to64_0(v)
   856  	case OpSignExt32to64:
   857  		return rewriteValueARM64_OpSignExt32to64_0(v)
   858  	case OpSignExt8to16:
   859  		return rewriteValueARM64_OpSignExt8to16_0(v)
   860  	case OpSignExt8to32:
   861  		return rewriteValueARM64_OpSignExt8to32_0(v)
   862  	case OpSignExt8to64:
   863  		return rewriteValueARM64_OpSignExt8to64_0(v)
   864  	case OpSlicemask:
   865  		return rewriteValueARM64_OpSlicemask_0(v)
   866  	case OpSqrt:
   867  		return rewriteValueARM64_OpSqrt_0(v)
   868  	case OpStaticCall:
   869  		return rewriteValueARM64_OpStaticCall_0(v)
   870  	case OpStore:
   871  		return rewriteValueARM64_OpStore_0(v)
   872  	case OpSub16:
   873  		return rewriteValueARM64_OpSub16_0(v)
   874  	case OpSub32:
   875  		return rewriteValueARM64_OpSub32_0(v)
   876  	case OpSub32F:
   877  		return rewriteValueARM64_OpSub32F_0(v)
   878  	case OpSub64:
   879  		return rewriteValueARM64_OpSub64_0(v)
   880  	case OpSub64F:
   881  		return rewriteValueARM64_OpSub64F_0(v)
   882  	case OpSub8:
   883  		return rewriteValueARM64_OpSub8_0(v)
   884  	case OpSubPtr:
   885  		return rewriteValueARM64_OpSubPtr_0(v)
   886  	case OpTrunc:
   887  		return rewriteValueARM64_OpTrunc_0(v)
   888  	case OpTrunc16to8:
   889  		return rewriteValueARM64_OpTrunc16to8_0(v)
   890  	case OpTrunc32to16:
   891  		return rewriteValueARM64_OpTrunc32to16_0(v)
   892  	case OpTrunc32to8:
   893  		return rewriteValueARM64_OpTrunc32to8_0(v)
   894  	case OpTrunc64to16:
   895  		return rewriteValueARM64_OpTrunc64to16_0(v)
   896  	case OpTrunc64to32:
   897  		return rewriteValueARM64_OpTrunc64to32_0(v)
   898  	case OpTrunc64to8:
   899  		return rewriteValueARM64_OpTrunc64to8_0(v)
   900  	case OpWB:
   901  		return rewriteValueARM64_OpWB_0(v)
   902  	case OpXor16:
   903  		return rewriteValueARM64_OpXor16_0(v)
   904  	case OpXor32:
   905  		return rewriteValueARM64_OpXor32_0(v)
   906  	case OpXor64:
   907  		return rewriteValueARM64_OpXor64_0(v)
   908  	case OpXor8:
   909  		return rewriteValueARM64_OpXor8_0(v)
   910  	case OpZero:
   911  		return rewriteValueARM64_OpZero_0(v) || rewriteValueARM64_OpZero_10(v) || rewriteValueARM64_OpZero_20(v)
   912  	case OpZeroExt16to32:
   913  		return rewriteValueARM64_OpZeroExt16to32_0(v)
   914  	case OpZeroExt16to64:
   915  		return rewriteValueARM64_OpZeroExt16to64_0(v)
   916  	case OpZeroExt32to64:
   917  		return rewriteValueARM64_OpZeroExt32to64_0(v)
   918  	case OpZeroExt8to16:
   919  		return rewriteValueARM64_OpZeroExt8to16_0(v)
   920  	case OpZeroExt8to32:
   921  		return rewriteValueARM64_OpZeroExt8to32_0(v)
   922  	case OpZeroExt8to64:
   923  		return rewriteValueARM64_OpZeroExt8to64_0(v)
   924  	}
   925  	return false
   926  }
   927  func rewriteValueARM64_OpARM64ADD_0(v *Value) bool {
   928  	// match: (ADD x (MOVDconst [c]))
   929  	// cond:
   930  	// result: (ADDconst [c] x)
   931  	for {
   932  		_ = v.Args[1]
   933  		x := v.Args[0]
   934  		v_1 := v.Args[1]
   935  		if v_1.Op != OpARM64MOVDconst {
   936  			break
   937  		}
   938  		c := v_1.AuxInt
   939  		v.reset(OpARM64ADDconst)
   940  		v.AuxInt = c
   941  		v.AddArg(x)
   942  		return true
   943  	}
   944  	// match: (ADD (MOVDconst [c]) x)
   945  	// cond:
   946  	// result: (ADDconst [c] x)
   947  	for {
   948  		_ = v.Args[1]
   949  		v_0 := v.Args[0]
   950  		if v_0.Op != OpARM64MOVDconst {
   951  			break
   952  		}
   953  		c := v_0.AuxInt
   954  		x := v.Args[1]
   955  		v.reset(OpARM64ADDconst)
   956  		v.AuxInt = c
   957  		v.AddArg(x)
   958  		return true
   959  	}
   960  	// match: (ADD a l:(MUL x y))
   961  	// cond: l.Uses==1 && clobber(l)
   962  	// result: (MADD a x y)
   963  	for {
   964  		_ = v.Args[1]
   965  		a := v.Args[0]
   966  		l := v.Args[1]
   967  		if l.Op != OpARM64MUL {
   968  			break
   969  		}
   970  		_ = l.Args[1]
   971  		x := l.Args[0]
   972  		y := l.Args[1]
   973  		if !(l.Uses == 1 && clobber(l)) {
   974  			break
   975  		}
   976  		v.reset(OpARM64MADD)
   977  		v.AddArg(a)
   978  		v.AddArg(x)
   979  		v.AddArg(y)
   980  		return true
   981  	}
   982  	// match: (ADD l:(MUL x y) a)
   983  	// cond: l.Uses==1 && clobber(l)
   984  	// result: (MADD a x y)
   985  	for {
   986  		_ = v.Args[1]
   987  		l := v.Args[0]
   988  		if l.Op != OpARM64MUL {
   989  			break
   990  		}
   991  		_ = l.Args[1]
   992  		x := l.Args[0]
   993  		y := l.Args[1]
   994  		a := v.Args[1]
   995  		if !(l.Uses == 1 && clobber(l)) {
   996  			break
   997  		}
   998  		v.reset(OpARM64MADD)
   999  		v.AddArg(a)
  1000  		v.AddArg(x)
  1001  		v.AddArg(y)
  1002  		return true
  1003  	}
  1004  	// match: (ADD a l:(MNEG x y))
  1005  	// cond: l.Uses==1 && clobber(l)
  1006  	// result: (MSUB a x y)
  1007  	for {
  1008  		_ = v.Args[1]
  1009  		a := v.Args[0]
  1010  		l := v.Args[1]
  1011  		if l.Op != OpARM64MNEG {
  1012  			break
  1013  		}
  1014  		_ = l.Args[1]
  1015  		x := l.Args[0]
  1016  		y := l.Args[1]
  1017  		if !(l.Uses == 1 && clobber(l)) {
  1018  			break
  1019  		}
  1020  		v.reset(OpARM64MSUB)
  1021  		v.AddArg(a)
  1022  		v.AddArg(x)
  1023  		v.AddArg(y)
  1024  		return true
  1025  	}
  1026  	// match: (ADD l:(MNEG x y) a)
  1027  	// cond: l.Uses==1 && clobber(l)
  1028  	// result: (MSUB a x y)
  1029  	for {
  1030  		_ = v.Args[1]
  1031  		l := v.Args[0]
  1032  		if l.Op != OpARM64MNEG {
  1033  			break
  1034  		}
  1035  		_ = l.Args[1]
  1036  		x := l.Args[0]
  1037  		y := l.Args[1]
  1038  		a := v.Args[1]
  1039  		if !(l.Uses == 1 && clobber(l)) {
  1040  			break
  1041  		}
  1042  		v.reset(OpARM64MSUB)
  1043  		v.AddArg(a)
  1044  		v.AddArg(x)
  1045  		v.AddArg(y)
  1046  		return true
  1047  	}
  1048  	// match: (ADD a l:(MULW x y))
  1049  	// cond: a.Type.Size() != 8 && l.Uses==1 && clobber(l)
  1050  	// result: (MADDW a x y)
  1051  	for {
  1052  		_ = v.Args[1]
  1053  		a := v.Args[0]
  1054  		l := v.Args[1]
  1055  		if l.Op != OpARM64MULW {
  1056  			break
  1057  		}
  1058  		_ = l.Args[1]
  1059  		x := l.Args[0]
  1060  		y := l.Args[1]
  1061  		if !(a.Type.Size() != 8 && l.Uses == 1 && clobber(l)) {
  1062  			break
  1063  		}
  1064  		v.reset(OpARM64MADDW)
  1065  		v.AddArg(a)
  1066  		v.AddArg(x)
  1067  		v.AddArg(y)
  1068  		return true
  1069  	}
  1070  	// match: (ADD l:(MULW x y) a)
  1071  	// cond: a.Type.Size() != 8 && l.Uses==1 && clobber(l)
  1072  	// result: (MADDW a x y)
  1073  	for {
  1074  		_ = v.Args[1]
  1075  		l := v.Args[0]
  1076  		if l.Op != OpARM64MULW {
  1077  			break
  1078  		}
  1079  		_ = l.Args[1]
  1080  		x := l.Args[0]
  1081  		y := l.Args[1]
  1082  		a := v.Args[1]
  1083  		if !(a.Type.Size() != 8 && l.Uses == 1 && clobber(l)) {
  1084  			break
  1085  		}
  1086  		v.reset(OpARM64MADDW)
  1087  		v.AddArg(a)
  1088  		v.AddArg(x)
  1089  		v.AddArg(y)
  1090  		return true
  1091  	}
  1092  	// match: (ADD a l:(MNEGW x y))
  1093  	// cond: a.Type.Size() != 8 && l.Uses==1 && clobber(l)
  1094  	// result: (MSUBW a x y)
  1095  	for {
  1096  		_ = v.Args[1]
  1097  		a := v.Args[0]
  1098  		l := v.Args[1]
  1099  		if l.Op != OpARM64MNEGW {
  1100  			break
  1101  		}
  1102  		_ = l.Args[1]
  1103  		x := l.Args[0]
  1104  		y := l.Args[1]
  1105  		if !(a.Type.Size() != 8 && l.Uses == 1 && clobber(l)) {
  1106  			break
  1107  		}
  1108  		v.reset(OpARM64MSUBW)
  1109  		v.AddArg(a)
  1110  		v.AddArg(x)
  1111  		v.AddArg(y)
  1112  		return true
  1113  	}
  1114  	// match: (ADD l:(MNEGW x y) a)
  1115  	// cond: a.Type.Size() != 8 && l.Uses==1 && clobber(l)
  1116  	// result: (MSUBW a x y)
  1117  	for {
  1118  		_ = v.Args[1]
  1119  		l := v.Args[0]
  1120  		if l.Op != OpARM64MNEGW {
  1121  			break
  1122  		}
  1123  		_ = l.Args[1]
  1124  		x := l.Args[0]
  1125  		y := l.Args[1]
  1126  		a := v.Args[1]
  1127  		if !(a.Type.Size() != 8 && l.Uses == 1 && clobber(l)) {
  1128  			break
  1129  		}
  1130  		v.reset(OpARM64MSUBW)
  1131  		v.AddArg(a)
  1132  		v.AddArg(x)
  1133  		v.AddArg(y)
  1134  		return true
  1135  	}
  1136  	return false
  1137  }
// rewriteValueARM64_OpARM64ADD_10 tries the second batch of generated
// rewrite rules for OpARM64ADD (machine-generated from gen/ARM64.rules; do
// not edit by hand). Rules are attempted in order; the first match rewrites
// v in place and returns true; false means the caller should try the next
// batch.
//
// Batch contents: ADD of a NEG becomes SUB; ADD of a single-use
// SLLconst/SRLconst/SRAconst operand is folded into the corresponding
// shifted-operand form (ADDshiftLL/ADDshiftRL/ADDshiftRA); and two large
// patterns recognize the SLL/SRL/CSEL0/CMPconst sequence produced for a
// variable rotate and collapse it into (ROR x (NEG <t> y)). Commuted
// variants appear as separate rules.
func rewriteValueARM64_OpARM64ADD_10(v *Value) bool {
	b := v.Block
	_ = b // blank assignments keep generated code valid when unused
	typ := &b.Func.Config.Types
	_ = typ
	// match: (ADD x (NEG y))
	// cond:
	// result: (SUB x y)
	// NOTE: each "for" below executes at most once — "break" means this
	// rule's pattern or condition failed and the next rule is tried.
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64NEG {
			break
		}
		y := v_1.Args[0]
		v.reset(OpARM64SUB)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
	// match: (ADD (NEG y) x)
	// cond:
	// result: (SUB x y)
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64NEG {
			break
		}
		y := v_0.Args[0]
		x := v.Args[1]
		v.reset(OpARM64SUB)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
	// match: (ADD x0 x1:(SLLconst [c] y))
	// cond: clobberIfDead(x1)
	// result: (ADDshiftLL x0 y [c])
	for {
		_ = v.Args[1]
		x0 := v.Args[0]
		x1 := v.Args[1]
		if x1.Op != OpARM64SLLconst {
			break
		}
		c := x1.AuxInt
		y := x1.Args[0]
		if !(clobberIfDead(x1)) {
			break
		}
		v.reset(OpARM64ADDshiftLL)
		v.AuxInt = c
		v.AddArg(x0)
		v.AddArg(y)
		return true
	}
	// match: (ADD x1:(SLLconst [c] y) x0)
	// cond: clobberIfDead(x1)
	// result: (ADDshiftLL x0 y [c])
	for {
		_ = v.Args[1]
		x1 := v.Args[0]
		if x1.Op != OpARM64SLLconst {
			break
		}
		c := x1.AuxInt
		y := x1.Args[0]
		x0 := v.Args[1]
		if !(clobberIfDead(x1)) {
			break
		}
		v.reset(OpARM64ADDshiftLL)
		v.AuxInt = c
		v.AddArg(x0)
		v.AddArg(y)
		return true
	}
	// match: (ADD x0 x1:(SRLconst [c] y))
	// cond: clobberIfDead(x1)
	// result: (ADDshiftRL x0 y [c])
	for {
		_ = v.Args[1]
		x0 := v.Args[0]
		x1 := v.Args[1]
		if x1.Op != OpARM64SRLconst {
			break
		}
		c := x1.AuxInt
		y := x1.Args[0]
		if !(clobberIfDead(x1)) {
			break
		}
		v.reset(OpARM64ADDshiftRL)
		v.AuxInt = c
		v.AddArg(x0)
		v.AddArg(y)
		return true
	}
	// match: (ADD x1:(SRLconst [c] y) x0)
	// cond: clobberIfDead(x1)
	// result: (ADDshiftRL x0 y [c])
	for {
		_ = v.Args[1]
		x1 := v.Args[0]
		if x1.Op != OpARM64SRLconst {
			break
		}
		c := x1.AuxInt
		y := x1.Args[0]
		x0 := v.Args[1]
		if !(clobberIfDead(x1)) {
			break
		}
		v.reset(OpARM64ADDshiftRL)
		v.AuxInt = c
		v.AddArg(x0)
		v.AddArg(y)
		return true
	}
	// match: (ADD x0 x1:(SRAconst [c] y))
	// cond: clobberIfDead(x1)
	// result: (ADDshiftRA x0 y [c])
	for {
		_ = v.Args[1]
		x0 := v.Args[0]
		x1 := v.Args[1]
		if x1.Op != OpARM64SRAconst {
			break
		}
		c := x1.AuxInt
		y := x1.Args[0]
		if !(clobberIfDead(x1)) {
			break
		}
		v.reset(OpARM64ADDshiftRA)
		v.AuxInt = c
		v.AddArg(x0)
		v.AddArg(y)
		return true
	}
	// match: (ADD x1:(SRAconst [c] y) x0)
	// cond: clobberIfDead(x1)
	// result: (ADDshiftRA x0 y [c])
	for {
		_ = v.Args[1]
		x1 := v.Args[0]
		if x1.Op != OpARM64SRAconst {
			break
		}
		c := x1.AuxInt
		y := x1.Args[0]
		x0 := v.Args[1]
		if !(clobberIfDead(x1)) {
			break
		}
		v.reset(OpARM64ADDshiftRA)
		v.AuxInt = c
		v.AddArg(x0)
		v.AddArg(y)
		return true
	}
	// match: (ADD (SLL x (ANDconst <t> [63] y)) (CSEL0 <typ.UInt64> {cc} (SRL <typ.UInt64> x (SUB <t> (MOVDconst [64]) (ANDconst <t> [63] y))) (CMPconst [64] (SUB <t> (MOVDconst [64]) (ANDconst <t> [63] y)))))
	// cond: cc.(Op) == OpARM64LessThanU
	// result: (ROR x (NEG <t> y))
	// The long if-chain below structurally matches the expression tree in
	// the "match" comment, checking each node's Op/Type/AuxInt and that
	// repeated occurrences of x and y refer to the same values.
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64SLL {
			break
		}
		_ = v_0.Args[1]
		x := v_0.Args[0]
		v_0_1 := v_0.Args[1]
		if v_0_1.Op != OpARM64ANDconst {
			break
		}
		t := v_0_1.Type
		if v_0_1.AuxInt != 63 {
			break
		}
		y := v_0_1.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64CSEL0 {
			break
		}
		if v_1.Type != typ.UInt64 {
			break
		}
		cc := v_1.Aux
		_ = v_1.Args[1]
		v_1_0 := v_1.Args[0]
		if v_1_0.Op != OpARM64SRL {
			break
		}
		if v_1_0.Type != typ.UInt64 {
			break
		}
		_ = v_1_0.Args[1]
		if x != v_1_0.Args[0] {
			break
		}
		v_1_0_1 := v_1_0.Args[1]
		if v_1_0_1.Op != OpARM64SUB {
			break
		}
		if v_1_0_1.Type != t {
			break
		}
		_ = v_1_0_1.Args[1]
		v_1_0_1_0 := v_1_0_1.Args[0]
		if v_1_0_1_0.Op != OpARM64MOVDconst {
			break
		}
		if v_1_0_1_0.AuxInt != 64 {
			break
		}
		v_1_0_1_1 := v_1_0_1.Args[1]
		if v_1_0_1_1.Op != OpARM64ANDconst {
			break
		}
		if v_1_0_1_1.Type != t {
			break
		}
		if v_1_0_1_1.AuxInt != 63 {
			break
		}
		if y != v_1_0_1_1.Args[0] {
			break
		}
		v_1_1 := v_1.Args[1]
		if v_1_1.Op != OpARM64CMPconst {
			break
		}
		if v_1_1.AuxInt != 64 {
			break
		}
		v_1_1_0 := v_1_1.Args[0]
		if v_1_1_0.Op != OpARM64SUB {
			break
		}
		if v_1_1_0.Type != t {
			break
		}
		_ = v_1_1_0.Args[1]
		v_1_1_0_0 := v_1_1_0.Args[0]
		if v_1_1_0_0.Op != OpARM64MOVDconst {
			break
		}
		if v_1_1_0_0.AuxInt != 64 {
			break
		}
		v_1_1_0_1 := v_1_1_0.Args[1]
		if v_1_1_0_1.Op != OpARM64ANDconst {
			break
		}
		if v_1_1_0_1.Type != t {
			break
		}
		if v_1_1_0_1.AuxInt != 63 {
			break
		}
		if y != v_1_1_0_1.Args[0] {
			break
		}
		if !(cc.(Op) == OpARM64LessThanU) {
			break
		}
		v.reset(OpARM64ROR)
		v.AddArg(x)
		v0 := b.NewValue0(v.Pos, OpARM64NEG, t)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
	// match: (ADD (CSEL0 <typ.UInt64> {cc} (SRL <typ.UInt64> x (SUB <t> (MOVDconst [64]) (ANDconst <t> [63] y))) (CMPconst [64] (SUB <t> (MOVDconst [64]) (ANDconst <t> [63] y)))) (SLL x (ANDconst <t> [63] y)))
	// cond: cc.(Op) == OpARM64LessThanU
	// result: (ROR x (NEG <t> y))
	// Commuted form of the previous rule: CSEL0 tree first, SLL second.
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64CSEL0 {
			break
		}
		if v_0.Type != typ.UInt64 {
			break
		}
		cc := v_0.Aux
		_ = v_0.Args[1]
		v_0_0 := v_0.Args[0]
		if v_0_0.Op != OpARM64SRL {
			break
		}
		if v_0_0.Type != typ.UInt64 {
			break
		}
		_ = v_0_0.Args[1]
		x := v_0_0.Args[0]
		v_0_0_1 := v_0_0.Args[1]
		if v_0_0_1.Op != OpARM64SUB {
			break
		}
		t := v_0_0_1.Type
		_ = v_0_0_1.Args[1]
		v_0_0_1_0 := v_0_0_1.Args[0]
		if v_0_0_1_0.Op != OpARM64MOVDconst {
			break
		}
		if v_0_0_1_0.AuxInt != 64 {
			break
		}
		v_0_0_1_1 := v_0_0_1.Args[1]
		if v_0_0_1_1.Op != OpARM64ANDconst {
			break
		}
		if v_0_0_1_1.Type != t {
			break
		}
		if v_0_0_1_1.AuxInt != 63 {
			break
		}
		y := v_0_0_1_1.Args[0]
		v_0_1 := v_0.Args[1]
		if v_0_1.Op != OpARM64CMPconst {
			break
		}
		if v_0_1.AuxInt != 64 {
			break
		}
		v_0_1_0 := v_0_1.Args[0]
		if v_0_1_0.Op != OpARM64SUB {
			break
		}
		if v_0_1_0.Type != t {
			break
		}
		_ = v_0_1_0.Args[1]
		v_0_1_0_0 := v_0_1_0.Args[0]
		if v_0_1_0_0.Op != OpARM64MOVDconst {
			break
		}
		if v_0_1_0_0.AuxInt != 64 {
			break
		}
		v_0_1_0_1 := v_0_1_0.Args[1]
		if v_0_1_0_1.Op != OpARM64ANDconst {
			break
		}
		if v_0_1_0_1.Type != t {
			break
		}
		if v_0_1_0_1.AuxInt != 63 {
			break
		}
		if y != v_0_1_0_1.Args[0] {
			break
		}
		v_1 := v.Args[1]
		if v_1.Op != OpARM64SLL {
			break
		}
		_ = v_1.Args[1]
		if x != v_1.Args[0] {
			break
		}
		v_1_1 := v_1.Args[1]
		if v_1_1.Op != OpARM64ANDconst {
			break
		}
		if v_1_1.Type != t {
			break
		}
		if v_1_1.AuxInt != 63 {
			break
		}
		if y != v_1_1.Args[0] {
			break
		}
		if !(cc.(Op) == OpARM64LessThanU) {
			break
		}
		v.reset(OpARM64ROR)
		v.AddArg(x)
		v0 := b.NewValue0(v.Pos, OpARM64NEG, t)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
	// No rule in this batch matched; let the caller try the next batch.
	return false
}
  1529  func rewriteValueARM64_OpARM64ADD_20(v *Value) bool {
  1530  	b := v.Block
  1531  	_ = b
  1532  	typ := &b.Func.Config.Types
  1533  	_ = typ
  1534  	// match: (ADD (SRL <typ.UInt64> x (ANDconst <t> [63] y)) (CSEL0 <typ.UInt64> {cc} (SLL x (SUB <t> (MOVDconst [64]) (ANDconst <t> [63] y))) (CMPconst [64] (SUB <t> (MOVDconst [64]) (ANDconst <t> [63] y)))))
  1535  	// cond: cc.(Op) == OpARM64LessThanU
  1536  	// result: (ROR x y)
  1537  	for {
  1538  		_ = v.Args[1]
  1539  		v_0 := v.Args[0]
  1540  		if v_0.Op != OpARM64SRL {
  1541  			break
  1542  		}
  1543  		if v_0.Type != typ.UInt64 {
  1544  			break
  1545  		}
  1546  		_ = v_0.Args[1]
  1547  		x := v_0.Args[0]
  1548  		v_0_1 := v_0.Args[1]
  1549  		if v_0_1.Op != OpARM64ANDconst {
  1550  			break
  1551  		}
  1552  		t := v_0_1.Type
  1553  		if v_0_1.AuxInt != 63 {
  1554  			break
  1555  		}
  1556  		y := v_0_1.Args[0]
  1557  		v_1 := v.Args[1]
  1558  		if v_1.Op != OpARM64CSEL0 {
  1559  			break
  1560  		}
  1561  		if v_1.Type != typ.UInt64 {
  1562  			break
  1563  		}
  1564  		cc := v_1.Aux
  1565  		_ = v_1.Args[1]
  1566  		v_1_0 := v_1.Args[0]
  1567  		if v_1_0.Op != OpARM64SLL {
  1568  			break
  1569  		}
  1570  		_ = v_1_0.Args[1]
  1571  		if x != v_1_0.Args[0] {
  1572  			break
  1573  		}
  1574  		v_1_0_1 := v_1_0.Args[1]
  1575  		if v_1_0_1.Op != OpARM64SUB {
  1576  			break
  1577  		}
  1578  		if v_1_0_1.Type != t {
  1579  			break
  1580  		}
  1581  		_ = v_1_0_1.Args[1]
  1582  		v_1_0_1_0 := v_1_0_1.Args[0]
  1583  		if v_1_0_1_0.Op != OpARM64MOVDconst {
  1584  			break
  1585  		}
  1586  		if v_1_0_1_0.AuxInt != 64 {
  1587  			break
  1588  		}
  1589  		v_1_0_1_1 := v_1_0_1.Args[1]
  1590  		if v_1_0_1_1.Op != OpARM64ANDconst {
  1591  			break
  1592  		}
  1593  		if v_1_0_1_1.Type != t {
  1594  			break
  1595  		}
  1596  		if v_1_0_1_1.AuxInt != 63 {
  1597  			break
  1598  		}
  1599  		if y != v_1_0_1_1.Args[0] {
  1600  			break
  1601  		}
  1602  		v_1_1 := v_1.Args[1]
  1603  		if v_1_1.Op != OpARM64CMPconst {
  1604  			break
  1605  		}
  1606  		if v_1_1.AuxInt != 64 {
  1607  			break
  1608  		}
  1609  		v_1_1_0 := v_1_1.Args[0]
  1610  		if v_1_1_0.Op != OpARM64SUB {
  1611  			break
  1612  		}
  1613  		if v_1_1_0.Type != t {
  1614  			break
  1615  		}
  1616  		_ = v_1_1_0.Args[1]
  1617  		v_1_1_0_0 := v_1_1_0.Args[0]
  1618  		if v_1_1_0_0.Op != OpARM64MOVDconst {
  1619  			break
  1620  		}
  1621  		if v_1_1_0_0.AuxInt != 64 {
  1622  			break
  1623  		}
  1624  		v_1_1_0_1 := v_1_1_0.Args[1]
  1625  		if v_1_1_0_1.Op != OpARM64ANDconst {
  1626  			break
  1627  		}
  1628  		if v_1_1_0_1.Type != t {
  1629  			break
  1630  		}
  1631  		if v_1_1_0_1.AuxInt != 63 {
  1632  			break
  1633  		}
  1634  		if y != v_1_1_0_1.Args[0] {
  1635  			break
  1636  		}
  1637  		if !(cc.(Op) == OpARM64LessThanU) {
  1638  			break
  1639  		}
  1640  		v.reset(OpARM64ROR)
  1641  		v.AddArg(x)
  1642  		v.AddArg(y)
  1643  		return true
  1644  	}
  1645  	// match: (ADD (CSEL0 <typ.UInt64> {cc} (SLL x (SUB <t> (MOVDconst [64]) (ANDconst <t> [63] y))) (CMPconst [64] (SUB <t> (MOVDconst [64]) (ANDconst <t> [63] y)))) (SRL <typ.UInt64> x (ANDconst <t> [63] y)))
  1646  	// cond: cc.(Op) == OpARM64LessThanU
  1647  	// result: (ROR x y)
  1648  	for {
  1649  		_ = v.Args[1]
  1650  		v_0 := v.Args[0]
  1651  		if v_0.Op != OpARM64CSEL0 {
  1652  			break
  1653  		}
  1654  		if v_0.Type != typ.UInt64 {
  1655  			break
  1656  		}
  1657  		cc := v_0.Aux
  1658  		_ = v_0.Args[1]
  1659  		v_0_0 := v_0.Args[0]
  1660  		if v_0_0.Op != OpARM64SLL {
  1661  			break
  1662  		}
  1663  		_ = v_0_0.Args[1]
  1664  		x := v_0_0.Args[0]
  1665  		v_0_0_1 := v_0_0.Args[1]
  1666  		if v_0_0_1.Op != OpARM64SUB {
  1667  			break
  1668  		}
  1669  		t := v_0_0_1.Type
  1670  		_ = v_0_0_1.Args[1]
  1671  		v_0_0_1_0 := v_0_0_1.Args[0]
  1672  		if v_0_0_1_0.Op != OpARM64MOVDconst {
  1673  			break
  1674  		}
  1675  		if v_0_0_1_0.AuxInt != 64 {
  1676  			break
  1677  		}
  1678  		v_0_0_1_1 := v_0_0_1.Args[1]
  1679  		if v_0_0_1_1.Op != OpARM64ANDconst {
  1680  			break
  1681  		}
  1682  		if v_0_0_1_1.Type != t {
  1683  			break
  1684  		}
  1685  		if v_0_0_1_1.AuxInt != 63 {
  1686  			break
  1687  		}
  1688  		y := v_0_0_1_1.Args[0]
  1689  		v_0_1 := v_0.Args[1]
  1690  		if v_0_1.Op != OpARM64CMPconst {
  1691  			break
  1692  		}
  1693  		if v_0_1.AuxInt != 64 {
  1694  			break
  1695  		}
  1696  		v_0_1_0 := v_0_1.Args[0]
  1697  		if v_0_1_0.Op != OpARM64SUB {
  1698  			break
  1699  		}
  1700  		if v_0_1_0.Type != t {
  1701  			break
  1702  		}
  1703  		_ = v_0_1_0.Args[1]
  1704  		v_0_1_0_0 := v_0_1_0.Args[0]
  1705  		if v_0_1_0_0.Op != OpARM64MOVDconst {
  1706  			break
  1707  		}
  1708  		if v_0_1_0_0.AuxInt != 64 {
  1709  			break
  1710  		}
  1711  		v_0_1_0_1 := v_0_1_0.Args[1]
  1712  		if v_0_1_0_1.Op != OpARM64ANDconst {
  1713  			break
  1714  		}
  1715  		if v_0_1_0_1.Type != t {
  1716  			break
  1717  		}
  1718  		if v_0_1_0_1.AuxInt != 63 {
  1719  			break
  1720  		}
  1721  		if y != v_0_1_0_1.Args[0] {
  1722  			break
  1723  		}
  1724  		v_1 := v.Args[1]
  1725  		if v_1.Op != OpARM64SRL {
  1726  			break
  1727  		}
  1728  		if v_1.Type != typ.UInt64 {
  1729  			break
  1730  		}
  1731  		_ = v_1.Args[1]
  1732  		if x != v_1.Args[0] {
  1733  			break
  1734  		}
  1735  		v_1_1 := v_1.Args[1]
  1736  		if v_1_1.Op != OpARM64ANDconst {
  1737  			break
  1738  		}
  1739  		if v_1_1.Type != t {
  1740  			break
  1741  		}
  1742  		if v_1_1.AuxInt != 63 {
  1743  			break
  1744  		}
  1745  		if y != v_1_1.Args[0] {
  1746  			break
  1747  		}
  1748  		if !(cc.(Op) == OpARM64LessThanU) {
  1749  			break
  1750  		}
  1751  		v.reset(OpARM64ROR)
  1752  		v.AddArg(x)
  1753  		v.AddArg(y)
  1754  		return true
  1755  	}
  1756  	// match: (ADD (SLL x (ANDconst <t> [31] y)) (CSEL0 <typ.UInt32> {cc} (SRL <typ.UInt32> (MOVWUreg x) (SUB <t> (MOVDconst [32]) (ANDconst <t> [31] y))) (CMPconst [64] (SUB <t> (MOVDconst [32]) (ANDconst <t> [31] y)))))
  1757  	// cond: cc.(Op) == OpARM64LessThanU
  1758  	// result: (RORW x (NEG <t> y))
  1759  	for {
  1760  		_ = v.Args[1]
  1761  		v_0 := v.Args[0]
  1762  		if v_0.Op != OpARM64SLL {
  1763  			break
  1764  		}
  1765  		_ = v_0.Args[1]
  1766  		x := v_0.Args[0]
  1767  		v_0_1 := v_0.Args[1]
  1768  		if v_0_1.Op != OpARM64ANDconst {
  1769  			break
  1770  		}
  1771  		t := v_0_1.Type
  1772  		if v_0_1.AuxInt != 31 {
  1773  			break
  1774  		}
  1775  		y := v_0_1.Args[0]
  1776  		v_1 := v.Args[1]
  1777  		if v_1.Op != OpARM64CSEL0 {
  1778  			break
  1779  		}
  1780  		if v_1.Type != typ.UInt32 {
  1781  			break
  1782  		}
  1783  		cc := v_1.Aux
  1784  		_ = v_1.Args[1]
  1785  		v_1_0 := v_1.Args[0]
  1786  		if v_1_0.Op != OpARM64SRL {
  1787  			break
  1788  		}
  1789  		if v_1_0.Type != typ.UInt32 {
  1790  			break
  1791  		}
  1792  		_ = v_1_0.Args[1]
  1793  		v_1_0_0 := v_1_0.Args[0]
  1794  		if v_1_0_0.Op != OpARM64MOVWUreg {
  1795  			break
  1796  		}
  1797  		if x != v_1_0_0.Args[0] {
  1798  			break
  1799  		}
  1800  		v_1_0_1 := v_1_0.Args[1]
  1801  		if v_1_0_1.Op != OpARM64SUB {
  1802  			break
  1803  		}
  1804  		if v_1_0_1.Type != t {
  1805  			break
  1806  		}
  1807  		_ = v_1_0_1.Args[1]
  1808  		v_1_0_1_0 := v_1_0_1.Args[0]
  1809  		if v_1_0_1_0.Op != OpARM64MOVDconst {
  1810  			break
  1811  		}
  1812  		if v_1_0_1_0.AuxInt != 32 {
  1813  			break
  1814  		}
  1815  		v_1_0_1_1 := v_1_0_1.Args[1]
  1816  		if v_1_0_1_1.Op != OpARM64ANDconst {
  1817  			break
  1818  		}
  1819  		if v_1_0_1_1.Type != t {
  1820  			break
  1821  		}
  1822  		if v_1_0_1_1.AuxInt != 31 {
  1823  			break
  1824  		}
  1825  		if y != v_1_0_1_1.Args[0] {
  1826  			break
  1827  		}
  1828  		v_1_1 := v_1.Args[1]
  1829  		if v_1_1.Op != OpARM64CMPconst {
  1830  			break
  1831  		}
  1832  		if v_1_1.AuxInt != 64 {
  1833  			break
  1834  		}
  1835  		v_1_1_0 := v_1_1.Args[0]
  1836  		if v_1_1_0.Op != OpARM64SUB {
  1837  			break
  1838  		}
  1839  		if v_1_1_0.Type != t {
  1840  			break
  1841  		}
  1842  		_ = v_1_1_0.Args[1]
  1843  		v_1_1_0_0 := v_1_1_0.Args[0]
  1844  		if v_1_1_0_0.Op != OpARM64MOVDconst {
  1845  			break
  1846  		}
  1847  		if v_1_1_0_0.AuxInt != 32 {
  1848  			break
  1849  		}
  1850  		v_1_1_0_1 := v_1_1_0.Args[1]
  1851  		if v_1_1_0_1.Op != OpARM64ANDconst {
  1852  			break
  1853  		}
  1854  		if v_1_1_0_1.Type != t {
  1855  			break
  1856  		}
  1857  		if v_1_1_0_1.AuxInt != 31 {
  1858  			break
  1859  		}
  1860  		if y != v_1_1_0_1.Args[0] {
  1861  			break
  1862  		}
  1863  		if !(cc.(Op) == OpARM64LessThanU) {
  1864  			break
  1865  		}
  1866  		v.reset(OpARM64RORW)
  1867  		v.AddArg(x)
  1868  		v0 := b.NewValue0(v.Pos, OpARM64NEG, t)
  1869  		v0.AddArg(y)
  1870  		v.AddArg(v0)
  1871  		return true
  1872  	}
  1873  	// match: (ADD (CSEL0 <typ.UInt32> {cc} (SRL <typ.UInt32> (MOVWUreg x) (SUB <t> (MOVDconst [32]) (ANDconst <t> [31] y))) (CMPconst [64] (SUB <t> (MOVDconst [32]) (ANDconst <t> [31] y)))) (SLL x (ANDconst <t> [31] y)))
  1874  	// cond: cc.(Op) == OpARM64LessThanU
  1875  	// result: (RORW x (NEG <t> y))
  1876  	for {
  1877  		_ = v.Args[1]
  1878  		v_0 := v.Args[0]
  1879  		if v_0.Op != OpARM64CSEL0 {
  1880  			break
  1881  		}
  1882  		if v_0.Type != typ.UInt32 {
  1883  			break
  1884  		}
  1885  		cc := v_0.Aux
  1886  		_ = v_0.Args[1]
  1887  		v_0_0 := v_0.Args[0]
  1888  		if v_0_0.Op != OpARM64SRL {
  1889  			break
  1890  		}
  1891  		if v_0_0.Type != typ.UInt32 {
  1892  			break
  1893  		}
  1894  		_ = v_0_0.Args[1]
  1895  		v_0_0_0 := v_0_0.Args[0]
  1896  		if v_0_0_0.Op != OpARM64MOVWUreg {
  1897  			break
  1898  		}
  1899  		x := v_0_0_0.Args[0]
  1900  		v_0_0_1 := v_0_0.Args[1]
  1901  		if v_0_0_1.Op != OpARM64SUB {
  1902  			break
  1903  		}
  1904  		t := v_0_0_1.Type
  1905  		_ = v_0_0_1.Args[1]
  1906  		v_0_0_1_0 := v_0_0_1.Args[0]
  1907  		if v_0_0_1_0.Op != OpARM64MOVDconst {
  1908  			break
  1909  		}
  1910  		if v_0_0_1_0.AuxInt != 32 {
  1911  			break
  1912  		}
  1913  		v_0_0_1_1 := v_0_0_1.Args[1]
  1914  		if v_0_0_1_1.Op != OpARM64ANDconst {
  1915  			break
  1916  		}
  1917  		if v_0_0_1_1.Type != t {
  1918  			break
  1919  		}
  1920  		if v_0_0_1_1.AuxInt != 31 {
  1921  			break
  1922  		}
  1923  		y := v_0_0_1_1.Args[0]
  1924  		v_0_1 := v_0.Args[1]
  1925  		if v_0_1.Op != OpARM64CMPconst {
  1926  			break
  1927  		}
  1928  		if v_0_1.AuxInt != 64 {
  1929  			break
  1930  		}
  1931  		v_0_1_0 := v_0_1.Args[0]
  1932  		if v_0_1_0.Op != OpARM64SUB {
  1933  			break
  1934  		}
  1935  		if v_0_1_0.Type != t {
  1936  			break
  1937  		}
  1938  		_ = v_0_1_0.Args[1]
  1939  		v_0_1_0_0 := v_0_1_0.Args[0]
  1940  		if v_0_1_0_0.Op != OpARM64MOVDconst {
  1941  			break
  1942  		}
  1943  		if v_0_1_0_0.AuxInt != 32 {
  1944  			break
  1945  		}
  1946  		v_0_1_0_1 := v_0_1_0.Args[1]
  1947  		if v_0_1_0_1.Op != OpARM64ANDconst {
  1948  			break
  1949  		}
  1950  		if v_0_1_0_1.Type != t {
  1951  			break
  1952  		}
  1953  		if v_0_1_0_1.AuxInt != 31 {
  1954  			break
  1955  		}
  1956  		if y != v_0_1_0_1.Args[0] {
  1957  			break
  1958  		}
  1959  		v_1 := v.Args[1]
  1960  		if v_1.Op != OpARM64SLL {
  1961  			break
  1962  		}
  1963  		_ = v_1.Args[1]
  1964  		if x != v_1.Args[0] {
  1965  			break
  1966  		}
  1967  		v_1_1 := v_1.Args[1]
  1968  		if v_1_1.Op != OpARM64ANDconst {
  1969  			break
  1970  		}
  1971  		if v_1_1.Type != t {
  1972  			break
  1973  		}
  1974  		if v_1_1.AuxInt != 31 {
  1975  			break
  1976  		}
  1977  		if y != v_1_1.Args[0] {
  1978  			break
  1979  		}
  1980  		if !(cc.(Op) == OpARM64LessThanU) {
  1981  			break
  1982  		}
  1983  		v.reset(OpARM64RORW)
  1984  		v.AddArg(x)
  1985  		v0 := b.NewValue0(v.Pos, OpARM64NEG, t)
  1986  		v0.AddArg(y)
  1987  		v.AddArg(v0)
  1988  		return true
  1989  	}
  1990  	// match: (ADD (SRL <typ.UInt32> (MOVWUreg x) (ANDconst <t> [31] y)) (CSEL0 <typ.UInt32> {cc} (SLL x (SUB <t> (MOVDconst [32]) (ANDconst <t> [31] y))) (CMPconst [64] (SUB <t> (MOVDconst [32]) (ANDconst <t> [31] y)))))
  1991  	// cond: cc.(Op) == OpARM64LessThanU
  1992  	// result: (RORW x y)
  1993  	for {
  1994  		_ = v.Args[1]
  1995  		v_0 := v.Args[0]
  1996  		if v_0.Op != OpARM64SRL {
  1997  			break
  1998  		}
  1999  		if v_0.Type != typ.UInt32 {
  2000  			break
  2001  		}
  2002  		_ = v_0.Args[1]
  2003  		v_0_0 := v_0.Args[0]
  2004  		if v_0_0.Op != OpARM64MOVWUreg {
  2005  			break
  2006  		}
  2007  		x := v_0_0.Args[0]
  2008  		v_0_1 := v_0.Args[1]
  2009  		if v_0_1.Op != OpARM64ANDconst {
  2010  			break
  2011  		}
  2012  		t := v_0_1.Type
  2013  		if v_0_1.AuxInt != 31 {
  2014  			break
  2015  		}
  2016  		y := v_0_1.Args[0]
  2017  		v_1 := v.Args[1]
  2018  		if v_1.Op != OpARM64CSEL0 {
  2019  			break
  2020  		}
  2021  		if v_1.Type != typ.UInt32 {
  2022  			break
  2023  		}
  2024  		cc := v_1.Aux
  2025  		_ = v_1.Args[1]
  2026  		v_1_0 := v_1.Args[0]
  2027  		if v_1_0.Op != OpARM64SLL {
  2028  			break
  2029  		}
  2030  		_ = v_1_0.Args[1]
  2031  		if x != v_1_0.Args[0] {
  2032  			break
  2033  		}
  2034  		v_1_0_1 := v_1_0.Args[1]
  2035  		if v_1_0_1.Op != OpARM64SUB {
  2036  			break
  2037  		}
  2038  		if v_1_0_1.Type != t {
  2039  			break
  2040  		}
  2041  		_ = v_1_0_1.Args[1]
  2042  		v_1_0_1_0 := v_1_0_1.Args[0]
  2043  		if v_1_0_1_0.Op != OpARM64MOVDconst {
  2044  			break
  2045  		}
  2046  		if v_1_0_1_0.AuxInt != 32 {
  2047  			break
  2048  		}
  2049  		v_1_0_1_1 := v_1_0_1.Args[1]
  2050  		if v_1_0_1_1.Op != OpARM64ANDconst {
  2051  			break
  2052  		}
  2053  		if v_1_0_1_1.Type != t {
  2054  			break
  2055  		}
  2056  		if v_1_0_1_1.AuxInt != 31 {
  2057  			break
  2058  		}
  2059  		if y != v_1_0_1_1.Args[0] {
  2060  			break
  2061  		}
  2062  		v_1_1 := v_1.Args[1]
  2063  		if v_1_1.Op != OpARM64CMPconst {
  2064  			break
  2065  		}
  2066  		if v_1_1.AuxInt != 64 {
  2067  			break
  2068  		}
  2069  		v_1_1_0 := v_1_1.Args[0]
  2070  		if v_1_1_0.Op != OpARM64SUB {
  2071  			break
  2072  		}
  2073  		if v_1_1_0.Type != t {
  2074  			break
  2075  		}
  2076  		_ = v_1_1_0.Args[1]
  2077  		v_1_1_0_0 := v_1_1_0.Args[0]
  2078  		if v_1_1_0_0.Op != OpARM64MOVDconst {
  2079  			break
  2080  		}
  2081  		if v_1_1_0_0.AuxInt != 32 {
  2082  			break
  2083  		}
  2084  		v_1_1_0_1 := v_1_1_0.Args[1]
  2085  		if v_1_1_0_1.Op != OpARM64ANDconst {
  2086  			break
  2087  		}
  2088  		if v_1_1_0_1.Type != t {
  2089  			break
  2090  		}
  2091  		if v_1_1_0_1.AuxInt != 31 {
  2092  			break
  2093  		}
  2094  		if y != v_1_1_0_1.Args[0] {
  2095  			break
  2096  		}
  2097  		if !(cc.(Op) == OpARM64LessThanU) {
  2098  			break
  2099  		}
  2100  		v.reset(OpARM64RORW)
  2101  		v.AddArg(x)
  2102  		v.AddArg(y)
  2103  		return true
  2104  	}
  2105  	// match: (ADD (CSEL0 <typ.UInt32> {cc} (SLL x (SUB <t> (MOVDconst [32]) (ANDconst <t> [31] y))) (CMPconst [64] (SUB <t> (MOVDconst [32]) (ANDconst <t> [31] y)))) (SRL <typ.UInt32> (MOVWUreg x) (ANDconst <t> [31] y)))
  2106  	// cond: cc.(Op) == OpARM64LessThanU
  2107  	// result: (RORW x y)
  2108  	for {
  2109  		_ = v.Args[1]
  2110  		v_0 := v.Args[0]
  2111  		if v_0.Op != OpARM64CSEL0 {
  2112  			break
  2113  		}
  2114  		if v_0.Type != typ.UInt32 {
  2115  			break
  2116  		}
  2117  		cc := v_0.Aux
  2118  		_ = v_0.Args[1]
  2119  		v_0_0 := v_0.Args[0]
  2120  		if v_0_0.Op != OpARM64SLL {
  2121  			break
  2122  		}
  2123  		_ = v_0_0.Args[1]
  2124  		x := v_0_0.Args[0]
  2125  		v_0_0_1 := v_0_0.Args[1]
  2126  		if v_0_0_1.Op != OpARM64SUB {
  2127  			break
  2128  		}
  2129  		t := v_0_0_1.Type
  2130  		_ = v_0_0_1.Args[1]
  2131  		v_0_0_1_0 := v_0_0_1.Args[0]
  2132  		if v_0_0_1_0.Op != OpARM64MOVDconst {
  2133  			break
  2134  		}
  2135  		if v_0_0_1_0.AuxInt != 32 {
  2136  			break
  2137  		}
  2138  		v_0_0_1_1 := v_0_0_1.Args[1]
  2139  		if v_0_0_1_1.Op != OpARM64ANDconst {
  2140  			break
  2141  		}
  2142  		if v_0_0_1_1.Type != t {
  2143  			break
  2144  		}
  2145  		if v_0_0_1_1.AuxInt != 31 {
  2146  			break
  2147  		}
  2148  		y := v_0_0_1_1.Args[0]
  2149  		v_0_1 := v_0.Args[1]
  2150  		if v_0_1.Op != OpARM64CMPconst {
  2151  			break
  2152  		}
  2153  		if v_0_1.AuxInt != 64 {
  2154  			break
  2155  		}
  2156  		v_0_1_0 := v_0_1.Args[0]
  2157  		if v_0_1_0.Op != OpARM64SUB {
  2158  			break
  2159  		}
  2160  		if v_0_1_0.Type != t {
  2161  			break
  2162  		}
  2163  		_ = v_0_1_0.Args[1]
  2164  		v_0_1_0_0 := v_0_1_0.Args[0]
  2165  		if v_0_1_0_0.Op != OpARM64MOVDconst {
  2166  			break
  2167  		}
  2168  		if v_0_1_0_0.AuxInt != 32 {
  2169  			break
  2170  		}
  2171  		v_0_1_0_1 := v_0_1_0.Args[1]
  2172  		if v_0_1_0_1.Op != OpARM64ANDconst {
  2173  			break
  2174  		}
  2175  		if v_0_1_0_1.Type != t {
  2176  			break
  2177  		}
  2178  		if v_0_1_0_1.AuxInt != 31 {
  2179  			break
  2180  		}
  2181  		if y != v_0_1_0_1.Args[0] {
  2182  			break
  2183  		}
  2184  		v_1 := v.Args[1]
  2185  		if v_1.Op != OpARM64SRL {
  2186  			break
  2187  		}
  2188  		if v_1.Type != typ.UInt32 {
  2189  			break
  2190  		}
  2191  		_ = v_1.Args[1]
  2192  		v_1_0 := v_1.Args[0]
  2193  		if v_1_0.Op != OpARM64MOVWUreg {
  2194  			break
  2195  		}
  2196  		if x != v_1_0.Args[0] {
  2197  			break
  2198  		}
  2199  		v_1_1 := v_1.Args[1]
  2200  		if v_1_1.Op != OpARM64ANDconst {
  2201  			break
  2202  		}
  2203  		if v_1_1.Type != t {
  2204  			break
  2205  		}
  2206  		if v_1_1.AuxInt != 31 {
  2207  			break
  2208  		}
  2209  		if y != v_1_1.Args[0] {
  2210  			break
  2211  		}
  2212  		if !(cc.(Op) == OpARM64LessThanU) {
  2213  			break
  2214  		}
  2215  		v.reset(OpARM64RORW)
  2216  		v.AddArg(x)
  2217  		v.AddArg(y)
  2218  		return true
  2219  	}
  2220  	return false
  2221  }
// rewriteValueARM64_OpARM64ADDconst_0 applies the ADDconst rewrite rules from
// gen/ARM64.rules: fold the constant into a MOVDaddr offset, drop an add of
// zero, and combine with MOVDconst/ADDconst/SUBconst operands. It reports
// whether v was rewritten. The match/cond/result comments below are emitted
// by the rule generator; this file is generated code (DO NOT EDIT by hand).
func rewriteValueARM64_OpARM64ADDconst_0(v *Value) bool {
	// match: (ADDconst [off1] (MOVDaddr [off2] {sym} ptr))
	// cond:
	// result: (MOVDaddr [off1+off2] {sym} ptr)
	for {
		off1 := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDaddr {
			break
		}
		off2 := v_0.AuxInt
		sym := v_0.Aux
		ptr := v_0.Args[0]
		v.reset(OpARM64MOVDaddr)
		v.AuxInt = off1 + off2
		v.Aux = sym
		v.AddArg(ptr)
		return true
	}
	// match: (ADDconst [0] x)
	// cond:
	// result: x
	for {
		if v.AuxInt != 0 {
			break
		}
		x := v.Args[0]
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	// match: (ADDconst [c] (MOVDconst [d]))
	// cond:
	// result: (MOVDconst [c+d])
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		d := v_0.AuxInt
		v.reset(OpARM64MOVDconst)
		v.AuxInt = c + d
		return true
	}
	// match: (ADDconst [c] (ADDconst [d] x))
	// cond:
	// result: (ADDconst [c+d] x)
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpARM64ADDconst {
			break
		}
		d := v_0.AuxInt
		x := v_0.Args[0]
		v.reset(OpARM64ADDconst)
		v.AuxInt = c + d
		v.AddArg(x)
		return true
	}
	// match: (ADDconst [c] (SUBconst [d] x))
	// cond:
	// result: (ADDconst [c-d] x)
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpARM64SUBconst {
			break
		}
		d := v_0.AuxInt
		x := v_0.Args[0]
		v.reset(OpARM64ADDconst)
		v.AuxInt = c - d
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64ADDshiftLL_0 applies the ADDshiftLL rewrite rules
// from gen/ARM64.rules: distribute over a constant left operand, fold a
// constant right operand into an ADDconst, and recognize the rotate
// (RORconst/RORWconst) and extract (EXTRconst/EXTRWconst) patterns formed
// with SRLconst/UBFX. It reports whether v was rewritten. Generated code
// (DO NOT EDIT by hand).
func rewriteValueARM64_OpARM64ADDshiftLL_0(v *Value) bool {
	b := v.Block
	_ = b
	// match: (ADDshiftLL (MOVDconst [c]) x [d])
	// cond:
	// result: (ADDconst [c] (SLLconst <x.Type> x [d]))
	for {
		d := v.AuxInt
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		x := v.Args[1]
		v.reset(OpARM64ADDconst)
		v.AuxInt = c
		v0 := b.NewValue0(v.Pos, OpARM64SLLconst, x.Type)
		v0.AuxInt = d
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (ADDshiftLL x (MOVDconst [c]) [d])
	// cond:
	// result: (ADDconst x [int64(uint64(c)<<uint64(d))])
	for {
		d := v.AuxInt
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		v.reset(OpARM64ADDconst)
		v.AuxInt = int64(uint64(c) << uint64(d))
		v.AddArg(x)
		return true
	}
	// match: (ADDshiftLL [c] (SRLconst x [64-c]) x)
	// cond:
	// result: (RORconst [64-c] x)
	for {
		c := v.AuxInt
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64SRLconst {
			break
		}
		if v_0.AuxInt != 64-c {
			break
		}
		x := v_0.Args[0]
		if x != v.Args[1] {
			break
		}
		v.reset(OpARM64RORconst)
		v.AuxInt = 64 - c
		v.AddArg(x)
		return true
	}
	// match: (ADDshiftLL <t> [c] (UBFX [bfc] x) x)
	// cond: c < 32 && t.Size() == 4 && bfc == arm64BFAuxInt(32-c, c)
	// result: (RORWconst [32-c] x)
	for {
		t := v.Type
		c := v.AuxInt
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64UBFX {
			break
		}
		bfc := v_0.AuxInt
		x := v_0.Args[0]
		if x != v.Args[1] {
			break
		}
		if !(c < 32 && t.Size() == 4 && bfc == arm64BFAuxInt(32-c, c)) {
			break
		}
		v.reset(OpARM64RORWconst)
		v.AuxInt = 32 - c
		v.AddArg(x)
		return true
	}
	// match: (ADDshiftLL [c] (SRLconst x [64-c]) x2)
	// cond:
	// result: (EXTRconst [64-c] x2 x)
	for {
		c := v.AuxInt
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64SRLconst {
			break
		}
		if v_0.AuxInt != 64-c {
			break
		}
		x := v_0.Args[0]
		x2 := v.Args[1]
		v.reset(OpARM64EXTRconst)
		v.AuxInt = 64 - c
		v.AddArg(x2)
		v.AddArg(x)
		return true
	}
	// match: (ADDshiftLL <t> [c] (UBFX [bfc] x) x2)
	// cond: c < 32 && t.Size() == 4 && bfc == arm64BFAuxInt(32-c, c)
	// result: (EXTRWconst [32-c] x2 x)
	for {
		t := v.Type
		c := v.AuxInt
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64UBFX {
			break
		}
		bfc := v_0.AuxInt
		x := v_0.Args[0]
		x2 := v.Args[1]
		if !(c < 32 && t.Size() == 4 && bfc == arm64BFAuxInt(32-c, c)) {
			break
		}
		v.reset(OpARM64EXTRWconst)
		v.AuxInt = 32 - c
		v.AddArg(x2)
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64ADDshiftRA_0 applies the ADDshiftRA rewrite rules
// from gen/ARM64.rules: distribute over a constant left operand and fold a
// constant right operand (arithmetic shift, so a signed >> on the constant)
// into an ADDconst. It reports whether v was rewritten. Generated code
// (DO NOT EDIT by hand).
func rewriteValueARM64_OpARM64ADDshiftRA_0(v *Value) bool {
	b := v.Block
	_ = b
	// match: (ADDshiftRA (MOVDconst [c]) x [d])
	// cond:
	// result: (ADDconst [c] (SRAconst <x.Type> x [d]))
	for {
		d := v.AuxInt
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		x := v.Args[1]
		v.reset(OpARM64ADDconst)
		v.AuxInt = c
		v0 := b.NewValue0(v.Pos, OpARM64SRAconst, x.Type)
		v0.AuxInt = d
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (ADDshiftRA x (MOVDconst [c]) [d])
	// cond:
	// result: (ADDconst x [c>>uint64(d)])
	for {
		d := v.AuxInt
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		v.reset(OpARM64ADDconst)
		v.AuxInt = c >> uint64(d)
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64ADDshiftRL_0 applies the ADDshiftRL rewrite rules
// from gen/ARM64.rules: distribute over a constant left operand, fold a
// constant right operand (logical shift, so an unsigned >> on the constant)
// into an ADDconst, and recognize 64-bit and 32-bit rotate patterns
// (RORconst/RORWconst) formed with SLLconst. It reports whether v was
// rewritten. Generated code (DO NOT EDIT by hand).
func rewriteValueARM64_OpARM64ADDshiftRL_0(v *Value) bool {
	b := v.Block
	_ = b
	// match: (ADDshiftRL (MOVDconst [c]) x [d])
	// cond:
	// result: (ADDconst [c] (SRLconst <x.Type> x [d]))
	for {
		d := v.AuxInt
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		x := v.Args[1]
		v.reset(OpARM64ADDconst)
		v.AuxInt = c
		v0 := b.NewValue0(v.Pos, OpARM64SRLconst, x.Type)
		v0.AuxInt = d
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (ADDshiftRL x (MOVDconst [c]) [d])
	// cond:
	// result: (ADDconst x [int64(uint64(c)>>uint64(d))])
	for {
		d := v.AuxInt
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		v.reset(OpARM64ADDconst)
		v.AuxInt = int64(uint64(c) >> uint64(d))
		v.AddArg(x)
		return true
	}
	// match: (ADDshiftRL [c] (SLLconst x [64-c]) x)
	// cond:
	// result: (RORconst [ c] x)
	for {
		c := v.AuxInt
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64SLLconst {
			break
		}
		if v_0.AuxInt != 64-c {
			break
		}
		x := v_0.Args[0]
		if x != v.Args[1] {
			break
		}
		v.reset(OpARM64RORconst)
		v.AuxInt = c
		v.AddArg(x)
		return true
	}
	// match: (ADDshiftRL <t> [c] (SLLconst x [32-c]) (MOVWUreg x))
	// cond: c < 32 && t.Size() == 4
	// result: (RORWconst [c] x)
	for {
		t := v.Type
		c := v.AuxInt
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64SLLconst {
			break
		}
		if v_0.AuxInt != 32-c {
			break
		}
		x := v_0.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVWUreg {
			break
		}
		if x != v_1.Args[0] {
			break
		}
		if !(c < 32 && t.Size() == 4) {
			break
		}
		v.reset(OpARM64RORWconst)
		v.AuxInt = c
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64AND_0 applies the first batch of AND rewrite rules
// from gen/ARM64.rules: fold a MOVDconst operand (either side) into ANDconst,
// simplify (AND x x) to x, turn (AND x (MVN y)) into BIC, and merge a shifted
// operand (SLLconst/SRLconst/SRAconst on either side) into the corresponding
// ANDshift* op when clobberIfDead permits. It reports whether v was
// rewritten; the tenth symmetric case continues in
// rewriteValueARM64_OpARM64AND_10. Generated code (DO NOT EDIT by hand).
func rewriteValueARM64_OpARM64AND_0(v *Value) bool {
	// match: (AND x (MOVDconst [c]))
	// cond:
	// result: (ANDconst [c] x)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		v.reset(OpARM64ANDconst)
		v.AuxInt = c
		v.AddArg(x)
		return true
	}
	// match: (AND (MOVDconst [c]) x)
	// cond:
	// result: (ANDconst [c] x)
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		x := v.Args[1]
		v.reset(OpARM64ANDconst)
		v.AuxInt = c
		v.AddArg(x)
		return true
	}
	// match: (AND x x)
	// cond:
	// result: x
	for {
		_ = v.Args[1]
		x := v.Args[0]
		if x != v.Args[1] {
			break
		}
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	// match: (AND x (MVN y))
	// cond:
	// result: (BIC x y)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MVN {
			break
		}
		y := v_1.Args[0]
		v.reset(OpARM64BIC)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
	// match: (AND (MVN y) x)
	// cond:
	// result: (BIC x y)
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MVN {
			break
		}
		y := v_0.Args[0]
		x := v.Args[1]
		v.reset(OpARM64BIC)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
	// match: (AND x0 x1:(SLLconst [c] y))
	// cond: clobberIfDead(x1)
	// result: (ANDshiftLL x0 y [c])
	for {
		_ = v.Args[1]
		x0 := v.Args[0]
		x1 := v.Args[1]
		if x1.Op != OpARM64SLLconst {
			break
		}
		c := x1.AuxInt
		y := x1.Args[0]
		if !(clobberIfDead(x1)) {
			break
		}
		v.reset(OpARM64ANDshiftLL)
		v.AuxInt = c
		v.AddArg(x0)
		v.AddArg(y)
		return true
	}
	// match: (AND x1:(SLLconst [c] y) x0)
	// cond: clobberIfDead(x1)
	// result: (ANDshiftLL x0 y [c])
	for {
		_ = v.Args[1]
		x1 := v.Args[0]
		if x1.Op != OpARM64SLLconst {
			break
		}
		c := x1.AuxInt
		y := x1.Args[0]
		x0 := v.Args[1]
		if !(clobberIfDead(x1)) {
			break
		}
		v.reset(OpARM64ANDshiftLL)
		v.AuxInt = c
		v.AddArg(x0)
		v.AddArg(y)
		return true
	}
	// match: (AND x0 x1:(SRLconst [c] y))
	// cond: clobberIfDead(x1)
	// result: (ANDshiftRL x0 y [c])
	for {
		_ = v.Args[1]
		x0 := v.Args[0]
		x1 := v.Args[1]
		if x1.Op != OpARM64SRLconst {
			break
		}
		c := x1.AuxInt
		y := x1.Args[0]
		if !(clobberIfDead(x1)) {
			break
		}
		v.reset(OpARM64ANDshiftRL)
		v.AuxInt = c
		v.AddArg(x0)
		v.AddArg(y)
		return true
	}
	// match: (AND x1:(SRLconst [c] y) x0)
	// cond: clobberIfDead(x1)
	// result: (ANDshiftRL x0 y [c])
	for {
		_ = v.Args[1]
		x1 := v.Args[0]
		if x1.Op != OpARM64SRLconst {
			break
		}
		c := x1.AuxInt
		y := x1.Args[0]
		x0 := v.Args[1]
		if !(clobberIfDead(x1)) {
			break
		}
		v.reset(OpARM64ANDshiftRL)
		v.AuxInt = c
		v.AddArg(x0)
		v.AddArg(y)
		return true
	}
	// match: (AND x0 x1:(SRAconst [c] y))
	// cond: clobberIfDead(x1)
	// result: (ANDshiftRA x0 y [c])
	for {
		_ = v.Args[1]
		x0 := v.Args[0]
		x1 := v.Args[1]
		if x1.Op != OpARM64SRAconst {
			break
		}
		c := x1.AuxInt
		y := x1.Args[0]
		if !(clobberIfDead(x1)) {
			break
		}
		v.reset(OpARM64ANDshiftRA)
		v.AuxInt = c
		v.AddArg(x0)
		v.AddArg(y)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64AND_10 applies the remaining AND rewrite rule
// (rules are generated in batches of ten): the mirror of the last rule in
// rewriteValueARM64_OpARM64AND_0, merging a left-hand SRAconst operand into
// ANDshiftRA when clobberIfDead permits. It reports whether v was rewritten.
// Generated code (DO NOT EDIT by hand).
func rewriteValueARM64_OpARM64AND_10(v *Value) bool {
	// match: (AND x1:(SRAconst [c] y) x0)
	// cond: clobberIfDead(x1)
	// result: (ANDshiftRA x0 y [c])
	for {
		_ = v.Args[1]
		x1 := v.Args[0]
		if x1.Op != OpARM64SRAconst {
			break
		}
		c := x1.AuxInt
		y := x1.Args[0]
		x0 := v.Args[1]
		if !(clobberIfDead(x1)) {
			break
		}
		v.reset(OpARM64ANDshiftRA)
		v.AuxInt = c
		v.AddArg(x0)
		v.AddArg(y)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64ANDconst_0 applies the ANDconst rewrite rules from
// gen/ARM64.rules: and-with-0 becomes the constant 0, and-with-(-1) is the
// identity, constants fold through MOVDconst and nested ANDconst, zero-extend
// ops (MOVWUreg/MOVHUreg/MOVBUreg) are absorbed into the mask, and mask-of-
// shift patterns become the UBFIZ/UBFX bitfield ops when the mask qualifies
// per isARM64BFMask. It reports whether v was rewritten. Generated code
// (DO NOT EDIT by hand).
func rewriteValueARM64_OpARM64ANDconst_0(v *Value) bool {
	// match: (ANDconst [0] _)
	// cond:
	// result: (MOVDconst [0])
	for {
		if v.AuxInt != 0 {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 0
		return true
	}
	// match: (ANDconst [-1] x)
	// cond:
	// result: x
	for {
		if v.AuxInt != -1 {
			break
		}
		x := v.Args[0]
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	// match: (ANDconst [c] (MOVDconst [d]))
	// cond:
	// result: (MOVDconst [c&d])
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		d := v_0.AuxInt
		v.reset(OpARM64MOVDconst)
		v.AuxInt = c & d
		return true
	}
	// match: (ANDconst [c] (ANDconst [d] x))
	// cond:
	// result: (ANDconst [c&d] x)
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpARM64ANDconst {
			break
		}
		d := v_0.AuxInt
		x := v_0.Args[0]
		v.reset(OpARM64ANDconst)
		v.AuxInt = c & d
		v.AddArg(x)
		return true
	}
	// match: (ANDconst [c] (MOVWUreg x))
	// cond:
	// result: (ANDconst [c&(1<<32-1)] x)
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVWUreg {
			break
		}
		x := v_0.Args[0]
		v.reset(OpARM64ANDconst)
		v.AuxInt = c & (1<<32 - 1)
		v.AddArg(x)
		return true
	}
	// match: (ANDconst [c] (MOVHUreg x))
	// cond:
	// result: (ANDconst [c&(1<<16-1)] x)
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVHUreg {
			break
		}
		x := v_0.Args[0]
		v.reset(OpARM64ANDconst)
		v.AuxInt = c & (1<<16 - 1)
		v.AddArg(x)
		return true
	}
	// match: (ANDconst [c] (MOVBUreg x))
	// cond:
	// result: (ANDconst [c&(1<<8-1)] x)
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVBUreg {
			break
		}
		x := v_0.Args[0]
		v.reset(OpARM64ANDconst)
		v.AuxInt = c & (1<<8 - 1)
		v.AddArg(x)
		return true
	}
	// match: (ANDconst [ac] (SLLconst [sc] x))
	// cond: isARM64BFMask(sc, ac, sc)
	// result: (UBFIZ [arm64BFAuxInt(sc, arm64BFWidth(ac, sc))] x)
	for {
		ac := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpARM64SLLconst {
			break
		}
		sc := v_0.AuxInt
		x := v_0.Args[0]
		if !(isARM64BFMask(sc, ac, sc)) {
			break
		}
		v.reset(OpARM64UBFIZ)
		v.AuxInt = arm64BFAuxInt(sc, arm64BFWidth(ac, sc))
		v.AddArg(x)
		return true
	}
	// match: (ANDconst [ac] (SRLconst [sc] x))
	// cond: isARM64BFMask(sc, ac, 0)
	// result: (UBFX [arm64BFAuxInt(sc, arm64BFWidth(ac, 0))] x)
	for {
		ac := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpARM64SRLconst {
			break
		}
		sc := v_0.AuxInt
		x := v_0.Args[0]
		if !(isARM64BFMask(sc, ac, 0)) {
			break
		}
		v.reset(OpARM64UBFX)
		v.AuxInt = arm64BFAuxInt(sc, arm64BFWidth(ac, 0))
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64ANDshiftLL_0 applies the ANDshiftLL rewrite rules
// from gen/ARM64.rules: distribute over a constant left operand, fold a
// constant right operand into an ANDconst, and collapse
// (ANDshiftLL x (SLLconst x [c]) [d]) with c==d to the shift itself
// (x AND x is x). It reports whether v was rewritten. Generated code
// (DO NOT EDIT by hand).
func rewriteValueARM64_OpARM64ANDshiftLL_0(v *Value) bool {
	b := v.Block
	_ = b
	// match: (ANDshiftLL (MOVDconst [c]) x [d])
	// cond:
	// result: (ANDconst [c] (SLLconst <x.Type> x [d]))
	for {
		d := v.AuxInt
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		x := v.Args[1]
		v.reset(OpARM64ANDconst)
		v.AuxInt = c
		v0 := b.NewValue0(v.Pos, OpARM64SLLconst, x.Type)
		v0.AuxInt = d
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (ANDshiftLL x (MOVDconst [c]) [d])
	// cond:
	// result: (ANDconst x [int64(uint64(c)<<uint64(d))])
	for {
		d := v.AuxInt
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		v.reset(OpARM64ANDconst)
		v.AuxInt = int64(uint64(c) << uint64(d))
		v.AddArg(x)
		return true
	}
	// match: (ANDshiftLL x y:(SLLconst x [c]) [d])
	// cond: c==d
	// result: y
	for {
		d := v.AuxInt
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		if y.Op != OpARM64SLLconst {
			break
		}
		c := y.AuxInt
		if x != y.Args[0] {
			break
		}
		if !(c == d) {
			break
		}
		v.reset(OpCopy)
		v.Type = y.Type
		v.AddArg(y)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64ANDshiftRA_0 applies the ANDshiftRA rewrite rules
// from gen/ARM64.rules: distribute over a constant left operand, fold a
// constant right operand (signed >> on the constant) into an ANDconst, and
// collapse (ANDshiftRA x (SRAconst x [c]) [d]) with c==d to the shift itself.
// It reports whether v was rewritten. Generated code (DO NOT EDIT by hand).
func rewriteValueARM64_OpARM64ANDshiftRA_0(v *Value) bool {
	b := v.Block
	_ = b
	// match: (ANDshiftRA (MOVDconst [c]) x [d])
	// cond:
	// result: (ANDconst [c] (SRAconst <x.Type> x [d]))
	for {
		d := v.AuxInt
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		x := v.Args[1]
		v.reset(OpARM64ANDconst)
		v.AuxInt = c
		v0 := b.NewValue0(v.Pos, OpARM64SRAconst, x.Type)
		v0.AuxInt = d
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (ANDshiftRA x (MOVDconst [c]) [d])
	// cond:
	// result: (ANDconst x [c>>uint64(d)])
	for {
		d := v.AuxInt
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		v.reset(OpARM64ANDconst)
		v.AuxInt = c >> uint64(d)
		v.AddArg(x)
		return true
	}
	// match: (ANDshiftRA x y:(SRAconst x [c]) [d])
	// cond: c==d
	// result: y
	for {
		d := v.AuxInt
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		if y.Op != OpARM64SRAconst {
			break
		}
		c := y.AuxInt
		if x != y.Args[0] {
			break
		}
		if !(c == d) {
			break
		}
		v.reset(OpCopy)
		v.Type = y.Type
		v.AddArg(y)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64ANDshiftRL_0 applies the ANDshiftRL rewrite rules
// from gen/ARM64.rules: distribute over a constant left operand, fold a
// constant right operand (unsigned >> on the constant) into an ANDconst, and
// collapse (ANDshiftRL x (SRLconst x [c]) [d]) with c==d to the shift itself.
// It reports whether v was rewritten. Generated code (DO NOT EDIT by hand).
func rewriteValueARM64_OpARM64ANDshiftRL_0(v *Value) bool {
	b := v.Block
	_ = b
	// match: (ANDshiftRL (MOVDconst [c]) x [d])
	// cond:
	// result: (ANDconst [c] (SRLconst <x.Type> x [d]))
	for {
		d := v.AuxInt
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		x := v.Args[1]
		v.reset(OpARM64ANDconst)
		v.AuxInt = c
		v0 := b.NewValue0(v.Pos, OpARM64SRLconst, x.Type)
		v0.AuxInt = d
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (ANDshiftRL x (MOVDconst [c]) [d])
	// cond:
	// result: (ANDconst x [int64(uint64(c)>>uint64(d))])
	for {
		d := v.AuxInt
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		v.reset(OpARM64ANDconst)
		v.AuxInt = int64(uint64(c) >> uint64(d))
		v.AddArg(x)
		return true
	}
	// match: (ANDshiftRL x y:(SRLconst x [c]) [d])
	// cond: c==d
	// result: y
	for {
		d := v.AuxInt
		_ = v.Args[1]
		x := v.Args[0]
		y := v.Args[1]
		if y.Op != OpARM64SRLconst {
			break
		}
		c := y.AuxInt
		if x != y.Args[0] {
			break
		}
		if !(c == d) {
			break
		}
		v.reset(OpCopy)
		v.Type = y.Type
		v.AddArg(y)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64BIC_0 rewrites BIC (bit-clear: arg0 &^ arg1, as
// the first rule's fold to ANDconst [^c] encodes): a constant second operand
// becomes ANDconst with the inverted constant, (BIC x x) folds to zero, and
// a shift-by-constant second operand is absorbed into a combined
// BICshiftLL/BICshiftRL/BICshiftRA op when the shift value can be clobbered
// (clobberIfDead — presumably "this is the shift's only use"; confirm
// against the rewrite helpers). It reports whether v was rewritten.
func rewriteValueARM64_OpARM64BIC_0(v *Value) bool {
	// match: (BIC x (MOVDconst [c]))
	// cond:
	// result: (ANDconst [^c] x)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		v.reset(OpARM64ANDconst)
		v.AuxInt = ^c
		v.AddArg(x)
		return true
	}
	// match: (BIC x x)
	// cond:
	// result: (MOVDconst [0])
	for {
		_ = v.Args[1]
		x := v.Args[0]
		if x != v.Args[1] {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 0
		return true
	}
	// match: (BIC x0 x1:(SLLconst [c] y))
	// cond: clobberIfDead(x1)
	// result: (BICshiftLL x0 y [c])
	for {
		_ = v.Args[1]
		x0 := v.Args[0]
		x1 := v.Args[1]
		if x1.Op != OpARM64SLLconst {
			break
		}
		c := x1.AuxInt
		y := x1.Args[0]
		if !(clobberIfDead(x1)) {
			break
		}
		v.reset(OpARM64BICshiftLL)
		v.AuxInt = c
		v.AddArg(x0)
		v.AddArg(y)
		return true
	}
	// match: (BIC x0 x1:(SRLconst [c] y))
	// cond: clobberIfDead(x1)
	// result: (BICshiftRL x0 y [c])
	for {
		_ = v.Args[1]
		x0 := v.Args[0]
		x1 := v.Args[1]
		if x1.Op != OpARM64SRLconst {
			break
		}
		c := x1.AuxInt
		y := x1.Args[0]
		if !(clobberIfDead(x1)) {
			break
		}
		v.reset(OpARM64BICshiftRL)
		v.AuxInt = c
		v.AddArg(x0)
		v.AddArg(y)
		return true
	}
	// match: (BIC x0 x1:(SRAconst [c] y))
	// cond: clobberIfDead(x1)
	// result: (BICshiftRA x0 y [c])
	for {
		_ = v.Args[1]
		x0 := v.Args[0]
		x1 := v.Args[1]
		if x1.Op != OpARM64SRAconst {
			break
		}
		c := x1.AuxInt
		y := x1.Args[0]
		if !(clobberIfDead(x1)) {
			break
		}
		v.reset(OpARM64BICshiftRA)
		v.AuxInt = c
		v.AddArg(x0)
		v.AddArg(y)
		return true
	}
	return false
}
  3210  func rewriteValueARM64_OpARM64BICshiftLL_0(v *Value) bool {
  3211  	// match: (BICshiftLL x (MOVDconst [c]) [d])
  3212  	// cond:
  3213  	// result: (ANDconst x [^int64(uint64(c)<<uint64(d))])
  3214  	for {
  3215  		d := v.AuxInt
  3216  		_ = v.Args[1]
  3217  		x := v.Args[0]
  3218  		v_1 := v.Args[1]
  3219  		if v_1.Op != OpARM64MOVDconst {
  3220  			break
  3221  		}
  3222  		c := v_1.AuxInt
  3223  		v.reset(OpARM64ANDconst)
  3224  		v.AuxInt = ^int64(uint64(c) << uint64(d))
  3225  		v.AddArg(x)
  3226  		return true
  3227  	}
  3228  	// match: (BICshiftLL x (SLLconst x [c]) [d])
  3229  	// cond: c==d
  3230  	// result: (MOVDconst [0])
  3231  	for {
  3232  		d := v.AuxInt
  3233  		_ = v.Args[1]
  3234  		x := v.Args[0]
  3235  		v_1 := v.Args[1]
  3236  		if v_1.Op != OpARM64SLLconst {
  3237  			break
  3238  		}
  3239  		c := v_1.AuxInt
  3240  		if x != v_1.Args[0] {
  3241  			break
  3242  		}
  3243  		if !(c == d) {
  3244  			break
  3245  		}
  3246  		v.reset(OpARM64MOVDconst)
  3247  		v.AuxInt = 0
  3248  		return true
  3249  	}
  3250  	return false
  3251  }
  3252  func rewriteValueARM64_OpARM64BICshiftRA_0(v *Value) bool {
  3253  	// match: (BICshiftRA x (MOVDconst [c]) [d])
  3254  	// cond:
  3255  	// result: (ANDconst x [^(c>>uint64(d))])
  3256  	for {
  3257  		d := v.AuxInt
  3258  		_ = v.Args[1]
  3259  		x := v.Args[0]
  3260  		v_1 := v.Args[1]
  3261  		if v_1.Op != OpARM64MOVDconst {
  3262  			break
  3263  		}
  3264  		c := v_1.AuxInt
  3265  		v.reset(OpARM64ANDconst)
  3266  		v.AuxInt = ^(c >> uint64(d))
  3267  		v.AddArg(x)
  3268  		return true
  3269  	}
  3270  	// match: (BICshiftRA x (SRAconst x [c]) [d])
  3271  	// cond: c==d
  3272  	// result: (MOVDconst [0])
  3273  	for {
  3274  		d := v.AuxInt
  3275  		_ = v.Args[1]
  3276  		x := v.Args[0]
  3277  		v_1 := v.Args[1]
  3278  		if v_1.Op != OpARM64SRAconst {
  3279  			break
  3280  		}
  3281  		c := v_1.AuxInt
  3282  		if x != v_1.Args[0] {
  3283  			break
  3284  		}
  3285  		if !(c == d) {
  3286  			break
  3287  		}
  3288  		v.reset(OpARM64MOVDconst)
  3289  		v.AuxInt = 0
  3290  		return true
  3291  	}
  3292  	return false
  3293  }
  3294  func rewriteValueARM64_OpARM64BICshiftRL_0(v *Value) bool {
  3295  	// match: (BICshiftRL x (MOVDconst [c]) [d])
  3296  	// cond:
  3297  	// result: (ANDconst x [^int64(uint64(c)>>uint64(d))])
  3298  	for {
  3299  		d := v.AuxInt
  3300  		_ = v.Args[1]
  3301  		x := v.Args[0]
  3302  		v_1 := v.Args[1]
  3303  		if v_1.Op != OpARM64MOVDconst {
  3304  			break
  3305  		}
  3306  		c := v_1.AuxInt
  3307  		v.reset(OpARM64ANDconst)
  3308  		v.AuxInt = ^int64(uint64(c) >> uint64(d))
  3309  		v.AddArg(x)
  3310  		return true
  3311  	}
  3312  	// match: (BICshiftRL x (SRLconst x [c]) [d])
  3313  	// cond: c==d
  3314  	// result: (MOVDconst [0])
  3315  	for {
  3316  		d := v.AuxInt
  3317  		_ = v.Args[1]
  3318  		x := v.Args[0]
  3319  		v_1 := v.Args[1]
  3320  		if v_1.Op != OpARM64SRLconst {
  3321  			break
  3322  		}
  3323  		c := v_1.AuxInt
  3324  		if x != v_1.Args[0] {
  3325  			break
  3326  		}
  3327  		if !(c == d) {
  3328  			break
  3329  		}
  3330  		v.reset(OpARM64MOVDconst)
  3331  		v.AuxInt = 0
  3332  		return true
  3333  	}
  3334  	return false
  3335  }
// rewriteValueARM64_OpARM64CMN_0 rewrites CMN (compare arg0 against -arg1,
// i.e. the flags of arg0+arg1, as the CMNconst constant folding elsewhere
// in this file encodes; CMN is commutative, so each rule below is tried
// with the operands in both orders): a constant operand is folded into
// CMNconst, and a shift-by-constant operand is absorbed into a combined
// CMNshiftLL/CMNshiftRL/CMNshiftRA op when the shift value can be
// clobbered (clobberIfDead — presumably "this is the shift's only use";
// confirm against the rewrite helpers). It reports whether v was
// rewritten.
func rewriteValueARM64_OpARM64CMN_0(v *Value) bool {
	// match: (CMN x (MOVDconst [c]))
	// cond:
	// result: (CMNconst [c] x)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		v.reset(OpARM64CMNconst)
		v.AuxInt = c
		v.AddArg(x)
		return true
	}
	// match: (CMN (MOVDconst [c]) x)
	// cond:
	// result: (CMNconst [c] x)
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		x := v.Args[1]
		v.reset(OpARM64CMNconst)
		v.AuxInt = c
		v.AddArg(x)
		return true
	}
	// match: (CMN x0 x1:(SLLconst [c] y))
	// cond: clobberIfDead(x1)
	// result: (CMNshiftLL x0 y [c])
	for {
		_ = v.Args[1]
		x0 := v.Args[0]
		x1 := v.Args[1]
		if x1.Op != OpARM64SLLconst {
			break
		}
		c := x1.AuxInt
		y := x1.Args[0]
		if !(clobberIfDead(x1)) {
			break
		}
		v.reset(OpARM64CMNshiftLL)
		v.AuxInt = c
		v.AddArg(x0)
		v.AddArg(y)
		return true
	}
	// match: (CMN x1:(SLLconst [c] y) x0)
	// cond: clobberIfDead(x1)
	// result: (CMNshiftLL x0 y [c])
	for {
		_ = v.Args[1]
		x1 := v.Args[0]
		if x1.Op != OpARM64SLLconst {
			break
		}
		c := x1.AuxInt
		y := x1.Args[0]
		x0 := v.Args[1]
		if !(clobberIfDead(x1)) {
			break
		}
		v.reset(OpARM64CMNshiftLL)
		v.AuxInt = c
		v.AddArg(x0)
		v.AddArg(y)
		return true
	}
	// match: (CMN x0 x1:(SRLconst [c] y))
	// cond: clobberIfDead(x1)
	// result: (CMNshiftRL x0 y [c])
	for {
		_ = v.Args[1]
		x0 := v.Args[0]
		x1 := v.Args[1]
		if x1.Op != OpARM64SRLconst {
			break
		}
		c := x1.AuxInt
		y := x1.Args[0]
		if !(clobberIfDead(x1)) {
			break
		}
		v.reset(OpARM64CMNshiftRL)
		v.AuxInt = c
		v.AddArg(x0)
		v.AddArg(y)
		return true
	}
	// match: (CMN x1:(SRLconst [c] y) x0)
	// cond: clobberIfDead(x1)
	// result: (CMNshiftRL x0 y [c])
	for {
		_ = v.Args[1]
		x1 := v.Args[0]
		if x1.Op != OpARM64SRLconst {
			break
		}
		c := x1.AuxInt
		y := x1.Args[0]
		x0 := v.Args[1]
		if !(clobberIfDead(x1)) {
			break
		}
		v.reset(OpARM64CMNshiftRL)
		v.AuxInt = c
		v.AddArg(x0)
		v.AddArg(y)
		return true
	}
	// match: (CMN x0 x1:(SRAconst [c] y))
	// cond: clobberIfDead(x1)
	// result: (CMNshiftRA x0 y [c])
	for {
		_ = v.Args[1]
		x0 := v.Args[0]
		x1 := v.Args[1]
		if x1.Op != OpARM64SRAconst {
			break
		}
		c := x1.AuxInt
		y := x1.Args[0]
		if !(clobberIfDead(x1)) {
			break
		}
		v.reset(OpARM64CMNshiftRA)
		v.AuxInt = c
		v.AddArg(x0)
		v.AddArg(y)
		return true
	}
	// match: (CMN x1:(SRAconst [c] y) x0)
	// cond: clobberIfDead(x1)
	// result: (CMNshiftRA x0 y [c])
	for {
		_ = v.Args[1]
		x1 := v.Args[0]
		if x1.Op != OpARM64SRAconst {
			break
		}
		c := x1.AuxInt
		y := x1.Args[0]
		x0 := v.Args[1]
		if !(clobberIfDead(x1)) {
			break
		}
		v.reset(OpARM64CMNshiftRA)
		v.AuxInt = c
		v.AddArg(x0)
		v.AddArg(y)
		return true
	}
	return false
}
  3497  func rewriteValueARM64_OpARM64CMNW_0(v *Value) bool {
  3498  	// match: (CMNW x (MOVDconst [c]))
  3499  	// cond:
  3500  	// result: (CMNWconst [c] x)
  3501  	for {
  3502  		_ = v.Args[1]
  3503  		x := v.Args[0]
  3504  		v_1 := v.Args[1]
  3505  		if v_1.Op != OpARM64MOVDconst {
  3506  			break
  3507  		}
  3508  		c := v_1.AuxInt
  3509  		v.reset(OpARM64CMNWconst)
  3510  		v.AuxInt = c
  3511  		v.AddArg(x)
  3512  		return true
  3513  	}
  3514  	// match: (CMNW (MOVDconst [c]) x)
  3515  	// cond:
  3516  	// result: (CMNWconst [c] x)
  3517  	for {
  3518  		_ = v.Args[1]
  3519  		v_0 := v.Args[0]
  3520  		if v_0.Op != OpARM64MOVDconst {
  3521  			break
  3522  		}
  3523  		c := v_0.AuxInt
  3524  		x := v.Args[1]
  3525  		v.reset(OpARM64CMNWconst)
  3526  		v.AuxInt = c
  3527  		v.AddArg(x)
  3528  		return true
  3529  	}
  3530  	return false
  3531  }
// rewriteValueARM64_OpARM64CMNWconst_0 constant-folds a 32-bit CMNWconst
// whose operand is a known constant into one of the Flag* pseudo-values,
// encoding the result of comparing int32(x) against int32(-y) (signed)
// and uint32(x) against uint32(-y) (unsigned). It reports whether v was
// rewritten.
func rewriteValueARM64_OpARM64CMNWconst_0(v *Value) bool {
	// match: (CMNWconst (MOVDconst [x]) [y])
	// cond: int32(x)==int32(-y)
	// result: (FlagEQ)
	for {
		y := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		x := v_0.AuxInt
		if !(int32(x) == int32(-y)) {
			break
		}
		v.reset(OpARM64FlagEQ)
		return true
	}
	// match: (CMNWconst (MOVDconst [x]) [y])
	// cond: int32(x)<int32(-y) && uint32(x)<uint32(-y)
	// result: (FlagLT_ULT)
	for {
		y := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		x := v_0.AuxInt
		if !(int32(x) < int32(-y) && uint32(x) < uint32(-y)) {
			break
		}
		v.reset(OpARM64FlagLT_ULT)
		return true
	}
	// match: (CMNWconst (MOVDconst [x]) [y])
	// cond: int32(x)<int32(-y) && uint32(x)>uint32(-y)
	// result: (FlagLT_UGT)
	for {
		y := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		x := v_0.AuxInt
		if !(int32(x) < int32(-y) && uint32(x) > uint32(-y)) {
			break
		}
		v.reset(OpARM64FlagLT_UGT)
		return true
	}
	// match: (CMNWconst (MOVDconst [x]) [y])
	// cond: int32(x)>int32(-y) && uint32(x)<uint32(-y)
	// result: (FlagGT_ULT)
	for {
		y := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		x := v_0.AuxInt
		if !(int32(x) > int32(-y) && uint32(x) < uint32(-y)) {
			break
		}
		v.reset(OpARM64FlagGT_ULT)
		return true
	}
	// match: (CMNWconst (MOVDconst [x]) [y])
	// cond: int32(x)>int32(-y) && uint32(x)>uint32(-y)
	// result: (FlagGT_UGT)
	for {
		y := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		x := v_0.AuxInt
		if !(int32(x) > int32(-y) && uint32(x) > uint32(-y)) {
			break
		}
		v.reset(OpARM64FlagGT_UGT)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64CMNconst_0 constant-folds a 64-bit CMNconst
// whose operand is a known constant into one of the Flag* pseudo-values,
// encoding the result of comparing int64(x) against int64(-y) (signed)
// and uint64(x) against uint64(-y) (unsigned). It reports whether v was
// rewritten.
func rewriteValueARM64_OpARM64CMNconst_0(v *Value) bool {
	// match: (CMNconst (MOVDconst [x]) [y])
	// cond: int64(x)==int64(-y)
	// result: (FlagEQ)
	for {
		y := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		x := v_0.AuxInt
		if !(int64(x) == int64(-y)) {
			break
		}
		v.reset(OpARM64FlagEQ)
		return true
	}
	// match: (CMNconst (MOVDconst [x]) [y])
	// cond: int64(x)<int64(-y) && uint64(x)<uint64(-y)
	// result: (FlagLT_ULT)
	for {
		y := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		x := v_0.AuxInt
		if !(int64(x) < int64(-y) && uint64(x) < uint64(-y)) {
			break
		}
		v.reset(OpARM64FlagLT_ULT)
		return true
	}
	// match: (CMNconst (MOVDconst [x]) [y])
	// cond: int64(x)<int64(-y) && uint64(x)>uint64(-y)
	// result: (FlagLT_UGT)
	for {
		y := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		x := v_0.AuxInt
		if !(int64(x) < int64(-y) && uint64(x) > uint64(-y)) {
			break
		}
		v.reset(OpARM64FlagLT_UGT)
		return true
	}
	// match: (CMNconst (MOVDconst [x]) [y])
	// cond: int64(x)>int64(-y) && uint64(x)<uint64(-y)
	// result: (FlagGT_ULT)
	for {
		y := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		x := v_0.AuxInt
		if !(int64(x) > int64(-y) && uint64(x) < uint64(-y)) {
			break
		}
		v.reset(OpARM64FlagGT_ULT)
		return true
	}
	// match: (CMNconst (MOVDconst [x]) [y])
	// cond: int64(x)>int64(-y) && uint64(x)>uint64(-y)
	// result: (FlagGT_UGT)
	for {
		y := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		x := v_0.AuxInt
		if !(int64(x) > int64(-y) && uint64(x) > uint64(-y)) {
			break
		}
		v.reset(OpARM64FlagGT_UGT)
		return true
	}
	return false
}
  3698  func rewriteValueARM64_OpARM64CMNshiftLL_0(v *Value) bool {
  3699  	b := v.Block
  3700  	_ = b
  3701  	// match: (CMNshiftLL (MOVDconst [c]) x [d])
  3702  	// cond:
  3703  	// result: (CMNconst [c] (SLLconst <x.Type> x [d]))
  3704  	for {
  3705  		d := v.AuxInt
  3706  		_ = v.Args[1]
  3707  		v_0 := v.Args[0]
  3708  		if v_0.Op != OpARM64MOVDconst {
  3709  			break
  3710  		}
  3711  		c := v_0.AuxInt
  3712  		x := v.Args[1]
  3713  		v.reset(OpARM64CMNconst)
  3714  		v.AuxInt = c
  3715  		v0 := b.NewValue0(v.Pos, OpARM64SLLconst, x.Type)
  3716  		v0.AuxInt = d
  3717  		v0.AddArg(x)
  3718  		v.AddArg(v0)
  3719  		return true
  3720  	}
  3721  	// match: (CMNshiftLL x (MOVDconst [c]) [d])
  3722  	// cond:
  3723  	// result: (CMNconst x [int64(uint64(c)<<uint64(d))])
  3724  	for {
  3725  		d := v.AuxInt
  3726  		_ = v.Args[1]
  3727  		x := v.Args[0]
  3728  		v_1 := v.Args[1]
  3729  		if v_1.Op != OpARM64MOVDconst {
  3730  			break
  3731  		}
  3732  		c := v_1.AuxInt
  3733  		v.reset(OpARM64CMNconst)
  3734  		v.AuxInt = int64(uint64(c) << uint64(d))
  3735  		v.AddArg(x)
  3736  		return true
  3737  	}
  3738  	return false
  3739  }
  3740  func rewriteValueARM64_OpARM64CMNshiftRA_0(v *Value) bool {
  3741  	b := v.Block
  3742  	_ = b
  3743  	// match: (CMNshiftRA (MOVDconst [c]) x [d])
  3744  	// cond:
  3745  	// result: (CMNconst [c] (SRAconst <x.Type> x [d]))
  3746  	for {
  3747  		d := v.AuxInt
  3748  		_ = v.Args[1]
  3749  		v_0 := v.Args[0]
  3750  		if v_0.Op != OpARM64MOVDconst {
  3751  			break
  3752  		}
  3753  		c := v_0.AuxInt
  3754  		x := v.Args[1]
  3755  		v.reset(OpARM64CMNconst)
  3756  		v.AuxInt = c
  3757  		v0 := b.NewValue0(v.Pos, OpARM64SRAconst, x.Type)
  3758  		v0.AuxInt = d
  3759  		v0.AddArg(x)
  3760  		v.AddArg(v0)
  3761  		return true
  3762  	}
  3763  	// match: (CMNshiftRA x (MOVDconst [c]) [d])
  3764  	// cond:
  3765  	// result: (CMNconst x [c>>uint64(d)])
  3766  	for {
  3767  		d := v.AuxInt
  3768  		_ = v.Args[1]
  3769  		x := v.Args[0]
  3770  		v_1 := v.Args[1]
  3771  		if v_1.Op != OpARM64MOVDconst {
  3772  			break
  3773  		}
  3774  		c := v_1.AuxInt
  3775  		v.reset(OpARM64CMNconst)
  3776  		v.AuxInt = c >> uint64(d)
  3777  		v.AddArg(x)
  3778  		return true
  3779  	}
  3780  	return false
  3781  }
  3782  func rewriteValueARM64_OpARM64CMNshiftRL_0(v *Value) bool {
  3783  	b := v.Block
  3784  	_ = b
  3785  	// match: (CMNshiftRL (MOVDconst [c]) x [d])
  3786  	// cond:
  3787  	// result: (CMNconst [c] (SRLconst <x.Type> x [d]))
  3788  	for {
  3789  		d := v.AuxInt
  3790  		_ = v.Args[1]
  3791  		v_0 := v.Args[0]
  3792  		if v_0.Op != OpARM64MOVDconst {
  3793  			break
  3794  		}
  3795  		c := v_0.AuxInt
  3796  		x := v.Args[1]
  3797  		v.reset(OpARM64CMNconst)
  3798  		v.AuxInt = c
  3799  		v0 := b.NewValue0(v.Pos, OpARM64SRLconst, x.Type)
  3800  		v0.AuxInt = d
  3801  		v0.AddArg(x)
  3802  		v.AddArg(v0)
  3803  		return true
  3804  	}
  3805  	// match: (CMNshiftRL x (MOVDconst [c]) [d])
  3806  	// cond:
  3807  	// result: (CMNconst x [int64(uint64(c)>>uint64(d))])
  3808  	for {
  3809  		d := v.AuxInt
  3810  		_ = v.Args[1]
  3811  		x := v.Args[0]
  3812  		v_1 := v.Args[1]
  3813  		if v_1.Op != OpARM64MOVDconst {
  3814  			break
  3815  		}
  3816  		c := v_1.AuxInt
  3817  		v.reset(OpARM64CMNconst)
  3818  		v.AuxInt = int64(uint64(c) >> uint64(d))
  3819  		v.AddArg(x)
  3820  		return true
  3821  	}
  3822  	return false
  3823  }
// rewriteValueARM64_OpARM64CMP_0 rewrites CMP (compare arg0 against arg1;
// NOT commutative): a constant or shift-by-constant operand is folded into
// CMPconst / CMPshiftLL / CMPshiftRL / CMPshiftRA forms. When the folded
// operand comes from the FIRST argument the operands must be swapped, so
// the result is wrapped in InvertFlags to compensate. Shift operands are
// absorbed only when the shift value can be clobbered (clobberIfDead —
// presumably "this is the shift's only use"; confirm against the rewrite
// helpers). It reports whether v was rewritten.
func rewriteValueARM64_OpARM64CMP_0(v *Value) bool {
	b := v.Block
	_ = b
	// match: (CMP x (MOVDconst [c]))
	// cond:
	// result: (CMPconst [c] x)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		v.reset(OpARM64CMPconst)
		v.AuxInt = c
		v.AddArg(x)
		return true
	}
	// match: (CMP (MOVDconst [c]) x)
	// cond:
	// result: (InvertFlags (CMPconst [c] x))
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		x := v.Args[1]
		v.reset(OpARM64InvertFlags)
		v0 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
		v0.AuxInt = c
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (CMP x0 x1:(SLLconst [c] y))
	// cond: clobberIfDead(x1)
	// result: (CMPshiftLL x0 y [c])
	for {
		_ = v.Args[1]
		x0 := v.Args[0]
		x1 := v.Args[1]
		if x1.Op != OpARM64SLLconst {
			break
		}
		c := x1.AuxInt
		y := x1.Args[0]
		if !(clobberIfDead(x1)) {
			break
		}
		v.reset(OpARM64CMPshiftLL)
		v.AuxInt = c
		v.AddArg(x0)
		v.AddArg(y)
		return true
	}
	// match: (CMP x0:(SLLconst [c] y) x1)
	// cond: clobberIfDead(x0)
	// result: (InvertFlags (CMPshiftLL x1 y [c]))
	for {
		_ = v.Args[1]
		x0 := v.Args[0]
		if x0.Op != OpARM64SLLconst {
			break
		}
		c := x0.AuxInt
		y := x0.Args[0]
		x1 := v.Args[1]
		if !(clobberIfDead(x0)) {
			break
		}
		v.reset(OpARM64InvertFlags)
		v0 := b.NewValue0(v.Pos, OpARM64CMPshiftLL, types.TypeFlags)
		v0.AuxInt = c
		v0.AddArg(x1)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
	// match: (CMP x0 x1:(SRLconst [c] y))
	// cond: clobberIfDead(x1)
	// result: (CMPshiftRL x0 y [c])
	for {
		_ = v.Args[1]
		x0 := v.Args[0]
		x1 := v.Args[1]
		if x1.Op != OpARM64SRLconst {
			break
		}
		c := x1.AuxInt
		y := x1.Args[0]
		if !(clobberIfDead(x1)) {
			break
		}
		v.reset(OpARM64CMPshiftRL)
		v.AuxInt = c
		v.AddArg(x0)
		v.AddArg(y)
		return true
	}
	// match: (CMP x0:(SRLconst [c] y) x1)
	// cond: clobberIfDead(x0)
	// result: (InvertFlags (CMPshiftRL x1 y [c]))
	for {
		_ = v.Args[1]
		x0 := v.Args[0]
		if x0.Op != OpARM64SRLconst {
			break
		}
		c := x0.AuxInt
		y := x0.Args[0]
		x1 := v.Args[1]
		if !(clobberIfDead(x0)) {
			break
		}
		v.reset(OpARM64InvertFlags)
		v0 := b.NewValue0(v.Pos, OpARM64CMPshiftRL, types.TypeFlags)
		v0.AuxInt = c
		v0.AddArg(x1)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
	// match: (CMP x0 x1:(SRAconst [c] y))
	// cond: clobberIfDead(x1)
	// result: (CMPshiftRA x0 y [c])
	for {
		_ = v.Args[1]
		x0 := v.Args[0]
		x1 := v.Args[1]
		if x1.Op != OpARM64SRAconst {
			break
		}
		c := x1.AuxInt
		y := x1.Args[0]
		if !(clobberIfDead(x1)) {
			break
		}
		v.reset(OpARM64CMPshiftRA)
		v.AuxInt = c
		v.AddArg(x0)
		v.AddArg(y)
		return true
	}
	// match: (CMP x0:(SRAconst [c] y) x1)
	// cond: clobberIfDead(x0)
	// result: (InvertFlags (CMPshiftRA x1 y [c]))
	for {
		_ = v.Args[1]
		x0 := v.Args[0]
		if x0.Op != OpARM64SRAconst {
			break
		}
		c := x0.AuxInt
		y := x0.Args[0]
		x1 := v.Args[1]
		if !(clobberIfDead(x0)) {
			break
		}
		v.reset(OpARM64InvertFlags)
		v0 := b.NewValue0(v.Pos, OpARM64CMPshiftRA, types.TypeFlags)
		v0.AuxInt = c
		v0.AddArg(x1)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
	return false
}
  3995  func rewriteValueARM64_OpARM64CMPW_0(v *Value) bool {
  3996  	b := v.Block
  3997  	_ = b
  3998  	// match: (CMPW x (MOVDconst [c]))
  3999  	// cond:
  4000  	// result: (CMPWconst [int64(int32(c))] x)
  4001  	for {
  4002  		_ = v.Args[1]
  4003  		x := v.Args[0]
  4004  		v_1 := v.Args[1]
  4005  		if v_1.Op != OpARM64MOVDconst {
  4006  			break
  4007  		}
  4008  		c := v_1.AuxInt
  4009  		v.reset(OpARM64CMPWconst)
  4010  		v.AuxInt = int64(int32(c))
  4011  		v.AddArg(x)
  4012  		return true
  4013  	}
  4014  	// match: (CMPW (MOVDconst [c]) x)
  4015  	// cond:
  4016  	// result: (InvertFlags (CMPWconst [int64(int32(c))] x))
  4017  	for {
  4018  		_ = v.Args[1]
  4019  		v_0 := v.Args[0]
  4020  		if v_0.Op != OpARM64MOVDconst {
  4021  			break
  4022  		}
  4023  		c := v_0.AuxInt
  4024  		x := v.Args[1]
  4025  		v.reset(OpARM64InvertFlags)
  4026  		v0 := b.NewValue0(v.Pos, OpARM64CMPWconst, types.TypeFlags)
  4027  		v0.AuxInt = int64(int32(c))
  4028  		v0.AddArg(x)
  4029  		v.AddArg(v0)
  4030  		return true
  4031  	}
  4032  	return false
  4033  }
// rewriteValueARM64_OpARM64CMPWconst_0 simplifies a 32-bit CMPWconst:
// comparing a known constant folds directly to one of the Flag*
// pseudo-values encoding the signed (int32) and unsigned (uint32)
// comparison results; comparing a zero-extended byte or halfword against a
// constant known to exceed its maximum value (0xff / 0xffff) folds to
// FlagLT_ULT, since the operand is provably smaller both signed and
// unsigned. It reports whether v was rewritten.
func rewriteValueARM64_OpARM64CMPWconst_0(v *Value) bool {
	// match: (CMPWconst (MOVDconst [x]) [y])
	// cond: int32(x)==int32(y)
	// result: (FlagEQ)
	for {
		y := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		x := v_0.AuxInt
		if !(int32(x) == int32(y)) {
			break
		}
		v.reset(OpARM64FlagEQ)
		return true
	}
	// match: (CMPWconst (MOVDconst [x]) [y])
	// cond: int32(x)<int32(y) && uint32(x)<uint32(y)
	// result: (FlagLT_ULT)
	for {
		y := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		x := v_0.AuxInt
		if !(int32(x) < int32(y) && uint32(x) < uint32(y)) {
			break
		}
		v.reset(OpARM64FlagLT_ULT)
		return true
	}
	// match: (CMPWconst (MOVDconst [x]) [y])
	// cond: int32(x)<int32(y) && uint32(x)>uint32(y)
	// result: (FlagLT_UGT)
	for {
		y := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		x := v_0.AuxInt
		if !(int32(x) < int32(y) && uint32(x) > uint32(y)) {
			break
		}
		v.reset(OpARM64FlagLT_UGT)
		return true
	}
	// match: (CMPWconst (MOVDconst [x]) [y])
	// cond: int32(x)>int32(y) && uint32(x)<uint32(y)
	// result: (FlagGT_ULT)
	for {
		y := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		x := v_0.AuxInt
		if !(int32(x) > int32(y) && uint32(x) < uint32(y)) {
			break
		}
		v.reset(OpARM64FlagGT_ULT)
		return true
	}
	// match: (CMPWconst (MOVDconst [x]) [y])
	// cond: int32(x)>int32(y) && uint32(x)>uint32(y)
	// result: (FlagGT_UGT)
	for {
		y := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		x := v_0.AuxInt
		if !(int32(x) > int32(y) && uint32(x) > uint32(y)) {
			break
		}
		v.reset(OpARM64FlagGT_UGT)
		return true
	}
	// match: (CMPWconst (MOVBUreg _) [c])
	// cond: 0xff < int32(c)
	// result: (FlagLT_ULT)
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVBUreg {
			break
		}
		if !(0xff < int32(c)) {
			break
		}
		v.reset(OpARM64FlagLT_ULT)
		return true
	}
	// match: (CMPWconst (MOVHUreg _) [c])
	// cond: 0xffff < int32(c)
	// result: (FlagLT_ULT)
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVHUreg {
			break
		}
		if !(0xffff < int32(c)) {
			break
		}
		v.reset(OpARM64FlagLT_ULT)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64CMPconst_0 simplifies a 64-bit CMPconst:
// comparing a known constant folds directly to one of the Flag*
// pseudo-values encoding the signed (int64) and unsigned (uint64)
// comparison results. In addition, several operands with a provable upper
// bound — a zero-extended byte/halfword/word (max 0xff / 0xffff /
// 0xffffffff), an ANDconst with a non-negative mask m, or an SRLconst
// whose result fits below 1<<(64-c) — fold to FlagLT_ULT when compared
// against a constant known to be strictly larger. It reports whether v
// was rewritten.
func rewriteValueARM64_OpARM64CMPconst_0(v *Value) bool {
	// match: (CMPconst (MOVDconst [x]) [y])
	// cond: x==y
	// result: (FlagEQ)
	for {
		y := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		x := v_0.AuxInt
		if !(x == y) {
			break
		}
		v.reset(OpARM64FlagEQ)
		return true
	}
	// match: (CMPconst (MOVDconst [x]) [y])
	// cond: x<y && uint64(x)<uint64(y)
	// result: (FlagLT_ULT)
	for {
		y := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		x := v_0.AuxInt
		if !(x < y && uint64(x) < uint64(y)) {
			break
		}
		v.reset(OpARM64FlagLT_ULT)
		return true
	}
	// match: (CMPconst (MOVDconst [x]) [y])
	// cond: x<y && uint64(x)>uint64(y)
	// result: (FlagLT_UGT)
	for {
		y := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		x := v_0.AuxInt
		if !(x < y && uint64(x) > uint64(y)) {
			break
		}
		v.reset(OpARM64FlagLT_UGT)
		return true
	}
	// match: (CMPconst (MOVDconst [x]) [y])
	// cond: x>y && uint64(x)<uint64(y)
	// result: (FlagGT_ULT)
	for {
		y := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		x := v_0.AuxInt
		if !(x > y && uint64(x) < uint64(y)) {
			break
		}
		v.reset(OpARM64FlagGT_ULT)
		return true
	}
	// match: (CMPconst (MOVDconst [x]) [y])
	// cond: x>y && uint64(x)>uint64(y)
	// result: (FlagGT_UGT)
	for {
		y := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		x := v_0.AuxInt
		if !(x > y && uint64(x) > uint64(y)) {
			break
		}
		v.reset(OpARM64FlagGT_UGT)
		return true
	}
	// match: (CMPconst (MOVBUreg _) [c])
	// cond: 0xff < c
	// result: (FlagLT_ULT)
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVBUreg {
			break
		}
		if !(0xff < c) {
			break
		}
		v.reset(OpARM64FlagLT_ULT)
		return true
	}
	// match: (CMPconst (MOVHUreg _) [c])
	// cond: 0xffff < c
	// result: (FlagLT_ULT)
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVHUreg {
			break
		}
		if !(0xffff < c) {
			break
		}
		v.reset(OpARM64FlagLT_ULT)
		return true
	}
	// match: (CMPconst (MOVWUreg _) [c])
	// cond: 0xffffffff < c
	// result: (FlagLT_ULT)
	for {
		c := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVWUreg {
			break
		}
		if !(0xffffffff < c) {
			break
		}
		v.reset(OpARM64FlagLT_ULT)
		return true
	}
	// match: (CMPconst (ANDconst _ [m]) [n])
	// cond: 0 <= m && m < n
	// result: (FlagLT_ULT)
	for {
		n := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpARM64ANDconst {
			break
		}
		m := v_0.AuxInt
		if !(0 <= m && m < n) {
			break
		}
		v.reset(OpARM64FlagLT_ULT)
		return true
	}
	// match: (CMPconst (SRLconst _ [c]) [n])
	// cond: 0 <= n && 0 < c && c <= 63 && (1<<uint64(64-c)) <= uint64(n)
	// result: (FlagLT_ULT)
	for {
		n := v.AuxInt
		v_0 := v.Args[0]
		if v_0.Op != OpARM64SRLconst {
			break
		}
		c := v_0.AuxInt
		if !(0 <= n && 0 < c && c <= 63 && (1<<uint64(64-c)) <= uint64(n)) {
			break
		}
		v.reset(OpARM64FlagLT_ULT)
		return true
	}
	return false
}
  4307  func rewriteValueARM64_OpARM64CMPshiftLL_0(v *Value) bool {
  4308  	b := v.Block
  4309  	_ = b
  4310  	// match: (CMPshiftLL (MOVDconst [c]) x [d])
  4311  	// cond:
  4312  	// result: (InvertFlags (CMPconst [c] (SLLconst <x.Type> x [d])))
  4313  	for {
  4314  		d := v.AuxInt
  4315  		_ = v.Args[1]
  4316  		v_0 := v.Args[0]
  4317  		if v_0.Op != OpARM64MOVDconst {
  4318  			break
  4319  		}
  4320  		c := v_0.AuxInt
  4321  		x := v.Args[1]
  4322  		v.reset(OpARM64InvertFlags)
  4323  		v0 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
  4324  		v0.AuxInt = c
  4325  		v1 := b.NewValue0(v.Pos, OpARM64SLLconst, x.Type)
  4326  		v1.AuxInt = d
  4327  		v1.AddArg(x)
  4328  		v0.AddArg(v1)
  4329  		v.AddArg(v0)
  4330  		return true
  4331  	}
  4332  	// match: (CMPshiftLL x (MOVDconst [c]) [d])
  4333  	// cond:
  4334  	// result: (CMPconst x [int64(uint64(c)<<uint64(d))])
  4335  	for {
  4336  		d := v.AuxInt
  4337  		_ = v.Args[1]
  4338  		x := v.Args[0]
  4339  		v_1 := v.Args[1]
  4340  		if v_1.Op != OpARM64MOVDconst {
  4341  			break
  4342  		}
  4343  		c := v_1.AuxInt
  4344  		v.reset(OpARM64CMPconst)
  4345  		v.AuxInt = int64(uint64(c) << uint64(d))
  4346  		v.AddArg(x)
  4347  		return true
  4348  	}
  4349  	return false
  4350  }
// rewriteValueARM64_OpARM64CMPshiftRA_0 rewrites CMPshiftRA (compare against an
// arithmetically right-shifted operand) when one side is a MOVDconst, mirroring
// the CMPshiftLL rules but with an arithmetic (sign-preserving) shift fold.
func rewriteValueARM64_OpARM64CMPshiftRA_0(v *Value) bool {
	b := v.Block
	_ = b
	// match: (CMPshiftRA (MOVDconst [c]) x [d])
	// cond:
	// result: (InvertFlags (CMPconst [c] (SRAconst <x.Type> x [d])))
	for {
		d := v.AuxInt
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		x := v.Args[1]
		v.reset(OpARM64InvertFlags)
		v0 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
		v0.AuxInt = c
		v1 := b.NewValue0(v.Pos, OpARM64SRAconst, x.Type)
		v1.AuxInt = d
		v1.AddArg(x)
		v0.AddArg(v1)
		v.AddArg(v0)
		return true
	}
	// match: (CMPshiftRA x (MOVDconst [c]) [d])
	// cond:
	// result: (CMPconst x [c>>uint64(d)])
	for {
		d := v.AuxInt
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		v.reset(OpARM64CMPconst)
		v.AuxInt = c >> uint64(d)
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64CMPshiftRL_0 rewrites CMPshiftRL (compare against a
// logically right-shifted operand) when one side is a MOVDconst, mirroring the
// CMPshiftLL rules but with an unsigned (zero-filling) shift fold.
func rewriteValueARM64_OpARM64CMPshiftRL_0(v *Value) bool {
	b := v.Block
	_ = b
	// match: (CMPshiftRL (MOVDconst [c]) x [d])
	// cond:
	// result: (InvertFlags (CMPconst [c] (SRLconst <x.Type> x [d])))
	for {
		d := v.AuxInt
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		x := v.Args[1]
		v.reset(OpARM64InvertFlags)
		v0 := b.NewValue0(v.Pos, OpARM64CMPconst, types.TypeFlags)
		v0.AuxInt = c
		v1 := b.NewValue0(v.Pos, OpARM64SRLconst, x.Type)
		v1.AuxInt = d
		v1.AddArg(x)
		v0.AddArg(v1)
		v.AddArg(v0)
		return true
	}
	// match: (CMPshiftRL x (MOVDconst [c]) [d])
	// cond:
	// result: (CMPconst x [int64(uint64(c)>>uint64(d))])
	for {
		d := v.AuxInt
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		v.reset(OpARM64CMPconst)
		v.AuxInt = int64(uint64(c) >> uint64(d))
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64CSEL_0 simplifies conditional-select (CSEL) values:
// a zero operand turns it into CSEL0 (negating the condition when the zero is
// the first operand), InvertFlags is absorbed by inverting the condition,
// statically-known flags collapse it to one arm, and a boolean computed via
// (CMPWconst [0] bool) is replaced by the flag-producing op itself.
func rewriteValueARM64_OpARM64CSEL_0(v *Value) bool {
	// match: (CSEL {cc} x (MOVDconst [0]) flag)
	// cond:
	// result: (CSEL0 {cc} x flag)
	for {
		cc := v.Aux
		_ = v.Args[2]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		if v_1.AuxInt != 0 {
			break
		}
		flag := v.Args[2]
		v.reset(OpARM64CSEL0)
		v.Aux = cc
		v.AddArg(x)
		v.AddArg(flag)
		return true
	}
	// match: (CSEL {cc} (MOVDconst [0]) y flag)
	// cond:
	// result: (CSEL0 {arm64Negate(cc.(Op))} y flag)
	for {
		cc := v.Aux
		_ = v.Args[2]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		if v_0.AuxInt != 0 {
			break
		}
		y := v.Args[1]
		flag := v.Args[2]
		v.reset(OpARM64CSEL0)
		v.Aux = arm64Negate(cc.(Op))
		v.AddArg(y)
		v.AddArg(flag)
		return true
	}
	// match: (CSEL {cc} x y (InvertFlags cmp))
	// cond:
	// result: (CSEL {arm64Invert(cc.(Op))} x y cmp)
	for {
		cc := v.Aux
		_ = v.Args[2]
		x := v.Args[0]
		y := v.Args[1]
		v_2 := v.Args[2]
		if v_2.Op != OpARM64InvertFlags {
			break
		}
		cmp := v_2.Args[0]
		v.reset(OpARM64CSEL)
		v.Aux = arm64Invert(cc.(Op))
		v.AddArg(x)
		v.AddArg(y)
		v.AddArg(cmp)
		return true
	}
	// match: (CSEL {cc} x _ flag)
	// cond: ccARM64Eval(cc, flag) > 0
	// result: x
	for {
		cc := v.Aux
		_ = v.Args[2]
		x := v.Args[0]
		flag := v.Args[2]
		if !(ccARM64Eval(cc, flag) > 0) {
			break
		}
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	// match: (CSEL {cc} _ y flag)
	// cond: ccARM64Eval(cc, flag) < 0
	// result: y
	for {
		cc := v.Aux
		_ = v.Args[2]
		y := v.Args[1]
		flag := v.Args[2]
		if !(ccARM64Eval(cc, flag) < 0) {
			break
		}
		v.reset(OpCopy)
		v.Type = y.Type
		v.AddArg(y)
		return true
	}
	// match: (CSEL {cc} x y (CMPWconst [0] bool))
	// cond: cc.(Op) == OpARM64NotEqual && flagArg(bool) != nil
	// result: (CSEL {bool.Op} x y flagArg(bool))
	for {
		cc := v.Aux
		_ = v.Args[2]
		x := v.Args[0]
		y := v.Args[1]
		v_2 := v.Args[2]
		if v_2.Op != OpARM64CMPWconst {
			break
		}
		if v_2.AuxInt != 0 {
			break
		}
		bool := v_2.Args[0]
		if !(cc.(Op) == OpARM64NotEqual && flagArg(bool) != nil) {
			break
		}
		v.reset(OpARM64CSEL)
		v.Aux = bool.Op
		v.AddArg(x)
		v.AddArg(y)
		v.AddArg(flagArg(bool))
		return true
	}
	// match: (CSEL {cc} x y (CMPWconst [0] bool))
	// cond: cc.(Op) == OpARM64Equal && flagArg(bool) != nil
	// result: (CSEL {arm64Negate(bool.Op)} x y flagArg(bool))
	for {
		cc := v.Aux
		_ = v.Args[2]
		x := v.Args[0]
		y := v.Args[1]
		v_2 := v.Args[2]
		if v_2.Op != OpARM64CMPWconst {
			break
		}
		if v_2.AuxInt != 0 {
			break
		}
		bool := v_2.Args[0]
		if !(cc.(Op) == OpARM64Equal && flagArg(bool) != nil) {
			break
		}
		v.reset(OpARM64CSEL)
		v.Aux = arm64Negate(bool.Op)
		v.AddArg(x)
		v.AddArg(y)
		v.AddArg(flagArg(bool))
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64CSEL0_0 simplifies CSEL0 (conditional select with an
// implicit zero second operand): InvertFlags is absorbed by inverting the
// condition, statically-known flags collapse it to x or to the constant 0, and
// a boolean tested via (CMPWconst [0] bool) is replaced by the flag-producing
// op itself. Mirrors the CSEL rules above for the zero-operand form.
func rewriteValueARM64_OpARM64CSEL0_0(v *Value) bool {
	// match: (CSEL0 {cc} x (InvertFlags cmp))
	// cond:
	// result: (CSEL0 {arm64Invert(cc.(Op))} x cmp)
	for {
		cc := v.Aux
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64InvertFlags {
			break
		}
		cmp := v_1.Args[0]
		v.reset(OpARM64CSEL0)
		v.Aux = arm64Invert(cc.(Op))
		v.AddArg(x)
		v.AddArg(cmp)
		return true
	}
	// match: (CSEL0 {cc} x flag)
	// cond: ccARM64Eval(cc, flag) > 0
	// result: x
	for {
		cc := v.Aux
		_ = v.Args[1]
		x := v.Args[0]
		flag := v.Args[1]
		if !(ccARM64Eval(cc, flag) > 0) {
			break
		}
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	// match: (CSEL0 {cc} _ flag)
	// cond: ccARM64Eval(cc, flag) < 0
	// result: (MOVDconst [0])
	for {
		cc := v.Aux
		_ = v.Args[1]
		flag := v.Args[1]
		if !(ccARM64Eval(cc, flag) < 0) {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 0
		return true
	}
	// match: (CSEL0 {cc} x (CMPWconst [0] bool))
	// cond: cc.(Op) == OpARM64NotEqual && flagArg(bool) != nil
	// result: (CSEL0 {bool.Op} x flagArg(bool))
	for {
		cc := v.Aux
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64CMPWconst {
			break
		}
		if v_1.AuxInt != 0 {
			break
		}
		bool := v_1.Args[0]
		if !(cc.(Op) == OpARM64NotEqual && flagArg(bool) != nil) {
			break
		}
		v.reset(OpARM64CSEL0)
		v.Aux = bool.Op
		v.AddArg(x)
		v.AddArg(flagArg(bool))
		return true
	}
	// match: (CSEL0 {cc} x (CMPWconst [0] bool))
	// cond: cc.(Op) == OpARM64Equal && flagArg(bool) != nil
	// result: (CSEL0 {arm64Negate(bool.Op)} x flagArg(bool))
	for {
		cc := v.Aux
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64CMPWconst {
			break
		}
		if v_1.AuxInt != 0 {
			break
		}
		bool := v_1.Args[0]
		if !(cc.(Op) == OpARM64Equal && flagArg(bool) != nil) {
			break
		}
		v.reset(OpARM64CSEL0)
		v.Aux = arm64Negate(bool.Op)
		v.AddArg(x)
		v.AddArg(flagArg(bool))
		return true
	}
	return false
}
  4687  func rewriteValueARM64_OpARM64DIV_0(v *Value) bool {
  4688  	// match: (DIV (MOVDconst [c]) (MOVDconst [d]))
  4689  	// cond:
  4690  	// result: (MOVDconst [c/d])
  4691  	for {
  4692  		_ = v.Args[1]
  4693  		v_0 := v.Args[0]
  4694  		if v_0.Op != OpARM64MOVDconst {
  4695  			break
  4696  		}
  4697  		c := v_0.AuxInt
  4698  		v_1 := v.Args[1]
  4699  		if v_1.Op != OpARM64MOVDconst {
  4700  			break
  4701  		}
  4702  		d := v_1.AuxInt
  4703  		v.reset(OpARM64MOVDconst)
  4704  		v.AuxInt = c / d
  4705  		return true
  4706  	}
  4707  	return false
  4708  }
  4709  func rewriteValueARM64_OpARM64DIVW_0(v *Value) bool {
  4710  	// match: (DIVW (MOVDconst [c]) (MOVDconst [d]))
  4711  	// cond:
  4712  	// result: (MOVDconst [int64(int32(c)/int32(d))])
  4713  	for {
  4714  		_ = v.Args[1]
  4715  		v_0 := v.Args[0]
  4716  		if v_0.Op != OpARM64MOVDconst {
  4717  			break
  4718  		}
  4719  		c := v_0.AuxInt
  4720  		v_1 := v.Args[1]
  4721  		if v_1.Op != OpARM64MOVDconst {
  4722  			break
  4723  		}
  4724  		d := v_1.AuxInt
  4725  		v.reset(OpARM64MOVDconst)
  4726  		v.AuxInt = int64(int32(c) / int32(d))
  4727  		return true
  4728  	}
  4729  	return false
  4730  }
// rewriteValueARM64_OpARM64EON_0 simplifies EON (exclusive OR NOT, x ^ ^y):
// a constant second operand becomes XORconst of the complemented constant,
// (EON x x) is the all-ones constant, and a dead shift feeding the second
// operand is merged into the EONshift* combined-shift forms.
func rewriteValueARM64_OpARM64EON_0(v *Value) bool {
	// match: (EON x (MOVDconst [c]))
	// cond:
	// result: (XORconst [^c] x)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		v.reset(OpARM64XORconst)
		v.AuxInt = ^c
		v.AddArg(x)
		return true
	}
	// match: (EON x x)
	// cond:
	// result: (MOVDconst [-1])
	for {
		_ = v.Args[1]
		x := v.Args[0]
		if x != v.Args[1] {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = -1
		return true
	}
	// match: (EON x0 x1:(SLLconst [c] y))
	// cond: clobberIfDead(x1)
	// result: (EONshiftLL x0 y [c])
	for {
		_ = v.Args[1]
		x0 := v.Args[0]
		x1 := v.Args[1]
		if x1.Op != OpARM64SLLconst {
			break
		}
		c := x1.AuxInt
		y := x1.Args[0]
		if !(clobberIfDead(x1)) {
			break
		}
		v.reset(OpARM64EONshiftLL)
		v.AuxInt = c
		v.AddArg(x0)
		v.AddArg(y)
		return true
	}
	// match: (EON x0 x1:(SRLconst [c] y))
	// cond: clobberIfDead(x1)
	// result: (EONshiftRL x0 y [c])
	for {
		_ = v.Args[1]
		x0 := v.Args[0]
		x1 := v.Args[1]
		if x1.Op != OpARM64SRLconst {
			break
		}
		c := x1.AuxInt
		y := x1.Args[0]
		if !(clobberIfDead(x1)) {
			break
		}
		v.reset(OpARM64EONshiftRL)
		v.AuxInt = c
		v.AddArg(x0)
		v.AddArg(y)
		return true
	}
	// match: (EON x0 x1:(SRAconst [c] y))
	// cond: clobberIfDead(x1)
	// result: (EONshiftRA x0 y [c])
	for {
		_ = v.Args[1]
		x0 := v.Args[0]
		x1 := v.Args[1]
		if x1.Op != OpARM64SRAconst {
			break
		}
		c := x1.AuxInt
		y := x1.Args[0]
		if !(clobberIfDead(x1)) {
			break
		}
		v.reset(OpARM64EONshiftRA)
		v.AuxInt = c
		v.AddArg(x0)
		v.AddArg(y)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64EONshiftLL_0 simplifies EONshiftLL: a constant
// shifted operand folds into XORconst of the complemented shifted constant,
// and x EON (x << c) with matching shift amounts is the all-ones constant.
func rewriteValueARM64_OpARM64EONshiftLL_0(v *Value) bool {
	// match: (EONshiftLL x (MOVDconst [c]) [d])
	// cond:
	// result: (XORconst x [^int64(uint64(c)<<uint64(d))])
	for {
		d := v.AuxInt
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		v.reset(OpARM64XORconst)
		v.AuxInt = ^int64(uint64(c) << uint64(d))
		v.AddArg(x)
		return true
	}
	// match: (EONshiftLL x (SLLconst x [c]) [d])
	// cond: c==d
	// result: (MOVDconst [-1])
	for {
		d := v.AuxInt
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64SLLconst {
			break
		}
		c := v_1.AuxInt
		if x != v_1.Args[0] {
			break
		}
		if !(c == d) {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = -1
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64EONshiftRA_0 simplifies EONshiftRA, mirroring the
// EONshiftLL rules for the arithmetic (sign-extending) right-shift form.
func rewriteValueARM64_OpARM64EONshiftRA_0(v *Value) bool {
	// match: (EONshiftRA x (MOVDconst [c]) [d])
	// cond:
	// result: (XORconst x [^(c>>uint64(d))])
	for {
		d := v.AuxInt
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		v.reset(OpARM64XORconst)
		v.AuxInt = ^(c >> uint64(d))
		v.AddArg(x)
		return true
	}
	// match: (EONshiftRA x (SRAconst x [c]) [d])
	// cond: c==d
	// result: (MOVDconst [-1])
	for {
		d := v.AuxInt
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64SRAconst {
			break
		}
		c := v_1.AuxInt
		if x != v_1.Args[0] {
			break
		}
		if !(c == d) {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = -1
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64EONshiftRL_0 simplifies EONshiftRL, mirroring the
// EONshiftLL rules for the logical (zero-filling) right-shift form.
func rewriteValueARM64_OpARM64EONshiftRL_0(v *Value) bool {
	// match: (EONshiftRL x (MOVDconst [c]) [d])
	// cond:
	// result: (XORconst x [^int64(uint64(c)>>uint64(d))])
	for {
		d := v.AuxInt
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		v.reset(OpARM64XORconst)
		v.AuxInt = ^int64(uint64(c) >> uint64(d))
		v.AddArg(x)
		return true
	}
	// match: (EONshiftRL x (SRLconst x [c]) [d])
	// cond: c==d
	// result: (MOVDconst [-1])
	for {
		d := v.AuxInt
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64SRLconst {
			break
		}
		c := v_1.AuxInt
		if x != v_1.Args[0] {
			break
		}
		if !(c == d) {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = -1
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64Equal_0 folds Equal over statically-known flag
// values (FlagEQ yields 1; any LT/GT flag yields 0) and drops InvertFlags,
// since equality is symmetric under operand swap.
func rewriteValueARM64_OpARM64Equal_0(v *Value) bool {
	// match: (Equal (FlagEQ))
	// cond:
	// result: (MOVDconst [1])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64FlagEQ {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 1
		return true
	}
	// match: (Equal (FlagLT_ULT))
	// cond:
	// result: (MOVDconst [0])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64FlagLT_ULT {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 0
		return true
	}
	// match: (Equal (FlagLT_UGT))
	// cond:
	// result: (MOVDconst [0])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64FlagLT_UGT {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 0
		return true
	}
	// match: (Equal (FlagGT_ULT))
	// cond:
	// result: (MOVDconst [0])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64FlagGT_ULT {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 0
		return true
	}
	// match: (Equal (FlagGT_UGT))
	// cond:
	// result: (MOVDconst [0])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64FlagGT_UGT {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 0
		return true
	}
	// match: (Equal (InvertFlags x))
	// cond:
	// result: (Equal x)
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64InvertFlags {
			break
		}
		x := v_0.Args[0]
		v.reset(OpARM64Equal)
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64FADDD_0 fuses a double-precision add with a multiply
// feeding either operand: a+(x*y) and (x*y)+a become FMADDD, and the negated
// multiply forms become FMSUBD.
func rewriteValueARM64_OpARM64FADDD_0(v *Value) bool {
	// match: (FADDD a (FMULD x y))
	// cond:
	// result: (FMADDD a x y)
	for {
		_ = v.Args[1]
		a := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64FMULD {
			break
		}
		_ = v_1.Args[1]
		x := v_1.Args[0]
		y := v_1.Args[1]
		v.reset(OpARM64FMADDD)
		v.AddArg(a)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
	// match: (FADDD (FMULD x y) a)
	// cond:
	// result: (FMADDD a x y)
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64FMULD {
			break
		}
		_ = v_0.Args[1]
		x := v_0.Args[0]
		y := v_0.Args[1]
		a := v.Args[1]
		v.reset(OpARM64FMADDD)
		v.AddArg(a)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
	// match: (FADDD a (FNMULD x y))
	// cond:
	// result: (FMSUBD a x y)
	for {
		_ = v.Args[1]
		a := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64FNMULD {
			break
		}
		_ = v_1.Args[1]
		x := v_1.Args[0]
		y := v_1.Args[1]
		v.reset(OpARM64FMSUBD)
		v.AddArg(a)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
	// match: (FADDD (FNMULD x y) a)
	// cond:
	// result: (FMSUBD a x y)
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64FNMULD {
			break
		}
		_ = v_0.Args[1]
		x := v_0.Args[0]
		y := v_0.Args[1]
		a := v.Args[1]
		v.reset(OpARM64FMSUBD)
		v.AddArg(a)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64FADDS_0 fuses a single-precision add with a multiply
// feeding either operand, mirroring the FADDD rules: FMULS forms become FMADDS
// and FNMULS forms become FMSUBS.
func rewriteValueARM64_OpARM64FADDS_0(v *Value) bool {
	// match: (FADDS a (FMULS x y))
	// cond:
	// result: (FMADDS a x y)
	for {
		_ = v.Args[1]
		a := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64FMULS {
			break
		}
		_ = v_1.Args[1]
		x := v_1.Args[0]
		y := v_1.Args[1]
		v.reset(OpARM64FMADDS)
		v.AddArg(a)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
	// match: (FADDS (FMULS x y) a)
	// cond:
	// result: (FMADDS a x y)
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64FMULS {
			break
		}
		_ = v_0.Args[1]
		x := v_0.Args[0]
		y := v_0.Args[1]
		a := v.Args[1]
		v.reset(OpARM64FMADDS)
		v.AddArg(a)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
	// match: (FADDS a (FNMULS x y))
	// cond:
	// result: (FMSUBS a x y)
	for {
		_ = v.Args[1]
		a := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64FNMULS {
			break
		}
		_ = v_1.Args[1]
		x := v_1.Args[0]
		y := v_1.Args[1]
		v.reset(OpARM64FMSUBS)
		v.AddArg(a)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
	// match: (FADDS (FNMULS x y) a)
	// cond:
	// result: (FMSUBS a x y)
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64FNMULS {
			break
		}
		_ = v_0.Args[1]
		x := v_0.Args[0]
		y := v_0.Args[1]
		a := v.Args[1]
		v.reset(OpARM64FMSUBS)
		v.AddArg(a)
		v.AddArg(x)
		v.AddArg(y)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64FMOVDfpgp_0 replaces an FP-to-GP register move of a
// function argument with a direct Arg of the target type, placed in the entry
// block so the argument is loaded with the desired interpretation.
func rewriteValueARM64_OpARM64FMOVDfpgp_0(v *Value) bool {
	b := v.Block
	_ = b
	// match: (FMOVDfpgp <t> (Arg [off] {sym}))
	// cond:
	// result: @b.Func.Entry (Arg <t> [off] {sym})
	for {
		t := v.Type
		v_0 := v.Args[0]
		if v_0.Op != OpArg {
			break
		}
		off := v_0.AuxInt
		sym := v_0.Aux
		// New Arg values must live in the function entry block.
		b = b.Func.Entry
		v0 := b.NewValue0(v.Pos, OpArg, t)
		v.reset(OpCopy)
		v.AddArg(v0)
		v0.AuxInt = off
		v0.Aux = sym
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64FMOVDgpfp_0 replaces a GP-to-FP register move of a
// function argument with a direct Arg of the target type, placed in the entry
// block; the mirror of the FMOVDfpgp rule above.
func rewriteValueARM64_OpARM64FMOVDgpfp_0(v *Value) bool {
	b := v.Block
	_ = b
	// match: (FMOVDgpfp <t> (Arg [off] {sym}))
	// cond:
	// result: @b.Func.Entry (Arg <t> [off] {sym})
	for {
		t := v.Type
		v_0 := v.Args[0]
		if v_0.Op != OpArg {
			break
		}
		off := v_0.AuxInt
		sym := v_0.Aux
		// New Arg values must live in the function entry block.
		b = b.Func.Entry
		v0 := b.NewValue0(v.Pos, OpArg, t)
		v.reset(OpCopy)
		v.AddArg(v0)
		v0.AuxInt = off
		v0.Aux = sym
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64FMOVDload_0 optimizes FP double-word loads: a load
// from the same address a MOVDstore just wrote becomes a register move of the
// stored value, ADDconst/MOVDaddr address arithmetic is folded into the load
// offset/symbol, and a plain (ADD ptr idx) address turns into the indexed form.
func rewriteValueARM64_OpARM64FMOVDload_0(v *Value) bool {
	b := v.Block
	_ = b
	config := b.Func.Config
	_ = config
	// match: (FMOVDload [off] {sym} ptr (MOVDstore [off] {sym} ptr val _))
	// cond:
	// result: (FMOVDgpfp val)
	for {
		off := v.AuxInt
		sym := v.Aux
		_ = v.Args[1]
		ptr := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDstore {
			break
		}
		if v_1.AuxInt != off {
			break
		}
		if v_1.Aux != sym {
			break
		}
		_ = v_1.Args[2]
		if ptr != v_1.Args[0] {
			break
		}
		val := v_1.Args[1]
		v.reset(OpARM64FMOVDgpfp)
		v.AddArg(val)
		return true
	}
	// match: (FMOVDload [off1] {sym} (ADDconst [off2] ptr) mem)
	// cond: is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)
	// result: (FMOVDload [off1+off2] {sym} ptr mem)
	for {
		off1 := v.AuxInt
		sym := v.Aux
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64ADDconst {
			break
		}
		off2 := v_0.AuxInt
		ptr := v_0.Args[0]
		mem := v.Args[1]
		if !(is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
			break
		}
		v.reset(OpARM64FMOVDload)
		v.AuxInt = off1 + off2
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (FMOVDload [off] {sym} (ADD ptr idx) mem)
	// cond: off == 0 && sym == nil
	// result: (FMOVDloadidx ptr idx mem)
	for {
		off := v.AuxInt
		sym := v.Aux
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64ADD {
			break
		}
		_ = v_0.Args[1]
		ptr := v_0.Args[0]
		idx := v_0.Args[1]
		mem := v.Args[1]
		if !(off == 0 && sym == nil) {
			break
		}
		v.reset(OpARM64FMOVDloadidx)
		v.AddArg(ptr)
		v.AddArg(idx)
		v.AddArg(mem)
		return true
	}
	// match: (FMOVDload [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)
	// result: (FMOVDload [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
	for {
		off1 := v.AuxInt
		sym1 := v.Aux
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDaddr {
			break
		}
		off2 := v_0.AuxInt
		sym2 := v_0.Aux
		ptr := v_0.Args[0]
		mem := v.Args[1]
		if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
			break
		}
		v.reset(OpARM64FMOVDload)
		v.AuxInt = off1 + off2
		v.Aux = mergeSym(sym1, sym2)
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64FMOVDloadidx_0 converts an indexed FP double-word
// load back to the offset form when either the index or the pointer operand is
// a constant, using the constant as the load offset.
func rewriteValueARM64_OpARM64FMOVDloadidx_0(v *Value) bool {
	// match: (FMOVDloadidx ptr (MOVDconst [c]) mem)
	// cond:
	// result: (FMOVDload [c] ptr mem)
	for {
		_ = v.Args[2]
		ptr := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		mem := v.Args[2]
		v.reset(OpARM64FMOVDload)
		v.AuxInt = c
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (FMOVDloadidx (MOVDconst [c]) ptr mem)
	// cond:
	// result: (FMOVDload [c] ptr mem)
	for {
		_ = v.Args[2]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		ptr := v.Args[1]
		mem := v.Args[2]
		v.reset(OpARM64FMOVDload)
		v.AuxInt = c
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64FMOVDstore_0 optimizes FP double-word stores: a
// store of an FMOVDgpfp-moved value becomes a plain MOVDstore of the original
// GP value, ADDconst/MOVDaddr address arithmetic is folded into the store
// offset/symbol, and a plain (ADD ptr idx) address turns into the indexed form.
func rewriteValueARM64_OpARM64FMOVDstore_0(v *Value) bool {
	b := v.Block
	_ = b
	config := b.Func.Config
	_ = config
	// match: (FMOVDstore [off] {sym} ptr (FMOVDgpfp val) mem)
	// cond:
	// result: (MOVDstore [off] {sym} ptr val mem)
	for {
		off := v.AuxInt
		sym := v.Aux
		_ = v.Args[2]
		ptr := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64FMOVDgpfp {
			break
		}
		val := v_1.Args[0]
		mem := v.Args[2]
		v.reset(OpARM64MOVDstore)
		v.AuxInt = off
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(val)
		v.AddArg(mem)
		return true
	}
	// match: (FMOVDstore [off1] {sym} (ADDconst [off2] ptr) val mem)
	// cond: is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)
	// result: (FMOVDstore [off1+off2] {sym} ptr val mem)
	for {
		off1 := v.AuxInt
		sym := v.Aux
		_ = v.Args[2]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64ADDconst {
			break
		}
		off2 := v_0.AuxInt
		ptr := v_0.Args[0]
		val := v.Args[1]
		mem := v.Args[2]
		if !(is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
			break
		}
		v.reset(OpARM64FMOVDstore)
		v.AuxInt = off1 + off2
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(val)
		v.AddArg(mem)
		return true
	}
	// match: (FMOVDstore [off] {sym} (ADD ptr idx) val mem)
	// cond: off == 0 && sym == nil
	// result: (FMOVDstoreidx ptr idx val mem)
	for {
		off := v.AuxInt
		sym := v.Aux
		_ = v.Args[2]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64ADD {
			break
		}
		_ = v_0.Args[1]
		ptr := v_0.Args[0]
		idx := v_0.Args[1]
		val := v.Args[1]
		mem := v.Args[2]
		if !(off == 0 && sym == nil) {
			break
		}
		v.reset(OpARM64FMOVDstoreidx)
		v.AddArg(ptr)
		v.AddArg(idx)
		v.AddArg(val)
		v.AddArg(mem)
		return true
	}
	// match: (FMOVDstore [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) val mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)
	// result: (FMOVDstore [off1+off2] {mergeSym(sym1,sym2)} ptr val mem)
	for {
		off1 := v.AuxInt
		sym1 := v.Aux
		_ = v.Args[2]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDaddr {
			break
		}
		off2 := v_0.AuxInt
		sym2 := v_0.Aux
		ptr := v_0.Args[0]
		val := v.Args[1]
		mem := v.Args[2]
		if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
			break
		}
		v.reset(OpARM64FMOVDstore)
		v.AuxInt = off1 + off2
		v.Aux = mergeSym(sym1, sym2)
		v.AddArg(ptr)
		v.AddArg(val)
		v.AddArg(mem)
		return true
	}
	return false
}
  5488  func rewriteValueARM64_OpARM64FMOVDstoreidx_0(v *Value) bool {
  5489  	// match: (FMOVDstoreidx ptr (MOVDconst [c]) val mem)
  5490  	// cond:
  5491  	// result: (FMOVDstore [c] ptr val mem)
  5492  	for {
  5493  		_ = v.Args[3]
  5494  		ptr := v.Args[0]
  5495  		v_1 := v.Args[1]
  5496  		if v_1.Op != OpARM64MOVDconst {
  5497  			break
  5498  		}
  5499  		c := v_1.AuxInt
  5500  		val := v.Args[2]
  5501  		mem := v.Args[3]
  5502  		v.reset(OpARM64FMOVDstore)
  5503  		v.AuxInt = c
  5504  		v.AddArg(ptr)
  5505  		v.AddArg(val)
  5506  		v.AddArg(mem)
  5507  		return true
  5508  	}
  5509  	// match: (FMOVDstoreidx (MOVDconst [c]) idx val mem)
  5510  	// cond:
  5511  	// result: (FMOVDstore [c] idx val mem)
  5512  	for {
  5513  		_ = v.Args[3]
  5514  		v_0 := v.Args[0]
  5515  		if v_0.Op != OpARM64MOVDconst {
  5516  			break
  5517  		}
  5518  		c := v_0.AuxInt
  5519  		idx := v.Args[1]
  5520  		val := v.Args[2]
  5521  		mem := v.Args[3]
  5522  		v.reset(OpARM64FMOVDstore)
  5523  		v.AuxInt = c
  5524  		v.AddArg(idx)
  5525  		v.AddArg(val)
  5526  		v.AddArg(mem)
  5527  		return true
  5528  	}
  5529  	return false
  5530  }
  5531  func rewriteValueARM64_OpARM64FMOVSload_0(v *Value) bool {
  5532  	b := v.Block
  5533  	_ = b
  5534  	config := b.Func.Config
  5535  	_ = config
  5536  	// match: (FMOVSload [off] {sym} ptr (MOVWstore [off] {sym} ptr val _))
  5537  	// cond:
  5538  	// result: (FMOVSgpfp val)
  5539  	for {
  5540  		off := v.AuxInt
  5541  		sym := v.Aux
  5542  		_ = v.Args[1]
  5543  		ptr := v.Args[0]
  5544  		v_1 := v.Args[1]
  5545  		if v_1.Op != OpARM64MOVWstore {
  5546  			break
  5547  		}
  5548  		if v_1.AuxInt != off {
  5549  			break
  5550  		}
  5551  		if v_1.Aux != sym {
  5552  			break
  5553  		}
  5554  		_ = v_1.Args[2]
  5555  		if ptr != v_1.Args[0] {
  5556  			break
  5557  		}
  5558  		val := v_1.Args[1]
  5559  		v.reset(OpARM64FMOVSgpfp)
  5560  		v.AddArg(val)
  5561  		return true
  5562  	}
  5563  	// match: (FMOVSload [off1] {sym} (ADDconst [off2] ptr) mem)
  5564  	// cond: is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)
  5565  	// result: (FMOVSload [off1+off2] {sym} ptr mem)
  5566  	for {
  5567  		off1 := v.AuxInt
  5568  		sym := v.Aux
  5569  		_ = v.Args[1]
  5570  		v_0 := v.Args[0]
  5571  		if v_0.Op != OpARM64ADDconst {
  5572  			break
  5573  		}
  5574  		off2 := v_0.AuxInt
  5575  		ptr := v_0.Args[0]
  5576  		mem := v.Args[1]
  5577  		if !(is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
  5578  			break
  5579  		}
  5580  		v.reset(OpARM64FMOVSload)
  5581  		v.AuxInt = off1 + off2
  5582  		v.Aux = sym
  5583  		v.AddArg(ptr)
  5584  		v.AddArg(mem)
  5585  		return true
  5586  	}
  5587  	// match: (FMOVSload [off] {sym} (ADD ptr idx) mem)
  5588  	// cond: off == 0 && sym == nil
  5589  	// result: (FMOVSloadidx ptr idx mem)
  5590  	for {
  5591  		off := v.AuxInt
  5592  		sym := v.Aux
  5593  		_ = v.Args[1]
  5594  		v_0 := v.Args[0]
  5595  		if v_0.Op != OpARM64ADD {
  5596  			break
  5597  		}
  5598  		_ = v_0.Args[1]
  5599  		ptr := v_0.Args[0]
  5600  		idx := v_0.Args[1]
  5601  		mem := v.Args[1]
  5602  		if !(off == 0 && sym == nil) {
  5603  			break
  5604  		}
  5605  		v.reset(OpARM64FMOVSloadidx)
  5606  		v.AddArg(ptr)
  5607  		v.AddArg(idx)
  5608  		v.AddArg(mem)
  5609  		return true
  5610  	}
  5611  	// match: (FMOVSload [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) mem)
  5612  	// cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)
  5613  	// result: (FMOVSload [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
  5614  	for {
  5615  		off1 := v.AuxInt
  5616  		sym1 := v.Aux
  5617  		_ = v.Args[1]
  5618  		v_0 := v.Args[0]
  5619  		if v_0.Op != OpARM64MOVDaddr {
  5620  			break
  5621  		}
  5622  		off2 := v_0.AuxInt
  5623  		sym2 := v_0.Aux
  5624  		ptr := v_0.Args[0]
  5625  		mem := v.Args[1]
  5626  		if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
  5627  			break
  5628  		}
  5629  		v.reset(OpARM64FMOVSload)
  5630  		v.AuxInt = off1 + off2
  5631  		v.Aux = mergeSym(sym1, sym2)
  5632  		v.AddArg(ptr)
  5633  		v.AddArg(mem)
  5634  		return true
  5635  	}
  5636  	return false
  5637  }
  5638  func rewriteValueARM64_OpARM64FMOVSloadidx_0(v *Value) bool {
  5639  	// match: (FMOVSloadidx ptr (MOVDconst [c]) mem)
  5640  	// cond:
  5641  	// result: (FMOVSload [c] ptr mem)
  5642  	for {
  5643  		_ = v.Args[2]
  5644  		ptr := v.Args[0]
  5645  		v_1 := v.Args[1]
  5646  		if v_1.Op != OpARM64MOVDconst {
  5647  			break
  5648  		}
  5649  		c := v_1.AuxInt
  5650  		mem := v.Args[2]
  5651  		v.reset(OpARM64FMOVSload)
  5652  		v.AuxInt = c
  5653  		v.AddArg(ptr)
  5654  		v.AddArg(mem)
  5655  		return true
  5656  	}
  5657  	// match: (FMOVSloadidx (MOVDconst [c]) ptr mem)
  5658  	// cond:
  5659  	// result: (FMOVSload [c] ptr mem)
  5660  	for {
  5661  		_ = v.Args[2]
  5662  		v_0 := v.Args[0]
  5663  		if v_0.Op != OpARM64MOVDconst {
  5664  			break
  5665  		}
  5666  		c := v_0.AuxInt
  5667  		ptr := v.Args[1]
  5668  		mem := v.Args[2]
  5669  		v.reset(OpARM64FMOVSload)
  5670  		v.AuxInt = c
  5671  		v.AddArg(ptr)
  5672  		v.AddArg(mem)
  5673  		return true
  5674  	}
  5675  	return false
  5676  }
  5677  func rewriteValueARM64_OpARM64FMOVSstore_0(v *Value) bool {
  5678  	b := v.Block
  5679  	_ = b
  5680  	config := b.Func.Config
  5681  	_ = config
  5682  	// match: (FMOVSstore [off] {sym} ptr (FMOVSgpfp val) mem)
  5683  	// cond:
  5684  	// result: (MOVWstore [off] {sym} ptr val mem)
  5685  	for {
  5686  		off := v.AuxInt
  5687  		sym := v.Aux
  5688  		_ = v.Args[2]
  5689  		ptr := v.Args[0]
  5690  		v_1 := v.Args[1]
  5691  		if v_1.Op != OpARM64FMOVSgpfp {
  5692  			break
  5693  		}
  5694  		val := v_1.Args[0]
  5695  		mem := v.Args[2]
  5696  		v.reset(OpARM64MOVWstore)
  5697  		v.AuxInt = off
  5698  		v.Aux = sym
  5699  		v.AddArg(ptr)
  5700  		v.AddArg(val)
  5701  		v.AddArg(mem)
  5702  		return true
  5703  	}
  5704  	// match: (FMOVSstore [off1] {sym} (ADDconst [off2] ptr) val mem)
  5705  	// cond: is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)
  5706  	// result: (FMOVSstore [off1+off2] {sym} ptr val mem)
  5707  	for {
  5708  		off1 := v.AuxInt
  5709  		sym := v.Aux
  5710  		_ = v.Args[2]
  5711  		v_0 := v.Args[0]
  5712  		if v_0.Op != OpARM64ADDconst {
  5713  			break
  5714  		}
  5715  		off2 := v_0.AuxInt
  5716  		ptr := v_0.Args[0]
  5717  		val := v.Args[1]
  5718  		mem := v.Args[2]
  5719  		if !(is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
  5720  			break
  5721  		}
  5722  		v.reset(OpARM64FMOVSstore)
  5723  		v.AuxInt = off1 + off2
  5724  		v.Aux = sym
  5725  		v.AddArg(ptr)
  5726  		v.AddArg(val)
  5727  		v.AddArg(mem)
  5728  		return true
  5729  	}
  5730  	// match: (FMOVSstore [off] {sym} (ADD ptr idx) val mem)
  5731  	// cond: off == 0 && sym == nil
  5732  	// result: (FMOVSstoreidx ptr idx val mem)
  5733  	for {
  5734  		off := v.AuxInt
  5735  		sym := v.Aux
  5736  		_ = v.Args[2]
  5737  		v_0 := v.Args[0]
  5738  		if v_0.Op != OpARM64ADD {
  5739  			break
  5740  		}
  5741  		_ = v_0.Args[1]
  5742  		ptr := v_0.Args[0]
  5743  		idx := v_0.Args[1]
  5744  		val := v.Args[1]
  5745  		mem := v.Args[2]
  5746  		if !(off == 0 && sym == nil) {
  5747  			break
  5748  		}
  5749  		v.reset(OpARM64FMOVSstoreidx)
  5750  		v.AddArg(ptr)
  5751  		v.AddArg(idx)
  5752  		v.AddArg(val)
  5753  		v.AddArg(mem)
  5754  		return true
  5755  	}
  5756  	// match: (FMOVSstore [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) val mem)
  5757  	// cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)
  5758  	// result: (FMOVSstore [off1+off2] {mergeSym(sym1,sym2)} ptr val mem)
  5759  	for {
  5760  		off1 := v.AuxInt
  5761  		sym1 := v.Aux
  5762  		_ = v.Args[2]
  5763  		v_0 := v.Args[0]
  5764  		if v_0.Op != OpARM64MOVDaddr {
  5765  			break
  5766  		}
  5767  		off2 := v_0.AuxInt
  5768  		sym2 := v_0.Aux
  5769  		ptr := v_0.Args[0]
  5770  		val := v.Args[1]
  5771  		mem := v.Args[2]
  5772  		if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
  5773  			break
  5774  		}
  5775  		v.reset(OpARM64FMOVSstore)
  5776  		v.AuxInt = off1 + off2
  5777  		v.Aux = mergeSym(sym1, sym2)
  5778  		v.AddArg(ptr)
  5779  		v.AddArg(val)
  5780  		v.AddArg(mem)
  5781  		return true
  5782  	}
  5783  	return false
  5784  }
  5785  func rewriteValueARM64_OpARM64FMOVSstoreidx_0(v *Value) bool {
  5786  	// match: (FMOVSstoreidx ptr (MOVDconst [c]) val mem)
  5787  	// cond:
  5788  	// result: (FMOVSstore [c] ptr val mem)
  5789  	for {
  5790  		_ = v.Args[3]
  5791  		ptr := v.Args[0]
  5792  		v_1 := v.Args[1]
  5793  		if v_1.Op != OpARM64MOVDconst {
  5794  			break
  5795  		}
  5796  		c := v_1.AuxInt
  5797  		val := v.Args[2]
  5798  		mem := v.Args[3]
  5799  		v.reset(OpARM64FMOVSstore)
  5800  		v.AuxInt = c
  5801  		v.AddArg(ptr)
  5802  		v.AddArg(val)
  5803  		v.AddArg(mem)
  5804  		return true
  5805  	}
  5806  	// match: (FMOVSstoreidx (MOVDconst [c]) idx val mem)
  5807  	// cond:
  5808  	// result: (FMOVSstore [c] idx val mem)
  5809  	for {
  5810  		_ = v.Args[3]
  5811  		v_0 := v.Args[0]
  5812  		if v_0.Op != OpARM64MOVDconst {
  5813  			break
  5814  		}
  5815  		c := v_0.AuxInt
  5816  		idx := v.Args[1]
  5817  		val := v.Args[2]
  5818  		mem := v.Args[3]
  5819  		v.reset(OpARM64FMOVSstore)
  5820  		v.AuxInt = c
  5821  		v.AddArg(idx)
  5822  		v.AddArg(val)
  5823  		v.AddArg(mem)
  5824  		return true
  5825  	}
  5826  	return false
  5827  }
  5828  func rewriteValueARM64_OpARM64FMULD_0(v *Value) bool {
  5829  	// match: (FMULD (FNEGD x) y)
  5830  	// cond:
  5831  	// result: (FNMULD x y)
  5832  	for {
  5833  		_ = v.Args[1]
  5834  		v_0 := v.Args[0]
  5835  		if v_0.Op != OpARM64FNEGD {
  5836  			break
  5837  		}
  5838  		x := v_0.Args[0]
  5839  		y := v.Args[1]
  5840  		v.reset(OpARM64FNMULD)
  5841  		v.AddArg(x)
  5842  		v.AddArg(y)
  5843  		return true
  5844  	}
  5845  	// match: (FMULD y (FNEGD x))
  5846  	// cond:
  5847  	// result: (FNMULD x y)
  5848  	for {
  5849  		_ = v.Args[1]
  5850  		y := v.Args[0]
  5851  		v_1 := v.Args[1]
  5852  		if v_1.Op != OpARM64FNEGD {
  5853  			break
  5854  		}
  5855  		x := v_1.Args[0]
  5856  		v.reset(OpARM64FNMULD)
  5857  		v.AddArg(x)
  5858  		v.AddArg(y)
  5859  		return true
  5860  	}
  5861  	return false
  5862  }
  5863  func rewriteValueARM64_OpARM64FMULS_0(v *Value) bool {
  5864  	// match: (FMULS (FNEGS x) y)
  5865  	// cond:
  5866  	// result: (FNMULS x y)
  5867  	for {
  5868  		_ = v.Args[1]
  5869  		v_0 := v.Args[0]
  5870  		if v_0.Op != OpARM64FNEGS {
  5871  			break
  5872  		}
  5873  		x := v_0.Args[0]
  5874  		y := v.Args[1]
  5875  		v.reset(OpARM64FNMULS)
  5876  		v.AddArg(x)
  5877  		v.AddArg(y)
  5878  		return true
  5879  	}
  5880  	// match: (FMULS y (FNEGS x))
  5881  	// cond:
  5882  	// result: (FNMULS x y)
  5883  	for {
  5884  		_ = v.Args[1]
  5885  		y := v.Args[0]
  5886  		v_1 := v.Args[1]
  5887  		if v_1.Op != OpARM64FNEGS {
  5888  			break
  5889  		}
  5890  		x := v_1.Args[0]
  5891  		v.reset(OpARM64FNMULS)
  5892  		v.AddArg(x)
  5893  		v.AddArg(y)
  5894  		return true
  5895  	}
  5896  	return false
  5897  }
  5898  func rewriteValueARM64_OpARM64FNEGD_0(v *Value) bool {
  5899  	// match: (FNEGD (FMULD x y))
  5900  	// cond:
  5901  	// result: (FNMULD x y)
  5902  	for {
  5903  		v_0 := v.Args[0]
  5904  		if v_0.Op != OpARM64FMULD {
  5905  			break
  5906  		}
  5907  		_ = v_0.Args[1]
  5908  		x := v_0.Args[0]
  5909  		y := v_0.Args[1]
  5910  		v.reset(OpARM64FNMULD)
  5911  		v.AddArg(x)
  5912  		v.AddArg(y)
  5913  		return true
  5914  	}
  5915  	// match: (FNEGD (FNMULD x y))
  5916  	// cond:
  5917  	// result: (FMULD x y)
  5918  	for {
  5919  		v_0 := v.Args[0]
  5920  		if v_0.Op != OpARM64FNMULD {
  5921  			break
  5922  		}
  5923  		_ = v_0.Args[1]
  5924  		x := v_0.Args[0]
  5925  		y := v_0.Args[1]
  5926  		v.reset(OpARM64FMULD)
  5927  		v.AddArg(x)
  5928  		v.AddArg(y)
  5929  		return true
  5930  	}
  5931  	return false
  5932  }
  5933  func rewriteValueARM64_OpARM64FNEGS_0(v *Value) bool {
  5934  	// match: (FNEGS (FMULS x y))
  5935  	// cond:
  5936  	// result: (FNMULS x y)
  5937  	for {
  5938  		v_0 := v.Args[0]
  5939  		if v_0.Op != OpARM64FMULS {
  5940  			break
  5941  		}
  5942  		_ = v_0.Args[1]
  5943  		x := v_0.Args[0]
  5944  		y := v_0.Args[1]
  5945  		v.reset(OpARM64FNMULS)
  5946  		v.AddArg(x)
  5947  		v.AddArg(y)
  5948  		return true
  5949  	}
  5950  	// match: (FNEGS (FNMULS x y))
  5951  	// cond:
  5952  	// result: (FMULS x y)
  5953  	for {
  5954  		v_0 := v.Args[0]
  5955  		if v_0.Op != OpARM64FNMULS {
  5956  			break
  5957  		}
  5958  		_ = v_0.Args[1]
  5959  		x := v_0.Args[0]
  5960  		y := v_0.Args[1]
  5961  		v.reset(OpARM64FMULS)
  5962  		v.AddArg(x)
  5963  		v.AddArg(y)
  5964  		return true
  5965  	}
  5966  	return false
  5967  }
  5968  func rewriteValueARM64_OpARM64FNMULD_0(v *Value) bool {
  5969  	// match: (FNMULD (FNEGD x) y)
  5970  	// cond:
  5971  	// result: (FMULD x y)
  5972  	for {
  5973  		_ = v.Args[1]
  5974  		v_0 := v.Args[0]
  5975  		if v_0.Op != OpARM64FNEGD {
  5976  			break
  5977  		}
  5978  		x := v_0.Args[0]
  5979  		y := v.Args[1]
  5980  		v.reset(OpARM64FMULD)
  5981  		v.AddArg(x)
  5982  		v.AddArg(y)
  5983  		return true
  5984  	}
  5985  	// match: (FNMULD y (FNEGD x))
  5986  	// cond:
  5987  	// result: (FMULD x y)
  5988  	for {
  5989  		_ = v.Args[1]
  5990  		y := v.Args[0]
  5991  		v_1 := v.Args[1]
  5992  		if v_1.Op != OpARM64FNEGD {
  5993  			break
  5994  		}
  5995  		x := v_1.Args[0]
  5996  		v.reset(OpARM64FMULD)
  5997  		v.AddArg(x)
  5998  		v.AddArg(y)
  5999  		return true
  6000  	}
  6001  	return false
  6002  }
  6003  func rewriteValueARM64_OpARM64FNMULS_0(v *Value) bool {
  6004  	// match: (FNMULS (FNEGS x) y)
  6005  	// cond:
  6006  	// result: (FMULS x y)
  6007  	for {
  6008  		_ = v.Args[1]
  6009  		v_0 := v.Args[0]
  6010  		if v_0.Op != OpARM64FNEGS {
  6011  			break
  6012  		}
  6013  		x := v_0.Args[0]
  6014  		y := v.Args[1]
  6015  		v.reset(OpARM64FMULS)
  6016  		v.AddArg(x)
  6017  		v.AddArg(y)
  6018  		return true
  6019  	}
  6020  	// match: (FNMULS y (FNEGS x))
  6021  	// cond:
  6022  	// result: (FMULS x y)
  6023  	for {
  6024  		_ = v.Args[1]
  6025  		y := v.Args[0]
  6026  		v_1 := v.Args[1]
  6027  		if v_1.Op != OpARM64FNEGS {
  6028  			break
  6029  		}
  6030  		x := v_1.Args[0]
  6031  		v.reset(OpARM64FMULS)
  6032  		v.AddArg(x)
  6033  		v.AddArg(y)
  6034  		return true
  6035  	}
  6036  	return false
  6037  }
  6038  func rewriteValueARM64_OpARM64FSUBD_0(v *Value) bool {
  6039  	// match: (FSUBD a (FMULD x y))
  6040  	// cond:
  6041  	// result: (FMSUBD a x y)
  6042  	for {
  6043  		_ = v.Args[1]
  6044  		a := v.Args[0]
  6045  		v_1 := v.Args[1]
  6046  		if v_1.Op != OpARM64FMULD {
  6047  			break
  6048  		}
  6049  		_ = v_1.Args[1]
  6050  		x := v_1.Args[0]
  6051  		y := v_1.Args[1]
  6052  		v.reset(OpARM64FMSUBD)
  6053  		v.AddArg(a)
  6054  		v.AddArg(x)
  6055  		v.AddArg(y)
  6056  		return true
  6057  	}
  6058  	// match: (FSUBD (FMULD x y) a)
  6059  	// cond:
  6060  	// result: (FNMSUBD a x y)
  6061  	for {
  6062  		_ = v.Args[1]
  6063  		v_0 := v.Args[0]
  6064  		if v_0.Op != OpARM64FMULD {
  6065  			break
  6066  		}
  6067  		_ = v_0.Args[1]
  6068  		x := v_0.Args[0]
  6069  		y := v_0.Args[1]
  6070  		a := v.Args[1]
  6071  		v.reset(OpARM64FNMSUBD)
  6072  		v.AddArg(a)
  6073  		v.AddArg(x)
  6074  		v.AddArg(y)
  6075  		return true
  6076  	}
  6077  	// match: (FSUBD a (FNMULD x y))
  6078  	// cond:
  6079  	// result: (FMADDD a x y)
  6080  	for {
  6081  		_ = v.Args[1]
  6082  		a := v.Args[0]
  6083  		v_1 := v.Args[1]
  6084  		if v_1.Op != OpARM64FNMULD {
  6085  			break
  6086  		}
  6087  		_ = v_1.Args[1]
  6088  		x := v_1.Args[0]
  6089  		y := v_1.Args[1]
  6090  		v.reset(OpARM64FMADDD)
  6091  		v.AddArg(a)
  6092  		v.AddArg(x)
  6093  		v.AddArg(y)
  6094  		return true
  6095  	}
  6096  	// match: (FSUBD (FNMULD x y) a)
  6097  	// cond:
  6098  	// result: (FNMADDD a x y)
  6099  	for {
  6100  		_ = v.Args[1]
  6101  		v_0 := v.Args[0]
  6102  		if v_0.Op != OpARM64FNMULD {
  6103  			break
  6104  		}
  6105  		_ = v_0.Args[1]
  6106  		x := v_0.Args[0]
  6107  		y := v_0.Args[1]
  6108  		a := v.Args[1]
  6109  		v.reset(OpARM64FNMADDD)
  6110  		v.AddArg(a)
  6111  		v.AddArg(x)
  6112  		v.AddArg(y)
  6113  		return true
  6114  	}
  6115  	return false
  6116  }
  6117  func rewriteValueARM64_OpARM64FSUBS_0(v *Value) bool {
  6118  	// match: (FSUBS a (FMULS x y))
  6119  	// cond:
  6120  	// result: (FMSUBS a x y)
  6121  	for {
  6122  		_ = v.Args[1]
  6123  		a := v.Args[0]
  6124  		v_1 := v.Args[1]
  6125  		if v_1.Op != OpARM64FMULS {
  6126  			break
  6127  		}
  6128  		_ = v_1.Args[1]
  6129  		x := v_1.Args[0]
  6130  		y := v_1.Args[1]
  6131  		v.reset(OpARM64FMSUBS)
  6132  		v.AddArg(a)
  6133  		v.AddArg(x)
  6134  		v.AddArg(y)
  6135  		return true
  6136  	}
  6137  	// match: (FSUBS (FMULS x y) a)
  6138  	// cond:
  6139  	// result: (FNMSUBS a x y)
  6140  	for {
  6141  		_ = v.Args[1]
  6142  		v_0 := v.Args[0]
  6143  		if v_0.Op != OpARM64FMULS {
  6144  			break
  6145  		}
  6146  		_ = v_0.Args[1]
  6147  		x := v_0.Args[0]
  6148  		y := v_0.Args[1]
  6149  		a := v.Args[1]
  6150  		v.reset(OpARM64FNMSUBS)
  6151  		v.AddArg(a)
  6152  		v.AddArg(x)
  6153  		v.AddArg(y)
  6154  		return true
  6155  	}
  6156  	// match: (FSUBS a (FNMULS x y))
  6157  	// cond:
  6158  	// result: (FMADDS a x y)
  6159  	for {
  6160  		_ = v.Args[1]
  6161  		a := v.Args[0]
  6162  		v_1 := v.Args[1]
  6163  		if v_1.Op != OpARM64FNMULS {
  6164  			break
  6165  		}
  6166  		_ = v_1.Args[1]
  6167  		x := v_1.Args[0]
  6168  		y := v_1.Args[1]
  6169  		v.reset(OpARM64FMADDS)
  6170  		v.AddArg(a)
  6171  		v.AddArg(x)
  6172  		v.AddArg(y)
  6173  		return true
  6174  	}
  6175  	// match: (FSUBS (FNMULS x y) a)
  6176  	// cond:
  6177  	// result: (FNMADDS a x y)
  6178  	for {
  6179  		_ = v.Args[1]
  6180  		v_0 := v.Args[0]
  6181  		if v_0.Op != OpARM64FNMULS {
  6182  			break
  6183  		}
  6184  		_ = v_0.Args[1]
  6185  		x := v_0.Args[0]
  6186  		y := v_0.Args[1]
  6187  		a := v.Args[1]
  6188  		v.reset(OpARM64FNMADDS)
  6189  		v.AddArg(a)
  6190  		v.AddArg(x)
  6191  		v.AddArg(y)
  6192  		return true
  6193  	}
  6194  	return false
  6195  }
  6196  func rewriteValueARM64_OpARM64GreaterEqual_0(v *Value) bool {
  6197  	// match: (GreaterEqual (FlagEQ))
  6198  	// cond:
  6199  	// result: (MOVDconst [1])
  6200  	for {
  6201  		v_0 := v.Args[0]
  6202  		if v_0.Op != OpARM64FlagEQ {
  6203  			break
  6204  		}
  6205  		v.reset(OpARM64MOVDconst)
  6206  		v.AuxInt = 1
  6207  		return true
  6208  	}
  6209  	// match: (GreaterEqual (FlagLT_ULT))
  6210  	// cond:
  6211  	// result: (MOVDconst [0])
  6212  	for {
  6213  		v_0 := v.Args[0]
  6214  		if v_0.Op != OpARM64FlagLT_ULT {
  6215  			break
  6216  		}
  6217  		v.reset(OpARM64MOVDconst)
  6218  		v.AuxInt = 0
  6219  		return true
  6220  	}
  6221  	// match: (GreaterEqual (FlagLT_UGT))
  6222  	// cond:
  6223  	// result: (MOVDconst [0])
  6224  	for {
  6225  		v_0 := v.Args[0]
  6226  		if v_0.Op != OpARM64FlagLT_UGT {
  6227  			break
  6228  		}
  6229  		v.reset(OpARM64MOVDconst)
  6230  		v.AuxInt = 0
  6231  		return true
  6232  	}
  6233  	// match: (GreaterEqual (FlagGT_ULT))
  6234  	// cond:
  6235  	// result: (MOVDconst [1])
  6236  	for {
  6237  		v_0 := v.Args[0]
  6238  		if v_0.Op != OpARM64FlagGT_ULT {
  6239  			break
  6240  		}
  6241  		v.reset(OpARM64MOVDconst)
  6242  		v.AuxInt = 1
  6243  		return true
  6244  	}
  6245  	// match: (GreaterEqual (FlagGT_UGT))
  6246  	// cond:
  6247  	// result: (MOVDconst [1])
  6248  	for {
  6249  		v_0 := v.Args[0]
  6250  		if v_0.Op != OpARM64FlagGT_UGT {
  6251  			break
  6252  		}
  6253  		v.reset(OpARM64MOVDconst)
  6254  		v.AuxInt = 1
  6255  		return true
  6256  	}
  6257  	// match: (GreaterEqual (InvertFlags x))
  6258  	// cond:
  6259  	// result: (LessEqual x)
  6260  	for {
  6261  		v_0 := v.Args[0]
  6262  		if v_0.Op != OpARM64InvertFlags {
  6263  			break
  6264  		}
  6265  		x := v_0.Args[0]
  6266  		v.reset(OpARM64LessEqual)
  6267  		v.AddArg(x)
  6268  		return true
  6269  	}
  6270  	return false
  6271  }
  6272  func rewriteValueARM64_OpARM64GreaterEqualU_0(v *Value) bool {
  6273  	// match: (GreaterEqualU (FlagEQ))
  6274  	// cond:
  6275  	// result: (MOVDconst [1])
  6276  	for {
  6277  		v_0 := v.Args[0]
  6278  		if v_0.Op != OpARM64FlagEQ {
  6279  			break
  6280  		}
  6281  		v.reset(OpARM64MOVDconst)
  6282  		v.AuxInt = 1
  6283  		return true
  6284  	}
  6285  	// match: (GreaterEqualU (FlagLT_ULT))
  6286  	// cond:
  6287  	// result: (MOVDconst [0])
  6288  	for {
  6289  		v_0 := v.Args[0]
  6290  		if v_0.Op != OpARM64FlagLT_ULT {
  6291  			break
  6292  		}
  6293  		v.reset(OpARM64MOVDconst)
  6294  		v.AuxInt = 0
  6295  		return true
  6296  	}
  6297  	// match: (GreaterEqualU (FlagLT_UGT))
  6298  	// cond:
  6299  	// result: (MOVDconst [1])
  6300  	for {
  6301  		v_0 := v.Args[0]
  6302  		if v_0.Op != OpARM64FlagLT_UGT {
  6303  			break
  6304  		}
  6305  		v.reset(OpARM64MOVDconst)
  6306  		v.AuxInt = 1
  6307  		return true
  6308  	}
  6309  	// match: (GreaterEqualU (FlagGT_ULT))
  6310  	// cond:
  6311  	// result: (MOVDconst [0])
  6312  	for {
  6313  		v_0 := v.Args[0]
  6314  		if v_0.Op != OpARM64FlagGT_ULT {
  6315  			break
  6316  		}
  6317  		v.reset(OpARM64MOVDconst)
  6318  		v.AuxInt = 0
  6319  		return true
  6320  	}
  6321  	// match: (GreaterEqualU (FlagGT_UGT))
  6322  	// cond:
  6323  	// result: (MOVDconst [1])
  6324  	for {
  6325  		v_0 := v.Args[0]
  6326  		if v_0.Op != OpARM64FlagGT_UGT {
  6327  			break
  6328  		}
  6329  		v.reset(OpARM64MOVDconst)
  6330  		v.AuxInt = 1
  6331  		return true
  6332  	}
  6333  	// match: (GreaterEqualU (InvertFlags x))
  6334  	// cond:
  6335  	// result: (LessEqualU x)
  6336  	for {
  6337  		v_0 := v.Args[0]
  6338  		if v_0.Op != OpARM64InvertFlags {
  6339  			break
  6340  		}
  6341  		x := v_0.Args[0]
  6342  		v.reset(OpARM64LessEqualU)
  6343  		v.AddArg(x)
  6344  		return true
  6345  	}
  6346  	return false
  6347  }
  6348  func rewriteValueARM64_OpARM64GreaterThan_0(v *Value) bool {
  6349  	// match: (GreaterThan (FlagEQ))
  6350  	// cond:
  6351  	// result: (MOVDconst [0])
  6352  	for {
  6353  		v_0 := v.Args[0]
  6354  		if v_0.Op != OpARM64FlagEQ {
  6355  			break
  6356  		}
  6357  		v.reset(OpARM64MOVDconst)
  6358  		v.AuxInt = 0
  6359  		return true
  6360  	}
  6361  	// match: (GreaterThan (FlagLT_ULT))
  6362  	// cond:
  6363  	// result: (MOVDconst [0])
  6364  	for {
  6365  		v_0 := v.Args[0]
  6366  		if v_0.Op != OpARM64FlagLT_ULT {
  6367  			break
  6368  		}
  6369  		v.reset(OpARM64MOVDconst)
  6370  		v.AuxInt = 0
  6371  		return true
  6372  	}
  6373  	// match: (GreaterThan (FlagLT_UGT))
  6374  	// cond:
  6375  	// result: (MOVDconst [0])
  6376  	for {
  6377  		v_0 := v.Args[0]
  6378  		if v_0.Op != OpARM64FlagLT_UGT {
  6379  			break
  6380  		}
  6381  		v.reset(OpARM64MOVDconst)
  6382  		v.AuxInt = 0
  6383  		return true
  6384  	}
  6385  	// match: (GreaterThan (FlagGT_ULT))
  6386  	// cond:
  6387  	// result: (MOVDconst [1])
  6388  	for {
  6389  		v_0 := v.Args[0]
  6390  		if v_0.Op != OpARM64FlagGT_ULT {
  6391  			break
  6392  		}
  6393  		v.reset(OpARM64MOVDconst)
  6394  		v.AuxInt = 1
  6395  		return true
  6396  	}
  6397  	// match: (GreaterThan (FlagGT_UGT))
  6398  	// cond:
  6399  	// result: (MOVDconst [1])
  6400  	for {
  6401  		v_0 := v.Args[0]
  6402  		if v_0.Op != OpARM64FlagGT_UGT {
  6403  			break
  6404  		}
  6405  		v.reset(OpARM64MOVDconst)
  6406  		v.AuxInt = 1
  6407  		return true
  6408  	}
  6409  	// match: (GreaterThan (InvertFlags x))
  6410  	// cond:
  6411  	// result: (LessThan x)
  6412  	for {
  6413  		v_0 := v.Args[0]
  6414  		if v_0.Op != OpARM64InvertFlags {
  6415  			break
  6416  		}
  6417  		x := v_0.Args[0]
  6418  		v.reset(OpARM64LessThan)
  6419  		v.AddArg(x)
  6420  		return true
  6421  	}
  6422  	return false
  6423  }
  6424  func rewriteValueARM64_OpARM64GreaterThanU_0(v *Value) bool {
  6425  	// match: (GreaterThanU (FlagEQ))
  6426  	// cond:
  6427  	// result: (MOVDconst [0])
  6428  	for {
  6429  		v_0 := v.Args[0]
  6430  		if v_0.Op != OpARM64FlagEQ {
  6431  			break
  6432  		}
  6433  		v.reset(OpARM64MOVDconst)
  6434  		v.AuxInt = 0
  6435  		return true
  6436  	}
  6437  	// match: (GreaterThanU (FlagLT_ULT))
  6438  	// cond:
  6439  	// result: (MOVDconst [0])
  6440  	for {
  6441  		v_0 := v.Args[0]
  6442  		if v_0.Op != OpARM64FlagLT_ULT {
  6443  			break
  6444  		}
  6445  		v.reset(OpARM64MOVDconst)
  6446  		v.AuxInt = 0
  6447  		return true
  6448  	}
  6449  	// match: (GreaterThanU (FlagLT_UGT))
  6450  	// cond:
  6451  	// result: (MOVDconst [1])
  6452  	for {
  6453  		v_0 := v.Args[0]
  6454  		if v_0.Op != OpARM64FlagLT_UGT {
  6455  			break
  6456  		}
  6457  		v.reset(OpARM64MOVDconst)
  6458  		v.AuxInt = 1
  6459  		return true
  6460  	}
  6461  	// match: (GreaterThanU (FlagGT_ULT))
  6462  	// cond:
  6463  	// result: (MOVDconst [0])
  6464  	for {
  6465  		v_0 := v.Args[0]
  6466  		if v_0.Op != OpARM64FlagGT_ULT {
  6467  			break
  6468  		}
  6469  		v.reset(OpARM64MOVDconst)
  6470  		v.AuxInt = 0
  6471  		return true
  6472  	}
  6473  	// match: (GreaterThanU (FlagGT_UGT))
  6474  	// cond:
  6475  	// result: (MOVDconst [1])
  6476  	for {
  6477  		v_0 := v.Args[0]
  6478  		if v_0.Op != OpARM64FlagGT_UGT {
  6479  			break
  6480  		}
  6481  		v.reset(OpARM64MOVDconst)
  6482  		v.AuxInt = 1
  6483  		return true
  6484  	}
  6485  	// match: (GreaterThanU (InvertFlags x))
  6486  	// cond:
  6487  	// result: (LessThanU x)
  6488  	for {
  6489  		v_0 := v.Args[0]
  6490  		if v_0.Op != OpARM64InvertFlags {
  6491  			break
  6492  		}
  6493  		x := v_0.Args[0]
  6494  		v.reset(OpARM64LessThanU)
  6495  		v.AddArg(x)
  6496  		return true
  6497  	}
  6498  	return false
  6499  }
  6500  func rewriteValueARM64_OpARM64LessEqual_0(v *Value) bool {
  6501  	// match: (LessEqual (FlagEQ))
  6502  	// cond:
  6503  	// result: (MOVDconst [1])
  6504  	for {
  6505  		v_0 := v.Args[0]
  6506  		if v_0.Op != OpARM64FlagEQ {
  6507  			break
  6508  		}
  6509  		v.reset(OpARM64MOVDconst)
  6510  		v.AuxInt = 1
  6511  		return true
  6512  	}
  6513  	// match: (LessEqual (FlagLT_ULT))
  6514  	// cond:
  6515  	// result: (MOVDconst [1])
  6516  	for {
  6517  		v_0 := v.Args[0]
  6518  		if v_0.Op != OpARM64FlagLT_ULT {
  6519  			break
  6520  		}
  6521  		v.reset(OpARM64MOVDconst)
  6522  		v.AuxInt = 1
  6523  		return true
  6524  	}
  6525  	// match: (LessEqual (FlagLT_UGT))
  6526  	// cond:
  6527  	// result: (MOVDconst [1])
  6528  	for {
  6529  		v_0 := v.Args[0]
  6530  		if v_0.Op != OpARM64FlagLT_UGT {
  6531  			break
  6532  		}
  6533  		v.reset(OpARM64MOVDconst)
  6534  		v.AuxInt = 1
  6535  		return true
  6536  	}
  6537  	// match: (LessEqual (FlagGT_ULT))
  6538  	// cond:
  6539  	// result: (MOVDconst [0])
  6540  	for {
  6541  		v_0 := v.Args[0]
  6542  		if v_0.Op != OpARM64FlagGT_ULT {
  6543  			break
  6544  		}
  6545  		v.reset(OpARM64MOVDconst)
  6546  		v.AuxInt = 0
  6547  		return true
  6548  	}
  6549  	// match: (LessEqual (FlagGT_UGT))
  6550  	// cond:
  6551  	// result: (MOVDconst [0])
  6552  	for {
  6553  		v_0 := v.Args[0]
  6554  		if v_0.Op != OpARM64FlagGT_UGT {
  6555  			break
  6556  		}
  6557  		v.reset(OpARM64MOVDconst)
  6558  		v.AuxInt = 0
  6559  		return true
  6560  	}
  6561  	// match: (LessEqual (InvertFlags x))
  6562  	// cond:
  6563  	// result: (GreaterEqual x)
  6564  	for {
  6565  		v_0 := v.Args[0]
  6566  		if v_0.Op != OpARM64InvertFlags {
  6567  			break
  6568  		}
  6569  		x := v_0.Args[0]
  6570  		v.reset(OpARM64GreaterEqual)
  6571  		v.AddArg(x)
  6572  		return true
  6573  	}
  6574  	return false
  6575  }
  6576  func rewriteValueARM64_OpARM64LessEqualU_0(v *Value) bool {
  6577  	// match: (LessEqualU (FlagEQ))
  6578  	// cond:
  6579  	// result: (MOVDconst [1])
  6580  	for {
  6581  		v_0 := v.Args[0]
  6582  		if v_0.Op != OpARM64FlagEQ {
  6583  			break
  6584  		}
  6585  		v.reset(OpARM64MOVDconst)
  6586  		v.AuxInt = 1
  6587  		return true
  6588  	}
  6589  	// match: (LessEqualU (FlagLT_ULT))
  6590  	// cond:
  6591  	// result: (MOVDconst [1])
  6592  	for {
  6593  		v_0 := v.Args[0]
  6594  		if v_0.Op != OpARM64FlagLT_ULT {
  6595  			break
  6596  		}
  6597  		v.reset(OpARM64MOVDconst)
  6598  		v.AuxInt = 1
  6599  		return true
  6600  	}
  6601  	// match: (LessEqualU (FlagLT_UGT))
  6602  	// cond:
  6603  	// result: (MOVDconst [0])
  6604  	for {
  6605  		v_0 := v.Args[0]
  6606  		if v_0.Op != OpARM64FlagLT_UGT {
  6607  			break
  6608  		}
  6609  		v.reset(OpARM64MOVDconst)
  6610  		v.AuxInt = 0
  6611  		return true
  6612  	}
  6613  	// match: (LessEqualU (FlagGT_ULT))
  6614  	// cond:
  6615  	// result: (MOVDconst [1])
  6616  	for {
  6617  		v_0 := v.Args[0]
  6618  		if v_0.Op != OpARM64FlagGT_ULT {
  6619  			break
  6620  		}
  6621  		v.reset(OpARM64MOVDconst)
  6622  		v.AuxInt = 1
  6623  		return true
  6624  	}
  6625  	// match: (LessEqualU (FlagGT_UGT))
  6626  	// cond:
  6627  	// result: (MOVDconst [0])
  6628  	for {
  6629  		v_0 := v.Args[0]
  6630  		if v_0.Op != OpARM64FlagGT_UGT {
  6631  			break
  6632  		}
  6633  		v.reset(OpARM64MOVDconst)
  6634  		v.AuxInt = 0
  6635  		return true
  6636  	}
  6637  	// match: (LessEqualU (InvertFlags x))
  6638  	// cond:
  6639  	// result: (GreaterEqualU x)
  6640  	for {
  6641  		v_0 := v.Args[0]
  6642  		if v_0.Op != OpARM64InvertFlags {
  6643  			break
  6644  		}
  6645  		x := v_0.Args[0]
  6646  		v.reset(OpARM64GreaterEqualU)
  6647  		v.AddArg(x)
  6648  		return true
  6649  	}
  6650  	return false
  6651  }
// rewriteValueARM64_OpARM64LessThan_0 folds (LessThan <flags>) when the flag
// argument is a constant-flags value (FlagEQ/FlagLT_*/FlagGT_*) into a
// MOVDconst 0/1, and rewrites (LessThan (InvertFlags x)) to (GreaterThan x).
// Generated from gen/ARM64.rules — edit the rules file and regenerate; do not
// modify this function by hand.
func rewriteValueARM64_OpARM64LessThan_0(v *Value) bool {
	// match: (LessThan (FlagEQ))
	// cond:
	// result: (MOVDconst [0])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64FlagEQ {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 0
		return true
	}
	// match: (LessThan (FlagLT_ULT))
	// cond:
	// result: (MOVDconst [1])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64FlagLT_ULT {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 1
		return true
	}
	// match: (LessThan (FlagLT_UGT))
	// cond:
	// result: (MOVDconst [1])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64FlagLT_UGT {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 1
		return true
	}
	// match: (LessThan (FlagGT_ULT))
	// cond:
	// result: (MOVDconst [0])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64FlagGT_ULT {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 0
		return true
	}
	// match: (LessThan (FlagGT_UGT))
	// cond:
	// result: (MOVDconst [0])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64FlagGT_UGT {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 0
		return true
	}
	// match: (LessThan (InvertFlags x))
	// cond:
	// result: (GreaterThan x)
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64InvertFlags {
			break
		}
		x := v_0.Args[0]
		v.reset(OpARM64GreaterThan)
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64LessThanU_0 folds the unsigned comparison
// (LessThanU <flags>) when the flag argument is a constant-flags value into a
// MOVDconst 0/1 (the ULT/UGT suffix of the flag op selects the unsigned
// outcome), and rewrites (LessThanU (InvertFlags x)) to (GreaterThanU x).
// Generated from gen/ARM64.rules — edit the rules file and regenerate; do not
// modify this function by hand.
func rewriteValueARM64_OpARM64LessThanU_0(v *Value) bool {
	// match: (LessThanU (FlagEQ))
	// cond:
	// result: (MOVDconst [0])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64FlagEQ {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 0
		return true
	}
	// match: (LessThanU (FlagLT_ULT))
	// cond:
	// result: (MOVDconst [1])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64FlagLT_ULT {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 1
		return true
	}
	// match: (LessThanU (FlagLT_UGT))
	// cond:
	// result: (MOVDconst [0])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64FlagLT_UGT {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 0
		return true
	}
	// match: (LessThanU (FlagGT_ULT))
	// cond:
	// result: (MOVDconst [1])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64FlagGT_ULT {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 1
		return true
	}
	// match: (LessThanU (FlagGT_UGT))
	// cond:
	// result: (MOVDconst [0])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64FlagGT_UGT {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 0
		return true
	}
	// match: (LessThanU (InvertFlags x))
	// cond:
	// result: (GreaterThanU x)
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64InvertFlags {
			break
		}
		x := v_0.Args[0]
		v.reset(OpARM64GreaterThanU)
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64MADD_0 applies strength-reduction rules for
// (MADD a x (MOVDconst [c])) — i.e. a + x*c with the constant in the third
// argument — replacing the multiply-add with SUB/ADD/shifted-add forms for
// c = -1, 0, 1, powers of two, 2^n±1, and 3/5/7/9 times a power of two.
// Rules are tried in order; the first match wins. Generated from
// gen/ARM64.rules — edit the rules file and regenerate; do not modify this
// function by hand.
func rewriteValueARM64_OpARM64MADD_0(v *Value) bool {
	b := v.Block
	_ = b
	// match: (MADD a x (MOVDconst [-1]))
	// cond:
	// result: (SUB a x)
	for {
		_ = v.Args[2]
		a := v.Args[0]
		x := v.Args[1]
		v_2 := v.Args[2]
		if v_2.Op != OpARM64MOVDconst {
			break
		}
		if v_2.AuxInt != -1 {
			break
		}
		v.reset(OpARM64SUB)
		v.AddArg(a)
		v.AddArg(x)
		return true
	}
	// match: (MADD a _ (MOVDconst [0]))
	// cond:
	// result: a
	for {
		_ = v.Args[2]
		a := v.Args[0]
		v_2 := v.Args[2]
		if v_2.Op != OpARM64MOVDconst {
			break
		}
		if v_2.AuxInt != 0 {
			break
		}
		v.reset(OpCopy)
		v.Type = a.Type
		v.AddArg(a)
		return true
	}
	// match: (MADD a x (MOVDconst [1]))
	// cond:
	// result: (ADD a x)
	for {
		_ = v.Args[2]
		a := v.Args[0]
		x := v.Args[1]
		v_2 := v.Args[2]
		if v_2.Op != OpARM64MOVDconst {
			break
		}
		if v_2.AuxInt != 1 {
			break
		}
		v.reset(OpARM64ADD)
		v.AddArg(a)
		v.AddArg(x)
		return true
	}
	// match: (MADD a x (MOVDconst [c]))
	// cond: isPowerOfTwo(c)
	// result: (ADDshiftLL a x [log2(c)])
	for {
		_ = v.Args[2]
		a := v.Args[0]
		x := v.Args[1]
		v_2 := v.Args[2]
		if v_2.Op != OpARM64MOVDconst {
			break
		}
		c := v_2.AuxInt
		if !(isPowerOfTwo(c)) {
			break
		}
		v.reset(OpARM64ADDshiftLL)
		v.AuxInt = log2(c)
		v.AddArg(a)
		v.AddArg(x)
		return true
	}
	// match: (MADD a x (MOVDconst [c]))
	// cond: isPowerOfTwo(c-1) && c>=3
	// result: (ADD a (ADDshiftLL <x.Type> x x [log2(c-1)]))
	for {
		_ = v.Args[2]
		a := v.Args[0]
		x := v.Args[1]
		v_2 := v.Args[2]
		if v_2.Op != OpARM64MOVDconst {
			break
		}
		c := v_2.AuxInt
		if !(isPowerOfTwo(c-1) && c >= 3) {
			break
		}
		v.reset(OpARM64ADD)
		v.AddArg(a)
		v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
		v0.AuxInt = log2(c - 1)
		v0.AddArg(x)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (MADD a x (MOVDconst [c]))
	// cond: isPowerOfTwo(c+1) && c>=7
	// result: (SUB a (SUBshiftLL <x.Type> x x [log2(c+1)]))
	for {
		_ = v.Args[2]
		a := v.Args[0]
		x := v.Args[1]
		v_2 := v.Args[2]
		if v_2.Op != OpARM64MOVDconst {
			break
		}
		c := v_2.AuxInt
		if !(isPowerOfTwo(c+1) && c >= 7) {
			break
		}
		v.reset(OpARM64SUB)
		v.AddArg(a)
		v0 := b.NewValue0(v.Pos, OpARM64SUBshiftLL, x.Type)
		v0.AuxInt = log2(c + 1)
		v0.AddArg(x)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (MADD a x (MOVDconst [c]))
	// cond: c%3 == 0 && isPowerOfTwo(c/3)
	// result: (SUBshiftLL a (SUBshiftLL <x.Type> x x [2]) [log2(c/3)])
	for {
		_ = v.Args[2]
		a := v.Args[0]
		x := v.Args[1]
		v_2 := v.Args[2]
		if v_2.Op != OpARM64MOVDconst {
			break
		}
		c := v_2.AuxInt
		if !(c%3 == 0 && isPowerOfTwo(c/3)) {
			break
		}
		v.reset(OpARM64SUBshiftLL)
		v.AuxInt = log2(c / 3)
		v.AddArg(a)
		v0 := b.NewValue0(v.Pos, OpARM64SUBshiftLL, x.Type)
		v0.AuxInt = 2
		v0.AddArg(x)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (MADD a x (MOVDconst [c]))
	// cond: c%5 == 0 && isPowerOfTwo(c/5)
	// result: (ADDshiftLL a (ADDshiftLL <x.Type> x x [2]) [log2(c/5)])
	for {
		_ = v.Args[2]
		a := v.Args[0]
		x := v.Args[1]
		v_2 := v.Args[2]
		if v_2.Op != OpARM64MOVDconst {
			break
		}
		c := v_2.AuxInt
		if !(c%5 == 0 && isPowerOfTwo(c/5)) {
			break
		}
		v.reset(OpARM64ADDshiftLL)
		v.AuxInt = log2(c / 5)
		v.AddArg(a)
		v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
		v0.AuxInt = 2
		v0.AddArg(x)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (MADD a x (MOVDconst [c]))
	// cond: c%7 == 0 && isPowerOfTwo(c/7)
	// result: (SUBshiftLL a (SUBshiftLL <x.Type> x x [3]) [log2(c/7)])
	for {
		_ = v.Args[2]
		a := v.Args[0]
		x := v.Args[1]
		v_2 := v.Args[2]
		if v_2.Op != OpARM64MOVDconst {
			break
		}
		c := v_2.AuxInt
		if !(c%7 == 0 && isPowerOfTwo(c/7)) {
			break
		}
		v.reset(OpARM64SUBshiftLL)
		v.AuxInt = log2(c / 7)
		v.AddArg(a)
		v0 := b.NewValue0(v.Pos, OpARM64SUBshiftLL, x.Type)
		v0.AuxInt = 3
		v0.AddArg(x)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (MADD a x (MOVDconst [c]))
	// cond: c%9 == 0 && isPowerOfTwo(c/9)
	// result: (ADDshiftLL a (ADDshiftLL <x.Type> x x [3]) [log2(c/9)])
	for {
		_ = v.Args[2]
		a := v.Args[0]
		x := v.Args[1]
		v_2 := v.Args[2]
		if v_2.Op != OpARM64MOVDconst {
			break
		}
		c := v_2.AuxInt
		if !(c%9 == 0 && isPowerOfTwo(c/9)) {
			break
		}
		v.reset(OpARM64ADDshiftLL)
		v.AuxInt = log2(c / 9)
		v.AddArg(a)
		v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
		v0.AuxInt = 3
		v0.AddArg(x)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64MADD_10 is the continuation of the MADD rules:
// the same strength reductions as rewriteValueARM64_OpARM64MADD_0, but with
// the constant multiplier in the second argument — (MADD a (MOVDconst [c]) x).
// Rules are tried in order; the first match wins. Generated from
// gen/ARM64.rules — edit the rules file and regenerate; do not modify this
// function by hand.
func rewriteValueARM64_OpARM64MADD_10(v *Value) bool {
	b := v.Block
	_ = b
	// match: (MADD a (MOVDconst [-1]) x)
	// cond:
	// result: (SUB a x)
	for {
		_ = v.Args[2]
		a := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		if v_1.AuxInt != -1 {
			break
		}
		x := v.Args[2]
		v.reset(OpARM64SUB)
		v.AddArg(a)
		v.AddArg(x)
		return true
	}
	// match: (MADD a (MOVDconst [0]) _)
	// cond:
	// result: a
	for {
		_ = v.Args[2]
		a := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		if v_1.AuxInt != 0 {
			break
		}
		v.reset(OpCopy)
		v.Type = a.Type
		v.AddArg(a)
		return true
	}
	// match: (MADD a (MOVDconst [1]) x)
	// cond:
	// result: (ADD a x)
	for {
		_ = v.Args[2]
		a := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		if v_1.AuxInt != 1 {
			break
		}
		x := v.Args[2]
		v.reset(OpARM64ADD)
		v.AddArg(a)
		v.AddArg(x)
		return true
	}
	// match: (MADD a (MOVDconst [c]) x)
	// cond: isPowerOfTwo(c)
	// result: (ADDshiftLL a x [log2(c)])
	for {
		_ = v.Args[2]
		a := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		x := v.Args[2]
		if !(isPowerOfTwo(c)) {
			break
		}
		v.reset(OpARM64ADDshiftLL)
		v.AuxInt = log2(c)
		v.AddArg(a)
		v.AddArg(x)
		return true
	}
	// match: (MADD a (MOVDconst [c]) x)
	// cond: isPowerOfTwo(c-1) && c>=3
	// result: (ADD a (ADDshiftLL <x.Type> x x [log2(c-1)]))
	for {
		_ = v.Args[2]
		a := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		x := v.Args[2]
		if !(isPowerOfTwo(c-1) && c >= 3) {
			break
		}
		v.reset(OpARM64ADD)
		v.AddArg(a)
		v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
		v0.AuxInt = log2(c - 1)
		v0.AddArg(x)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (MADD a (MOVDconst [c]) x)
	// cond: isPowerOfTwo(c+1) && c>=7
	// result: (SUB a (SUBshiftLL <x.Type> x x [log2(c+1)]))
	for {
		_ = v.Args[2]
		a := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		x := v.Args[2]
		if !(isPowerOfTwo(c+1) && c >= 7) {
			break
		}
		v.reset(OpARM64SUB)
		v.AddArg(a)
		v0 := b.NewValue0(v.Pos, OpARM64SUBshiftLL, x.Type)
		v0.AuxInt = log2(c + 1)
		v0.AddArg(x)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (MADD a (MOVDconst [c]) x)
	// cond: c%3 == 0 && isPowerOfTwo(c/3)
	// result: (SUBshiftLL a (SUBshiftLL <x.Type> x x [2]) [log2(c/3)])
	for {
		_ = v.Args[2]
		a := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		x := v.Args[2]
		if !(c%3 == 0 && isPowerOfTwo(c/3)) {
			break
		}
		v.reset(OpARM64SUBshiftLL)
		v.AuxInt = log2(c / 3)
		v.AddArg(a)
		v0 := b.NewValue0(v.Pos, OpARM64SUBshiftLL, x.Type)
		v0.AuxInt = 2
		v0.AddArg(x)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (MADD a (MOVDconst [c]) x)
	// cond: c%5 == 0 && isPowerOfTwo(c/5)
	// result: (ADDshiftLL a (ADDshiftLL <x.Type> x x [2]) [log2(c/5)])
	for {
		_ = v.Args[2]
		a := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		x := v.Args[2]
		if !(c%5 == 0 && isPowerOfTwo(c/5)) {
			break
		}
		v.reset(OpARM64ADDshiftLL)
		v.AuxInt = log2(c / 5)
		v.AddArg(a)
		v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
		v0.AuxInt = 2
		v0.AddArg(x)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (MADD a (MOVDconst [c]) x)
	// cond: c%7 == 0 && isPowerOfTwo(c/7)
	// result: (SUBshiftLL a (SUBshiftLL <x.Type> x x [3]) [log2(c/7)])
	for {
		_ = v.Args[2]
		a := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		x := v.Args[2]
		if !(c%7 == 0 && isPowerOfTwo(c/7)) {
			break
		}
		v.reset(OpARM64SUBshiftLL)
		v.AuxInt = log2(c / 7)
		v.AddArg(a)
		v0 := b.NewValue0(v.Pos, OpARM64SUBshiftLL, x.Type)
		v0.AuxInt = 3
		v0.AddArg(x)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (MADD a (MOVDconst [c]) x)
	// cond: c%9 == 0 && isPowerOfTwo(c/9)
	// result: (ADDshiftLL a (ADDshiftLL <x.Type> x x [3]) [log2(c/9)])
	for {
		_ = v.Args[2]
		a := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		x := v.Args[2]
		if !(c%9 == 0 && isPowerOfTwo(c/9)) {
			break
		}
		v.reset(OpARM64ADDshiftLL)
		v.AuxInt = log2(c / 9)
		v.AddArg(a)
		v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
		v0.AuxInt = 3
		v0.AddArg(x)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64MADD_20 finishes the MADD rules: when the addend is
// a constant it becomes (ADDconst [c] (MUL x y)), and when both multiplier
// operands are constants the whole op folds to (ADDconst [c*d] a).
// Generated from gen/ARM64.rules — edit the rules file and regenerate; do not
// modify this function by hand.
func rewriteValueARM64_OpARM64MADD_20(v *Value) bool {
	b := v.Block
	_ = b
	// match: (MADD (MOVDconst [c]) x y)
	// cond:
	// result: (ADDconst [c] (MUL <x.Type> x y))
	for {
		_ = v.Args[2]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		x := v.Args[1]
		y := v.Args[2]
		v.reset(OpARM64ADDconst)
		v.AuxInt = c
		v0 := b.NewValue0(v.Pos, OpARM64MUL, x.Type)
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
	// match: (MADD a (MOVDconst [c]) (MOVDconst [d]))
	// cond:
	// result: (ADDconst [c*d] a)
	for {
		_ = v.Args[2]
		a := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		v_2 := v.Args[2]
		if v_2.Op != OpARM64MOVDconst {
			break
		}
		d := v_2.AuxInt
		v.reset(OpARM64ADDconst)
		v.AuxInt = c * d
		v.AddArg(a)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64MADDW_0 applies strength-reduction rules for the
// 32-bit multiply-add (MADDW a x (MOVDconst [c])) with the constant in the
// third argument. The constant tests use int32(c) (and is32Bit(c) for the
// 3/5/7/9-times-power-of-two rules) because only the low 32 bits of the
// product are significant. Rules are tried in order; the first match wins.
// Generated from gen/ARM64.rules — edit the rules file and regenerate; do not
// modify this function by hand.
func rewriteValueARM64_OpARM64MADDW_0(v *Value) bool {
	b := v.Block
	_ = b
	// match: (MADDW a x (MOVDconst [c]))
	// cond: int32(c)==-1
	// result: (SUB a x)
	for {
		_ = v.Args[2]
		a := v.Args[0]
		x := v.Args[1]
		v_2 := v.Args[2]
		if v_2.Op != OpARM64MOVDconst {
			break
		}
		c := v_2.AuxInt
		if !(int32(c) == -1) {
			break
		}
		v.reset(OpARM64SUB)
		v.AddArg(a)
		v.AddArg(x)
		return true
	}
	// match: (MADDW a _ (MOVDconst [c]))
	// cond: int32(c)==0
	// result: a
	for {
		_ = v.Args[2]
		a := v.Args[0]
		v_2 := v.Args[2]
		if v_2.Op != OpARM64MOVDconst {
			break
		}
		c := v_2.AuxInt
		if !(int32(c) == 0) {
			break
		}
		v.reset(OpCopy)
		v.Type = a.Type
		v.AddArg(a)
		return true
	}
	// match: (MADDW a x (MOVDconst [c]))
	// cond: int32(c)==1
	// result: (ADD a x)
	for {
		_ = v.Args[2]
		a := v.Args[0]
		x := v.Args[1]
		v_2 := v.Args[2]
		if v_2.Op != OpARM64MOVDconst {
			break
		}
		c := v_2.AuxInt
		if !(int32(c) == 1) {
			break
		}
		v.reset(OpARM64ADD)
		v.AddArg(a)
		v.AddArg(x)
		return true
	}
	// match: (MADDW a x (MOVDconst [c]))
	// cond: isPowerOfTwo(c)
	// result: (ADDshiftLL a x [log2(c)])
	for {
		_ = v.Args[2]
		a := v.Args[0]
		x := v.Args[1]
		v_2 := v.Args[2]
		if v_2.Op != OpARM64MOVDconst {
			break
		}
		c := v_2.AuxInt
		if !(isPowerOfTwo(c)) {
			break
		}
		v.reset(OpARM64ADDshiftLL)
		v.AuxInt = log2(c)
		v.AddArg(a)
		v.AddArg(x)
		return true
	}
	// match: (MADDW a x (MOVDconst [c]))
	// cond: isPowerOfTwo(c-1) && int32(c)>=3
	// result: (ADD a (ADDshiftLL <x.Type> x x [log2(c-1)]))
	for {
		_ = v.Args[2]
		a := v.Args[0]
		x := v.Args[1]
		v_2 := v.Args[2]
		if v_2.Op != OpARM64MOVDconst {
			break
		}
		c := v_2.AuxInt
		if !(isPowerOfTwo(c-1) && int32(c) >= 3) {
			break
		}
		v.reset(OpARM64ADD)
		v.AddArg(a)
		v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
		v0.AuxInt = log2(c - 1)
		v0.AddArg(x)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (MADDW a x (MOVDconst [c]))
	// cond: isPowerOfTwo(c+1) && int32(c)>=7
	// result: (SUB a (SUBshiftLL <x.Type> x x [log2(c+1)]))
	for {
		_ = v.Args[2]
		a := v.Args[0]
		x := v.Args[1]
		v_2 := v.Args[2]
		if v_2.Op != OpARM64MOVDconst {
			break
		}
		c := v_2.AuxInt
		if !(isPowerOfTwo(c+1) && int32(c) >= 7) {
			break
		}
		v.reset(OpARM64SUB)
		v.AddArg(a)
		v0 := b.NewValue0(v.Pos, OpARM64SUBshiftLL, x.Type)
		v0.AuxInt = log2(c + 1)
		v0.AddArg(x)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (MADDW a x (MOVDconst [c]))
	// cond: c%3 == 0 && isPowerOfTwo(c/3) && is32Bit(c)
	// result: (SUBshiftLL a (SUBshiftLL <x.Type> x x [2]) [log2(c/3)])
	for {
		_ = v.Args[2]
		a := v.Args[0]
		x := v.Args[1]
		v_2 := v.Args[2]
		if v_2.Op != OpARM64MOVDconst {
			break
		}
		c := v_2.AuxInt
		if !(c%3 == 0 && isPowerOfTwo(c/3) && is32Bit(c)) {
			break
		}
		v.reset(OpARM64SUBshiftLL)
		v.AuxInt = log2(c / 3)
		v.AddArg(a)
		v0 := b.NewValue0(v.Pos, OpARM64SUBshiftLL, x.Type)
		v0.AuxInt = 2
		v0.AddArg(x)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (MADDW a x (MOVDconst [c]))
	// cond: c%5 == 0 && isPowerOfTwo(c/5) && is32Bit(c)
	// result: (ADDshiftLL a (ADDshiftLL <x.Type> x x [2]) [log2(c/5)])
	for {
		_ = v.Args[2]
		a := v.Args[0]
		x := v.Args[1]
		v_2 := v.Args[2]
		if v_2.Op != OpARM64MOVDconst {
			break
		}
		c := v_2.AuxInt
		if !(c%5 == 0 && isPowerOfTwo(c/5) && is32Bit(c)) {
			break
		}
		v.reset(OpARM64ADDshiftLL)
		v.AuxInt = log2(c / 5)
		v.AddArg(a)
		v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
		v0.AuxInt = 2
		v0.AddArg(x)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (MADDW a x (MOVDconst [c]))
	// cond: c%7 == 0 && isPowerOfTwo(c/7) && is32Bit(c)
	// result: (SUBshiftLL a (SUBshiftLL <x.Type> x x [3]) [log2(c/7)])
	for {
		_ = v.Args[2]
		a := v.Args[0]
		x := v.Args[1]
		v_2 := v.Args[2]
		if v_2.Op != OpARM64MOVDconst {
			break
		}
		c := v_2.AuxInt
		if !(c%7 == 0 && isPowerOfTwo(c/7) && is32Bit(c)) {
			break
		}
		v.reset(OpARM64SUBshiftLL)
		v.AuxInt = log2(c / 7)
		v.AddArg(a)
		v0 := b.NewValue0(v.Pos, OpARM64SUBshiftLL, x.Type)
		v0.AuxInt = 3
		v0.AddArg(x)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (MADDW a x (MOVDconst [c]))
	// cond: c%9 == 0 && isPowerOfTwo(c/9) && is32Bit(c)
	// result: (ADDshiftLL a (ADDshiftLL <x.Type> x x [3]) [log2(c/9)])
	for {
		_ = v.Args[2]
		a := v.Args[0]
		x := v.Args[1]
		v_2 := v.Args[2]
		if v_2.Op != OpARM64MOVDconst {
			break
		}
		c := v_2.AuxInt
		if !(c%9 == 0 && isPowerOfTwo(c/9) && is32Bit(c)) {
			break
		}
		v.reset(OpARM64ADDshiftLL)
		v.AuxInt = log2(c / 9)
		v.AddArg(a)
		v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
		v0.AuxInt = 3
		v0.AddArg(x)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64MADDW_10 is the continuation of the MADDW rules:
// the same 32-bit strength reductions as rewriteValueARM64_OpARM64MADDW_0, but
// with the constant multiplier in the second argument —
// (MADDW a (MOVDconst [c]) x). Rules are tried in order; the first match wins.
// Generated from gen/ARM64.rules — edit the rules file and regenerate; do not
// modify this function by hand.
func rewriteValueARM64_OpARM64MADDW_10(v *Value) bool {
	b := v.Block
	_ = b
	// match: (MADDW a (MOVDconst [c]) x)
	// cond: int32(c)==-1
	// result: (SUB a x)
	for {
		_ = v.Args[2]
		a := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		x := v.Args[2]
		if !(int32(c) == -1) {
			break
		}
		v.reset(OpARM64SUB)
		v.AddArg(a)
		v.AddArg(x)
		return true
	}
	// match: (MADDW a (MOVDconst [c]) _)
	// cond: int32(c)==0
	// result: a
	for {
		_ = v.Args[2]
		a := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		if !(int32(c) == 0) {
			break
		}
		v.reset(OpCopy)
		v.Type = a.Type
		v.AddArg(a)
		return true
	}
	// match: (MADDW a (MOVDconst [c]) x)
	// cond: int32(c)==1
	// result: (ADD a x)
	for {
		_ = v.Args[2]
		a := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		x := v.Args[2]
		if !(int32(c) == 1) {
			break
		}
		v.reset(OpARM64ADD)
		v.AddArg(a)
		v.AddArg(x)
		return true
	}
	// match: (MADDW a (MOVDconst [c]) x)
	// cond: isPowerOfTwo(c)
	// result: (ADDshiftLL a x [log2(c)])
	for {
		_ = v.Args[2]
		a := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		x := v.Args[2]
		if !(isPowerOfTwo(c)) {
			break
		}
		v.reset(OpARM64ADDshiftLL)
		v.AuxInt = log2(c)
		v.AddArg(a)
		v.AddArg(x)
		return true
	}
	// match: (MADDW a (MOVDconst [c]) x)
	// cond: isPowerOfTwo(c-1) && int32(c)>=3
	// result: (ADD a (ADDshiftLL <x.Type> x x [log2(c-1)]))
	for {
		_ = v.Args[2]
		a := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		x := v.Args[2]
		if !(isPowerOfTwo(c-1) && int32(c) >= 3) {
			break
		}
		v.reset(OpARM64ADD)
		v.AddArg(a)
		v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
		v0.AuxInt = log2(c - 1)
		v0.AddArg(x)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (MADDW a (MOVDconst [c]) x)
	// cond: isPowerOfTwo(c+1) && int32(c)>=7
	// result: (SUB a (SUBshiftLL <x.Type> x x [log2(c+1)]))
	for {
		_ = v.Args[2]
		a := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		x := v.Args[2]
		if !(isPowerOfTwo(c+1) && int32(c) >= 7) {
			break
		}
		v.reset(OpARM64SUB)
		v.AddArg(a)
		v0 := b.NewValue0(v.Pos, OpARM64SUBshiftLL, x.Type)
		v0.AuxInt = log2(c + 1)
		v0.AddArg(x)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (MADDW a (MOVDconst [c]) x)
	// cond: c%3 == 0 && isPowerOfTwo(c/3) && is32Bit(c)
	// result: (SUBshiftLL a (SUBshiftLL <x.Type> x x [2]) [log2(c/3)])
	for {
		_ = v.Args[2]
		a := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		x := v.Args[2]
		if !(c%3 == 0 && isPowerOfTwo(c/3) && is32Bit(c)) {
			break
		}
		v.reset(OpARM64SUBshiftLL)
		v.AuxInt = log2(c / 3)
		v.AddArg(a)
		v0 := b.NewValue0(v.Pos, OpARM64SUBshiftLL, x.Type)
		v0.AuxInt = 2
		v0.AddArg(x)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (MADDW a (MOVDconst [c]) x)
	// cond: c%5 == 0 && isPowerOfTwo(c/5) && is32Bit(c)
	// result: (ADDshiftLL a (ADDshiftLL <x.Type> x x [2]) [log2(c/5)])
	for {
		_ = v.Args[2]
		a := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		x := v.Args[2]
		if !(c%5 == 0 && isPowerOfTwo(c/5) && is32Bit(c)) {
			break
		}
		v.reset(OpARM64ADDshiftLL)
		v.AuxInt = log2(c / 5)
		v.AddArg(a)
		v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
		v0.AuxInt = 2
		v0.AddArg(x)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (MADDW a (MOVDconst [c]) x)
	// cond: c%7 == 0 && isPowerOfTwo(c/7) && is32Bit(c)
	// result: (SUBshiftLL a (SUBshiftLL <x.Type> x x [3]) [log2(c/7)])
	for {
		_ = v.Args[2]
		a := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		x := v.Args[2]
		if !(c%7 == 0 && isPowerOfTwo(c/7) && is32Bit(c)) {
			break
		}
		v.reset(OpARM64SUBshiftLL)
		v.AuxInt = log2(c / 7)
		v.AddArg(a)
		v0 := b.NewValue0(v.Pos, OpARM64SUBshiftLL, x.Type)
		v0.AuxInt = 3
		v0.AddArg(x)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (MADDW a (MOVDconst [c]) x)
	// cond: c%9 == 0 && isPowerOfTwo(c/9) && is32Bit(c)
	// result: (ADDshiftLL a (ADDshiftLL <x.Type> x x [3]) [log2(c/9)])
	for {
		_ = v.Args[2]
		a := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		x := v.Args[2]
		if !(c%9 == 0 && isPowerOfTwo(c/9) && is32Bit(c)) {
			break
		}
		v.reset(OpARM64ADDshiftLL)
		v.AuxInt = log2(c / 9)
		v.AddArg(a)
		v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
		v0.AuxInt = 3
		v0.AddArg(x)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64MADDW_20 finishes the MADDW rules: when the addend
// is a constant it becomes (ADDconst [c] (MULW x y)), and when both multiplier
// operands are constants the whole op folds to an ADDconst whose AuxInt is the
// 32-bit product sign-extended to int64.
// Generated from gen/ARM64.rules — edit the rules file and regenerate; do not
// modify this function by hand.
func rewriteValueARM64_OpARM64MADDW_20(v *Value) bool {
	b := v.Block
	_ = b
	// match: (MADDW (MOVDconst [c]) x y)
	// cond:
	// result: (ADDconst [c] (MULW <x.Type> x y))
	for {
		_ = v.Args[2]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		x := v.Args[1]
		y := v.Args[2]
		v.reset(OpARM64ADDconst)
		v.AuxInt = c
		v0 := b.NewValue0(v.Pos, OpARM64MULW, x.Type)
		v0.AddArg(x)
		v0.AddArg(y)
		v.AddArg(v0)
		return true
	}
	// match: (MADDW a (MOVDconst [c]) (MOVDconst [d]))
	// cond:
	// result: (ADDconst [int64(int32(c)*int32(d))] a)
	for {
		_ = v.Args[2]
		a := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		v_2 := v.Args[2]
		if v_2.Op != OpARM64MOVDconst {
			break
		}
		d := v_2.AuxInt
		v.reset(OpARM64ADDconst)
		v.AuxInt = int64(int32(c) * int32(d))
		v.AddArg(a)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64MNEG_0 applies the first batch of rewrite rules
// for MNEG (negated 64-bit multiply): identities for the constant factors
// -1, 0 and 1, and strength reduction to NEG/shift/shifted-add forms when
// one factor is a power of two, or one more than a power of two (c >= 3).
// Rules come in commuted pairs because the constant may be either operand.
// Returns true if v was rewritten.
// NOTE(review): generated from gen/ARM64.rules — edit the rules file, not
// this function, to change behavior.
func rewriteValueARM64_OpARM64MNEG_0(v *Value) bool {
	b := v.Block
	_ = b
	// match: (MNEG x (MOVDconst [-1]))
	// cond:
	// result: x
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		if v_1.AuxInt != -1 {
			break
		}
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	// match: (MNEG (MOVDconst [-1]) x)
	// cond:
	// result: x
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		if v_0.AuxInt != -1 {
			break
		}
		x := v.Args[1]
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	// match: (MNEG _ (MOVDconst [0]))
	// cond:
	// result: (MOVDconst [0])
	for {
		_ = v.Args[1]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		if v_1.AuxInt != 0 {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 0
		return true
	}
	// match: (MNEG (MOVDconst [0]) _)
	// cond:
	// result: (MOVDconst [0])
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		if v_0.AuxInt != 0 {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 0
		return true
	}
	// match: (MNEG x (MOVDconst [1]))
	// cond:
	// result: (NEG x)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		if v_1.AuxInt != 1 {
			break
		}
		v.reset(OpARM64NEG)
		v.AddArg(x)
		return true
	}
	// match: (MNEG (MOVDconst [1]) x)
	// cond:
	// result: (NEG x)
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		if v_0.AuxInt != 1 {
			break
		}
		x := v.Args[1]
		v.reset(OpARM64NEG)
		v.AddArg(x)
		return true
	}
	// match: (MNEG x (MOVDconst [c]))
	// cond: isPowerOfTwo(c)
	// result: (NEG (SLLconst <x.Type> [log2(c)] x))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		if !(isPowerOfTwo(c)) {
			break
		}
		v.reset(OpARM64NEG)
		v0 := b.NewValue0(v.Pos, OpARM64SLLconst, x.Type)
		v0.AuxInt = log2(c)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (MNEG (MOVDconst [c]) x)
	// cond: isPowerOfTwo(c)
	// result: (NEG (SLLconst <x.Type> [log2(c)] x))
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		x := v.Args[1]
		if !(isPowerOfTwo(c)) {
			break
		}
		v.reset(OpARM64NEG)
		v0 := b.NewValue0(v.Pos, OpARM64SLLconst, x.Type)
		v0.AuxInt = log2(c)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (MNEG x (MOVDconst [c]))
	// cond: isPowerOfTwo(c-1) && c >= 3
	// result: (NEG (ADDshiftLL <x.Type> x x [log2(c-1)]))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		if !(isPowerOfTwo(c-1) && c >= 3) {
			break
		}
		v.reset(OpARM64NEG)
		v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
		v0.AuxInt = log2(c - 1)
		v0.AddArg(x)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (MNEG (MOVDconst [c]) x)
	// cond: isPowerOfTwo(c-1) && c >= 3
	// result: (NEG (ADDshiftLL <x.Type> x x [log2(c-1)]))
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		x := v.Args[1]
		if !(isPowerOfTwo(c-1) && c >= 3) {
			break
		}
		v.reset(OpARM64NEG)
		v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
		v0.AuxInt = log2(c - 1)
		v0.AddArg(x)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64MNEG_10 applies the second batch of MNEG
// (negated 64-bit multiply) rules: strength reduction when the constant
// factor is one less than a power of two (c >= 7), or a multiple of
// 3, 5, 7 or 9 whose quotient is a power of two. Each case lowers the
// multiply to shifted add/sub forms; rules come in commuted pairs.
// Returns true if v was rewritten.
// NOTE(review): generated from gen/ARM64.rules — edit the rules file to
// change behavior.
func rewriteValueARM64_OpARM64MNEG_10(v *Value) bool {
	b := v.Block
	_ = b
	// match: (MNEG x (MOVDconst [c]))
	// cond: isPowerOfTwo(c+1) && c >= 7
	// result: (NEG (ADDshiftLL <x.Type> (NEG <x.Type> x) x [log2(c+1)]))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		if !(isPowerOfTwo(c+1) && c >= 7) {
			break
		}
		v.reset(OpARM64NEG)
		v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
		v0.AuxInt = log2(c + 1)
		v1 := b.NewValue0(v.Pos, OpARM64NEG, x.Type)
		v1.AddArg(x)
		v0.AddArg(v1)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (MNEG (MOVDconst [c]) x)
	// cond: isPowerOfTwo(c+1) && c >= 7
	// result: (NEG (ADDshiftLL <x.Type> (NEG <x.Type> x) x [log2(c+1)]))
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		x := v.Args[1]
		if !(isPowerOfTwo(c+1) && c >= 7) {
			break
		}
		v.reset(OpARM64NEG)
		v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
		v0.AuxInt = log2(c + 1)
		v1 := b.NewValue0(v.Pos, OpARM64NEG, x.Type)
		v1.AddArg(x)
		v0.AddArg(v1)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (MNEG x (MOVDconst [c]))
	// cond: c%3 == 0 && isPowerOfTwo(c/3)
	// result: (SLLconst <x.Type> [log2(c/3)] (SUBshiftLL <x.Type> x x [2]))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		if !(c%3 == 0 && isPowerOfTwo(c/3)) {
			break
		}
		v.reset(OpARM64SLLconst)
		v.Type = x.Type
		v.AuxInt = log2(c / 3)
		v0 := b.NewValue0(v.Pos, OpARM64SUBshiftLL, x.Type)
		v0.AuxInt = 2
		v0.AddArg(x)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (MNEG (MOVDconst [c]) x)
	// cond: c%3 == 0 && isPowerOfTwo(c/3)
	// result: (SLLconst <x.Type> [log2(c/3)] (SUBshiftLL <x.Type> x x [2]))
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		x := v.Args[1]
		if !(c%3 == 0 && isPowerOfTwo(c/3)) {
			break
		}
		v.reset(OpARM64SLLconst)
		v.Type = x.Type
		v.AuxInt = log2(c / 3)
		v0 := b.NewValue0(v.Pos, OpARM64SUBshiftLL, x.Type)
		v0.AuxInt = 2
		v0.AddArg(x)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (MNEG x (MOVDconst [c]))
	// cond: c%5 == 0 && isPowerOfTwo(c/5)
	// result: (NEG (SLLconst <x.Type> [log2(c/5)] (ADDshiftLL <x.Type> x x [2])))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		if !(c%5 == 0 && isPowerOfTwo(c/5)) {
			break
		}
		v.reset(OpARM64NEG)
		v0 := b.NewValue0(v.Pos, OpARM64SLLconst, x.Type)
		v0.AuxInt = log2(c / 5)
		v1 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
		v1.AuxInt = 2
		v1.AddArg(x)
		v1.AddArg(x)
		v0.AddArg(v1)
		v.AddArg(v0)
		return true
	}
	// match: (MNEG (MOVDconst [c]) x)
	// cond: c%5 == 0 && isPowerOfTwo(c/5)
	// result: (NEG (SLLconst <x.Type> [log2(c/5)] (ADDshiftLL <x.Type> x x [2])))
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		x := v.Args[1]
		if !(c%5 == 0 && isPowerOfTwo(c/5)) {
			break
		}
		v.reset(OpARM64NEG)
		v0 := b.NewValue0(v.Pos, OpARM64SLLconst, x.Type)
		v0.AuxInt = log2(c / 5)
		v1 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
		v1.AuxInt = 2
		v1.AddArg(x)
		v1.AddArg(x)
		v0.AddArg(v1)
		v.AddArg(v0)
		return true
	}
	// match: (MNEG x (MOVDconst [c]))
	// cond: c%7 == 0 && isPowerOfTwo(c/7)
	// result: (SLLconst <x.Type> [log2(c/7)] (SUBshiftLL <x.Type> x x [3]))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		if !(c%7 == 0 && isPowerOfTwo(c/7)) {
			break
		}
		v.reset(OpARM64SLLconst)
		v.Type = x.Type
		v.AuxInt = log2(c / 7)
		v0 := b.NewValue0(v.Pos, OpARM64SUBshiftLL, x.Type)
		v0.AuxInt = 3
		v0.AddArg(x)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (MNEG (MOVDconst [c]) x)
	// cond: c%7 == 0 && isPowerOfTwo(c/7)
	// result: (SLLconst <x.Type> [log2(c/7)] (SUBshiftLL <x.Type> x x [3]))
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		x := v.Args[1]
		if !(c%7 == 0 && isPowerOfTwo(c/7)) {
			break
		}
		v.reset(OpARM64SLLconst)
		v.Type = x.Type
		v.AuxInt = log2(c / 7)
		v0 := b.NewValue0(v.Pos, OpARM64SUBshiftLL, x.Type)
		v0.AuxInt = 3
		v0.AddArg(x)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (MNEG x (MOVDconst [c]))
	// cond: c%9 == 0 && isPowerOfTwo(c/9)
	// result: (NEG (SLLconst <x.Type> [log2(c/9)] (ADDshiftLL <x.Type> x x [3])))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		if !(c%9 == 0 && isPowerOfTwo(c/9)) {
			break
		}
		v.reset(OpARM64NEG)
		v0 := b.NewValue0(v.Pos, OpARM64SLLconst, x.Type)
		v0.AuxInt = log2(c / 9)
		v1 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
		v1.AuxInt = 3
		v1.AddArg(x)
		v1.AddArg(x)
		v0.AddArg(v1)
		v.AddArg(v0)
		return true
	}
	// match: (MNEG (MOVDconst [c]) x)
	// cond: c%9 == 0 && isPowerOfTwo(c/9)
	// result: (NEG (SLLconst <x.Type> [log2(c/9)] (ADDshiftLL <x.Type> x x [3])))
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		x := v.Args[1]
		if !(c%9 == 0 && isPowerOfTwo(c/9)) {
			break
		}
		v.reset(OpARM64NEG)
		v0 := b.NewValue0(v.Pos, OpARM64SLLconst, x.Type)
		v0.AuxInt = log2(c / 9)
		v1 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
		v1.AuxInt = 3
		v1.AddArg(x)
		v1.AddArg(x)
		v0.AddArg(v1)
		v.AddArg(v0)
		return true
	}
	return false
}
  8264  func rewriteValueARM64_OpARM64MNEG_20(v *Value) bool {
  8265  	// match: (MNEG (MOVDconst [c]) (MOVDconst [d]))
  8266  	// cond:
  8267  	// result: (MOVDconst [-c*d])
  8268  	for {
  8269  		_ = v.Args[1]
  8270  		v_0 := v.Args[0]
  8271  		if v_0.Op != OpARM64MOVDconst {
  8272  			break
  8273  		}
  8274  		c := v_0.AuxInt
  8275  		v_1 := v.Args[1]
  8276  		if v_1.Op != OpARM64MOVDconst {
  8277  			break
  8278  		}
  8279  		d := v_1.AuxInt
  8280  		v.reset(OpARM64MOVDconst)
  8281  		v.AuxInt = -c * d
  8282  		return true
  8283  	}
  8284  	// match: (MNEG (MOVDconst [d]) (MOVDconst [c]))
  8285  	// cond:
  8286  	// result: (MOVDconst [-c*d])
  8287  	for {
  8288  		_ = v.Args[1]
  8289  		v_0 := v.Args[0]
  8290  		if v_0.Op != OpARM64MOVDconst {
  8291  			break
  8292  		}
  8293  		d := v_0.AuxInt
  8294  		v_1 := v.Args[1]
  8295  		if v_1.Op != OpARM64MOVDconst {
  8296  			break
  8297  		}
  8298  		c := v_1.AuxInt
  8299  		v.reset(OpARM64MOVDconst)
  8300  		v.AuxInt = -c * d
  8301  		return true
  8302  	}
  8303  	return false
  8304  }
// rewriteValueARM64_OpARM64MNEGW_0 applies the first batch of MNEGW
// (negated 32-bit multiply) rules: identities for factors whose low 32
// bits are -1, 0 or 1, and strength reduction to NEG/shift/shifted-add
// forms when one factor is a power of two or one more than a power of two
// (int32(c) >= 3). Conditions use int32 truncation since only the low 32
// bits of the operands are significant. Rules come in commuted pairs.
// Returns true if v was rewritten.
// NOTE(review): generated from gen/ARM64.rules — edit the rules file to
// change behavior.
func rewriteValueARM64_OpARM64MNEGW_0(v *Value) bool {
	b := v.Block
	_ = b
	// match: (MNEGW x (MOVDconst [c]))
	// cond: int32(c)==-1
	// result: x
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		if !(int32(c) == -1) {
			break
		}
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	// match: (MNEGW (MOVDconst [c]) x)
	// cond: int32(c)==-1
	// result: x
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		x := v.Args[1]
		if !(int32(c) == -1) {
			break
		}
		v.reset(OpCopy)
		v.Type = x.Type
		v.AddArg(x)
		return true
	}
	// match: (MNEGW _ (MOVDconst [c]))
	// cond: int32(c)==0
	// result: (MOVDconst [0])
	for {
		_ = v.Args[1]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		if !(int32(c) == 0) {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 0
		return true
	}
	// match: (MNEGW (MOVDconst [c]) _)
	// cond: int32(c)==0
	// result: (MOVDconst [0])
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		if !(int32(c) == 0) {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 0
		return true
	}
	// match: (MNEGW x (MOVDconst [c]))
	// cond: int32(c)==1
	// result: (NEG x)
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		if !(int32(c) == 1) {
			break
		}
		v.reset(OpARM64NEG)
		v.AddArg(x)
		return true
	}
	// match: (MNEGW (MOVDconst [c]) x)
	// cond: int32(c)==1
	// result: (NEG x)
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		x := v.Args[1]
		if !(int32(c) == 1) {
			break
		}
		v.reset(OpARM64NEG)
		v.AddArg(x)
		return true
	}
	// match: (MNEGW x (MOVDconst [c]))
	// cond: isPowerOfTwo(c)
	// result: (NEG (SLLconst <x.Type> [log2(c)] x))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		if !(isPowerOfTwo(c)) {
			break
		}
		v.reset(OpARM64NEG)
		v0 := b.NewValue0(v.Pos, OpARM64SLLconst, x.Type)
		v0.AuxInt = log2(c)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (MNEGW (MOVDconst [c]) x)
	// cond: isPowerOfTwo(c)
	// result: (NEG (SLLconst <x.Type> [log2(c)] x))
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		x := v.Args[1]
		if !(isPowerOfTwo(c)) {
			break
		}
		v.reset(OpARM64NEG)
		v0 := b.NewValue0(v.Pos, OpARM64SLLconst, x.Type)
		v0.AuxInt = log2(c)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (MNEGW x (MOVDconst [c]))
	// cond: isPowerOfTwo(c-1) && int32(c) >= 3
	// result: (NEG (ADDshiftLL <x.Type> x x [log2(c-1)]))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		if !(isPowerOfTwo(c-1) && int32(c) >= 3) {
			break
		}
		v.reset(OpARM64NEG)
		v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
		v0.AuxInt = log2(c - 1)
		v0.AddArg(x)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (MNEGW (MOVDconst [c]) x)
	// cond: isPowerOfTwo(c-1) && int32(c) >= 3
	// result: (NEG (ADDshiftLL <x.Type> x x [log2(c-1)]))
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		x := v.Args[1]
		if !(isPowerOfTwo(c-1) && int32(c) >= 3) {
			break
		}
		v.reset(OpARM64NEG)
		v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
		v0.AuxInt = log2(c - 1)
		v0.AddArg(x)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64MNEGW_10 applies the second batch of MNEGW
// (negated 32-bit multiply) rules: strength reduction when the constant
// factor is one less than a power of two (int32(c) >= 7), or a multiple of
// 3, 5, 7 or 9 whose quotient is a power of two (additionally requiring
// is32Bit(c) so the divisibility test on the full 64-bit AuxInt is valid
// for the 32-bit operation). Rules come in commuted pairs. Returns true if
// v was rewritten.
// NOTE(review): generated from gen/ARM64.rules — edit the rules file to
// change behavior.
func rewriteValueARM64_OpARM64MNEGW_10(v *Value) bool {
	b := v.Block
	_ = b
	// match: (MNEGW x (MOVDconst [c]))
	// cond: isPowerOfTwo(c+1) && int32(c) >= 7
	// result: (NEG (ADDshiftLL <x.Type> (NEG <x.Type> x) x [log2(c+1)]))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		if !(isPowerOfTwo(c+1) && int32(c) >= 7) {
			break
		}
		v.reset(OpARM64NEG)
		v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
		v0.AuxInt = log2(c + 1)
		v1 := b.NewValue0(v.Pos, OpARM64NEG, x.Type)
		v1.AddArg(x)
		v0.AddArg(v1)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (MNEGW (MOVDconst [c]) x)
	// cond: isPowerOfTwo(c+1) && int32(c) >= 7
	// result: (NEG (ADDshiftLL <x.Type> (NEG <x.Type> x) x [log2(c+1)]))
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		x := v.Args[1]
		if !(isPowerOfTwo(c+1) && int32(c) >= 7) {
			break
		}
		v.reset(OpARM64NEG)
		v0 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
		v0.AuxInt = log2(c + 1)
		v1 := b.NewValue0(v.Pos, OpARM64NEG, x.Type)
		v1.AddArg(x)
		v0.AddArg(v1)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (MNEGW x (MOVDconst [c]))
	// cond: c%3 == 0 && isPowerOfTwo(c/3) && is32Bit(c)
	// result: (SLLconst <x.Type> [log2(c/3)] (SUBshiftLL <x.Type> x x [2]))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		if !(c%3 == 0 && isPowerOfTwo(c/3) && is32Bit(c)) {
			break
		}
		v.reset(OpARM64SLLconst)
		v.Type = x.Type
		v.AuxInt = log2(c / 3)
		v0 := b.NewValue0(v.Pos, OpARM64SUBshiftLL, x.Type)
		v0.AuxInt = 2
		v0.AddArg(x)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (MNEGW (MOVDconst [c]) x)
	// cond: c%3 == 0 && isPowerOfTwo(c/3) && is32Bit(c)
	// result: (SLLconst <x.Type> [log2(c/3)] (SUBshiftLL <x.Type> x x [2]))
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		x := v.Args[1]
		if !(c%3 == 0 && isPowerOfTwo(c/3) && is32Bit(c)) {
			break
		}
		v.reset(OpARM64SLLconst)
		v.Type = x.Type
		v.AuxInt = log2(c / 3)
		v0 := b.NewValue0(v.Pos, OpARM64SUBshiftLL, x.Type)
		v0.AuxInt = 2
		v0.AddArg(x)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (MNEGW x (MOVDconst [c]))
	// cond: c%5 == 0 && isPowerOfTwo(c/5) && is32Bit(c)
	// result: (NEG (SLLconst <x.Type> [log2(c/5)] (ADDshiftLL <x.Type> x x [2])))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		if !(c%5 == 0 && isPowerOfTwo(c/5) && is32Bit(c)) {
			break
		}
		v.reset(OpARM64NEG)
		v0 := b.NewValue0(v.Pos, OpARM64SLLconst, x.Type)
		v0.AuxInt = log2(c / 5)
		v1 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
		v1.AuxInt = 2
		v1.AddArg(x)
		v1.AddArg(x)
		v0.AddArg(v1)
		v.AddArg(v0)
		return true
	}
	// match: (MNEGW (MOVDconst [c]) x)
	// cond: c%5 == 0 && isPowerOfTwo(c/5) && is32Bit(c)
	// result: (NEG (SLLconst <x.Type> [log2(c/5)] (ADDshiftLL <x.Type> x x [2])))
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		x := v.Args[1]
		if !(c%5 == 0 && isPowerOfTwo(c/5) && is32Bit(c)) {
			break
		}
		v.reset(OpARM64NEG)
		v0 := b.NewValue0(v.Pos, OpARM64SLLconst, x.Type)
		v0.AuxInt = log2(c / 5)
		v1 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
		v1.AuxInt = 2
		v1.AddArg(x)
		v1.AddArg(x)
		v0.AddArg(v1)
		v.AddArg(v0)
		return true
	}
	// match: (MNEGW x (MOVDconst [c]))
	// cond: c%7 == 0 && isPowerOfTwo(c/7) && is32Bit(c)
	// result: (SLLconst <x.Type> [log2(c/7)] (SUBshiftLL <x.Type> x x [3]))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		if !(c%7 == 0 && isPowerOfTwo(c/7) && is32Bit(c)) {
			break
		}
		v.reset(OpARM64SLLconst)
		v.Type = x.Type
		v.AuxInt = log2(c / 7)
		v0 := b.NewValue0(v.Pos, OpARM64SUBshiftLL, x.Type)
		v0.AuxInt = 3
		v0.AddArg(x)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (MNEGW (MOVDconst [c]) x)
	// cond: c%7 == 0 && isPowerOfTwo(c/7) && is32Bit(c)
	// result: (SLLconst <x.Type> [log2(c/7)] (SUBshiftLL <x.Type> x x [3]))
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		x := v.Args[1]
		if !(c%7 == 0 && isPowerOfTwo(c/7) && is32Bit(c)) {
			break
		}
		v.reset(OpARM64SLLconst)
		v.Type = x.Type
		v.AuxInt = log2(c / 7)
		v0 := b.NewValue0(v.Pos, OpARM64SUBshiftLL, x.Type)
		v0.AuxInt = 3
		v0.AddArg(x)
		v0.AddArg(x)
		v.AddArg(v0)
		return true
	}
	// match: (MNEGW x (MOVDconst [c]))
	// cond: c%9 == 0 && isPowerOfTwo(c/9) && is32Bit(c)
	// result: (NEG (SLLconst <x.Type> [log2(c/9)] (ADDshiftLL <x.Type> x x [3])))
	for {
		_ = v.Args[1]
		x := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		if !(c%9 == 0 && isPowerOfTwo(c/9) && is32Bit(c)) {
			break
		}
		v.reset(OpARM64NEG)
		v0 := b.NewValue0(v.Pos, OpARM64SLLconst, x.Type)
		v0.AuxInt = log2(c / 9)
		v1 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
		v1.AuxInt = 3
		v1.AddArg(x)
		v1.AddArg(x)
		v0.AddArg(v1)
		v.AddArg(v0)
		return true
	}
	// match: (MNEGW (MOVDconst [c]) x)
	// cond: c%9 == 0 && isPowerOfTwo(c/9) && is32Bit(c)
	// result: (NEG (SLLconst <x.Type> [log2(c/9)] (ADDshiftLL <x.Type> x x [3])))
	for {
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		x := v.Args[1]
		if !(c%9 == 0 && isPowerOfTwo(c/9) && is32Bit(c)) {
			break
		}
		v.reset(OpARM64NEG)
		v0 := b.NewValue0(v.Pos, OpARM64SLLconst, x.Type)
		v0.AuxInt = log2(c / 9)
		v1 := b.NewValue0(v.Pos, OpARM64ADDshiftLL, x.Type)
		v1.AuxInt = 3
		v1.AddArg(x)
		v1.AddArg(x)
		v0.AddArg(v1)
		v.AddArg(v0)
		return true
	}
	return false
}
  8753  func rewriteValueARM64_OpARM64MNEGW_20(v *Value) bool {
  8754  	// match: (MNEGW (MOVDconst [c]) (MOVDconst [d]))
  8755  	// cond:
  8756  	// result: (MOVDconst [-int64(int32(c)*int32(d))])
  8757  	for {
  8758  		_ = v.Args[1]
  8759  		v_0 := v.Args[0]
  8760  		if v_0.Op != OpARM64MOVDconst {
  8761  			break
  8762  		}
  8763  		c := v_0.AuxInt
  8764  		v_1 := v.Args[1]
  8765  		if v_1.Op != OpARM64MOVDconst {
  8766  			break
  8767  		}
  8768  		d := v_1.AuxInt
  8769  		v.reset(OpARM64MOVDconst)
  8770  		v.AuxInt = -int64(int32(c) * int32(d))
  8771  		return true
  8772  	}
  8773  	// match: (MNEGW (MOVDconst [d]) (MOVDconst [c]))
  8774  	// cond:
  8775  	// result: (MOVDconst [-int64(int32(c)*int32(d))])
  8776  	for {
  8777  		_ = v.Args[1]
  8778  		v_0 := v.Args[0]
  8779  		if v_0.Op != OpARM64MOVDconst {
  8780  			break
  8781  		}
  8782  		d := v_0.AuxInt
  8783  		v_1 := v.Args[1]
  8784  		if v_1.Op != OpARM64MOVDconst {
  8785  			break
  8786  		}
  8787  		c := v_1.AuxInt
  8788  		v.reset(OpARM64MOVDconst)
  8789  		v.AuxInt = -int64(int32(c) * int32(d))
  8790  		return true
  8791  	}
  8792  	return false
  8793  }
  8794  func rewriteValueARM64_OpARM64MOD_0(v *Value) bool {
  8795  	// match: (MOD (MOVDconst [c]) (MOVDconst [d]))
  8796  	// cond:
  8797  	// result: (MOVDconst [c%d])
  8798  	for {
  8799  		_ = v.Args[1]
  8800  		v_0 := v.Args[0]
  8801  		if v_0.Op != OpARM64MOVDconst {
  8802  			break
  8803  		}
  8804  		c := v_0.AuxInt
  8805  		v_1 := v.Args[1]
  8806  		if v_1.Op != OpARM64MOVDconst {
  8807  			break
  8808  		}
  8809  		d := v_1.AuxInt
  8810  		v.reset(OpARM64MOVDconst)
  8811  		v.AuxInt = c % d
  8812  		return true
  8813  	}
  8814  	return false
  8815  }
  8816  func rewriteValueARM64_OpARM64MODW_0(v *Value) bool {
  8817  	// match: (MODW (MOVDconst [c]) (MOVDconst [d]))
  8818  	// cond:
  8819  	// result: (MOVDconst [int64(int32(c)%int32(d))])
  8820  	for {
  8821  		_ = v.Args[1]
  8822  		v_0 := v.Args[0]
  8823  		if v_0.Op != OpARM64MOVDconst {
  8824  			break
  8825  		}
  8826  		c := v_0.AuxInt
  8827  		v_1 := v.Args[1]
  8828  		if v_1.Op != OpARM64MOVDconst {
  8829  			break
  8830  		}
  8831  		d := v_1.AuxInt
  8832  		v.reset(OpARM64MOVDconst)
  8833  		v.AuxInt = int64(int32(c) % int32(d))
  8834  		return true
  8835  	}
  8836  	return false
  8837  }
// rewriteValueARM64_OpARM64MOVBUload_0 applies rewrite rules for MOVBUload
// (unsigned byte load): fold an ADDconst offset into the load's AuxInt,
// convert a register+register address into the indexed load form, merge a
// MOVDaddr symbol/offset into the load, forward a zero from a matching
// MOVBstorezero to the same address, and constant-fold loads from
// read-only symbols. Offset folds are skipped for SB-based addresses in
// shared (PIC) mode, where a static offset from SB is not valid. Returns
// true if v was rewritten.
// NOTE(review): generated from gen/ARM64.rules — edit the rules file to
// change behavior.
func rewriteValueARM64_OpARM64MOVBUload_0(v *Value) bool {
	b := v.Block
	_ = b
	config := b.Func.Config
	_ = config
	// match: (MOVBUload [off1] {sym} (ADDconst [off2] ptr) mem)
	// cond: is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)
	// result: (MOVBUload [off1+off2] {sym} ptr mem)
	for {
		off1 := v.AuxInt
		sym := v.Aux
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64ADDconst {
			break
		}
		off2 := v_0.AuxInt
		ptr := v_0.Args[0]
		mem := v.Args[1]
		if !(is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
			break
		}
		v.reset(OpARM64MOVBUload)
		v.AuxInt = off1 + off2
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (MOVBUload [off] {sym} (ADD ptr idx) mem)
	// cond: off == 0 && sym == nil
	// result: (MOVBUloadidx ptr idx mem)
	for {
		off := v.AuxInt
		sym := v.Aux
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64ADD {
			break
		}
		_ = v_0.Args[1]
		ptr := v_0.Args[0]
		idx := v_0.Args[1]
		mem := v.Args[1]
		if !(off == 0 && sym == nil) {
			break
		}
		v.reset(OpARM64MOVBUloadidx)
		v.AddArg(ptr)
		v.AddArg(idx)
		v.AddArg(mem)
		return true
	}
	// match: (MOVBUload [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)
	// result: (MOVBUload [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
	for {
		off1 := v.AuxInt
		sym1 := v.Aux
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDaddr {
			break
		}
		off2 := v_0.AuxInt
		sym2 := v_0.Aux
		ptr := v_0.Args[0]
		mem := v.Args[1]
		if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
			break
		}
		v.reset(OpARM64MOVBUload)
		v.AuxInt = off1 + off2
		v.Aux = mergeSym(sym1, sym2)
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (MOVBUload [off] {sym} ptr (MOVBstorezero [off2] {sym2} ptr2 _))
	// cond: sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)
	// result: (MOVDconst [0])
	for {
		off := v.AuxInt
		sym := v.Aux
		_ = v.Args[1]
		ptr := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVBstorezero {
			break
		}
		off2 := v_1.AuxInt
		sym2 := v_1.Aux
		_ = v_1.Args[1]
		ptr2 := v_1.Args[0]
		if !(sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)) {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 0
		return true
	}
	// match: (MOVBUload [off] {sym} (SB) _)
	// cond: symIsRO(sym)
	// result: (MOVDconst [int64(read8(sym, off))])
	for {
		off := v.AuxInt
		sym := v.Aux
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpSB {
			break
		}
		if !(symIsRO(sym)) {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64(read8(sym, off))
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64MOVBUloadidx_0 applies rewrite rules to a
// MOVBUloadidx value (unsigned byte load with register index). It reports
// whether v was rewritten. Each "for" loop below runs at most once: it either
// rewrites v and returns true, or breaks out to try the next rule.
// NOTE(review): generated from gen/ARM64.rules — change the rules, not this file.
func rewriteValueARM64_OpARM64MOVBUloadidx_0(v *Value) bool {
	// match: (MOVBUloadidx ptr (MOVDconst [c]) mem)
	// cond:
	// result: (MOVBUload [c] ptr mem)
	for {
		_ = v.Args[2] // generated arg-count touch before indexing the args below
		ptr := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		mem := v.Args[2]
		// Constant index: fold it into the load's immediate offset.
		v.reset(OpARM64MOVBUload)
		v.AuxInt = c
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (MOVBUloadidx (MOVDconst [c]) ptr mem)
	// cond:
	// result: (MOVBUload [c] ptr mem)
	for {
		_ = v.Args[2]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		ptr := v.Args[1]
		mem := v.Args[2]
		// Same fold with the constant in the first operand position.
		v.reset(OpARM64MOVBUload)
		v.AuxInt = c
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (MOVBUloadidx ptr idx (MOVBstorezeroidx ptr2 idx2 _))
	// cond: (isSamePtr(ptr, ptr2) && isSamePtr(idx, idx2) || isSamePtr(ptr, idx2) && isSamePtr(idx, ptr2))
	// result: (MOVDconst [0])
	for {
		_ = v.Args[2]
		ptr := v.Args[0]
		idx := v.Args[1]
		v_2 := v.Args[2]
		if v_2.Op != OpARM64MOVBstorezeroidx {
			break
		}
		_ = v_2.Args[2]
		ptr2 := v_2.Args[0]
		idx2 := v_2.Args[1]
		// Loading a byte just stored as zero (same ptr/idx in either order)
		// yields the constant 0.
		if !(isSamePtr(ptr, ptr2) && isSamePtr(idx, idx2) || isSamePtr(ptr, idx2) && isSamePtr(idx, ptr2)) {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 0
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64MOVBUreg_0 applies rewrite rules to a MOVBUreg
// value (zero-extend byte to 64 bits). It reports whether v was rewritten.
// Each "for" loop is a single-shot rule attempt: rewrite-and-return-true, or
// break to the next rule.
// NOTE(review): generated from gen/ARM64.rules — change the rules, not this file.
func rewriteValueARM64_OpARM64MOVBUreg_0(v *Value) bool {
	// match: (MOVBUreg x:(MOVBUload _ _))
	// cond:
	// result: (MOVDreg x)
	for {
		x := v.Args[0]
		if x.Op != OpARM64MOVBUload {
			break
		}
		_ = x.Args[1]
		// The load already zero-extends, so the extension is redundant.
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVBUreg x:(MOVBUloadidx _ _ _))
	// cond:
	// result: (MOVDreg x)
	for {
		x := v.Args[0]
		if x.Op != OpARM64MOVBUloadidx {
			break
		}
		_ = x.Args[2]
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVBUreg x:(MOVBUreg _))
	// cond:
	// result: (MOVDreg x)
	for {
		x := v.Args[0]
		if x.Op != OpARM64MOVBUreg {
			break
		}
		// Collapse a double zero-extension.
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVBUreg (ANDconst [c] x))
	// cond:
	// result: (ANDconst [c&(1<<8-1)] x)
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64ANDconst {
			break
		}
		c := v_0.AuxInt
		x := v_0.Args[0]
		// Fold the zero-extension into the AND mask (keep low 8 bits).
		v.reset(OpARM64ANDconst)
		v.AuxInt = c & (1<<8 - 1)
		v.AddArg(x)
		return true
	}
	// match: (MOVBUreg (MOVDconst [c]))
	// cond:
	// result: (MOVDconst [int64(uint8(c))])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		// Constant-fold the zero-extension.
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64(uint8(c))
		return true
	}
	// match: (MOVBUreg x)
	// cond: x.Type.IsBoolean()
	// result: (MOVDreg x)
	for {
		x := v.Args[0]
		if !(x.Type.IsBoolean()) {
			break
		}
		// Booleans are already 0 or 1; no extension needed.
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVBUreg (SLLconst [sc] x))
	// cond: isARM64BFMask(sc, 1<<8-1, sc)
	// result: (UBFIZ [arm64BFAuxInt(sc, arm64BFWidth(1<<8-1, sc))] x)
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64SLLconst {
			break
		}
		sc := v_0.AuxInt
		x := v_0.Args[0]
		if !(isARM64BFMask(sc, 1<<8-1, sc)) {
			break
		}
		// shift-left + zero-extend => unsigned bitfield insert-in-zero.
		v.reset(OpARM64UBFIZ)
		v.AuxInt = arm64BFAuxInt(sc, arm64BFWidth(1<<8-1, sc))
		v.AddArg(x)
		return true
	}
	// match: (MOVBUreg (SRLconst [sc] x))
	// cond: isARM64BFMask(sc, 1<<8-1, 0)
	// result: (UBFX [arm64BFAuxInt(sc, 8)] x)
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64SRLconst {
			break
		}
		sc := v_0.AuxInt
		x := v_0.Args[0]
		if !(isARM64BFMask(sc, 1<<8-1, 0)) {
			break
		}
		// shift-right + zero-extend => unsigned bitfield extract.
		v.reset(OpARM64UBFX)
		v.AuxInt = arm64BFAuxInt(sc, 8)
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64MOVBload_0 applies rewrite rules to a MOVBload
// value (signed byte load). It reports whether v was rewritten. Each "for"
// loop is a single-shot rule attempt: rewrite-and-return-true, or break to
// the next rule.
// NOTE(review): generated from gen/ARM64.rules — change the rules, not this file.
func rewriteValueARM64_OpARM64MOVBload_0(v *Value) bool {
	b := v.Block
	_ = b
	config := b.Func.Config
	_ = config
	// match: (MOVBload [off1] {sym} (ADDconst [off2] ptr) mem)
	// cond: is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)
	// result: (MOVBload [off1+off2] {sym} ptr mem)
	for {
		off1 := v.AuxInt
		sym := v.Aux
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64ADDconst {
			break
		}
		off2 := v_0.AuxInt
		ptr := v_0.Args[0]
		mem := v.Args[1]
		// Fold a constant pointer adjustment into the load offset; the
		// SB/Flag_shared check avoids offsetting the static base in
		// shared (PIC) mode.
		if !(is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
			break
		}
		v.reset(OpARM64MOVBload)
		v.AuxInt = off1 + off2
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (MOVBload [off] {sym} (ADD ptr idx) mem)
	// cond: off == 0 && sym == nil
	// result: (MOVBloadidx ptr idx mem)
	for {
		off := v.AuxInt
		sym := v.Aux
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64ADD {
			break
		}
		_ = v_0.Args[1]
		ptr := v_0.Args[0]
		idx := v_0.Args[1]
		mem := v.Args[1]
		if !(off == 0 && sym == nil) {
			break
		}
		// Offset-free load from a sum of registers: use the indexed form.
		v.reset(OpARM64MOVBloadidx)
		v.AddArg(ptr)
		v.AddArg(idx)
		v.AddArg(mem)
		return true
	}
	// match: (MOVBload [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)
	// result: (MOVBload [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
	for {
		off1 := v.AuxInt
		sym1 := v.Aux
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDaddr {
			break
		}
		off2 := v_0.AuxInt
		sym2 := v_0.Aux
		ptr := v_0.Args[0]
		mem := v.Args[1]
		if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
			break
		}
		// Fold a symbolic address computation into the load.
		v.reset(OpARM64MOVBload)
		v.AuxInt = off1 + off2
		v.Aux = mergeSym(sym1, sym2)
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (MOVBload [off] {sym} ptr (MOVBstorezero [off2] {sym2} ptr2 _))
	// cond: sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)
	// result: (MOVDconst [0])
	for {
		off := v.AuxInt
		sym := v.Aux
		_ = v.Args[1]
		ptr := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVBstorezero {
			break
		}
		off2 := v_1.AuxInt
		sym2 := v_1.Aux
		_ = v_1.Args[1]
		ptr2 := v_1.Args[0]
		if !(sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)) {
			break
		}
		// Loading a byte just stored as zero yields the constant 0.
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 0
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64MOVBloadidx_0 applies rewrite rules to a
// MOVBloadidx value (signed byte load with register index). It reports
// whether v was rewritten. Mirrors the MOVBUloadidx rules for the signed
// load.
// NOTE(review): generated from gen/ARM64.rules — change the rules, not this file.
func rewriteValueARM64_OpARM64MOVBloadidx_0(v *Value) bool {
	// match: (MOVBloadidx ptr (MOVDconst [c]) mem)
	// cond:
	// result: (MOVBload [c] ptr mem)
	for {
		_ = v.Args[2]
		ptr := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		mem := v.Args[2]
		// Constant index: fold it into the load's immediate offset.
		v.reset(OpARM64MOVBload)
		v.AuxInt = c
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (MOVBloadidx (MOVDconst [c]) ptr mem)
	// cond:
	// result: (MOVBload [c] ptr mem)
	for {
		_ = v.Args[2]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		ptr := v.Args[1]
		mem := v.Args[2]
		// Same fold with the constant in the first operand position.
		v.reset(OpARM64MOVBload)
		v.AuxInt = c
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (MOVBloadidx ptr idx (MOVBstorezeroidx ptr2 idx2 _))
	// cond: (isSamePtr(ptr, ptr2) && isSamePtr(idx, idx2) || isSamePtr(ptr, idx2) && isSamePtr(idx, ptr2))
	// result: (MOVDconst [0])
	for {
		_ = v.Args[2]
		ptr := v.Args[0]
		idx := v.Args[1]
		v_2 := v.Args[2]
		if v_2.Op != OpARM64MOVBstorezeroidx {
			break
		}
		_ = v_2.Args[2]
		ptr2 := v_2.Args[0]
		idx2 := v_2.Args[1]
		// Loading a byte just stored as zero (same ptr/idx in either order)
		// yields the constant 0.
		if !(isSamePtr(ptr, ptr2) && isSamePtr(idx, idx2) || isSamePtr(ptr, idx2) && isSamePtr(idx, ptr2)) {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 0
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64MOVBreg_0 applies rewrite rules to a MOVBreg
// value (sign-extend byte to 64 bits). It reports whether v was rewritten.
// Each "for" loop is a single-shot rule attempt: rewrite-and-return-true, or
// break to the next rule.
// NOTE(review): generated from gen/ARM64.rules — change the rules, not this file.
func rewriteValueARM64_OpARM64MOVBreg_0(v *Value) bool {
	// match: (MOVBreg x:(MOVBload _ _))
	// cond:
	// result: (MOVDreg x)
	for {
		x := v.Args[0]
		if x.Op != OpARM64MOVBload {
			break
		}
		_ = x.Args[1]
		// The signed load already sign-extends; drop the redundant extension.
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVBreg x:(MOVBloadidx _ _ _))
	// cond:
	// result: (MOVDreg x)
	for {
		x := v.Args[0]
		if x.Op != OpARM64MOVBloadidx {
			break
		}
		_ = x.Args[2]
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVBreg x:(MOVBreg _))
	// cond:
	// result: (MOVDreg x)
	for {
		x := v.Args[0]
		if x.Op != OpARM64MOVBreg {
			break
		}
		// Collapse a double sign-extension.
		v.reset(OpARM64MOVDreg)
		v.AddArg(x)
		return true
	}
	// match: (MOVBreg (MOVDconst [c]))
	// cond:
	// result: (MOVDconst [int64(int8(c))])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		// Constant-fold the sign-extension.
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64(int8(c))
		return true
	}
	// match: (MOVBreg (SLLconst [lc] x))
	// cond: lc < 8
	// result: (SBFIZ [arm64BFAuxInt(lc, 8-lc)] x)
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64SLLconst {
			break
		}
		lc := v_0.AuxInt
		x := v_0.Args[0]
		if !(lc < 8) {
			break
		}
		// shift-left + sign-extend => signed bitfield insert-in-zero.
		v.reset(OpARM64SBFIZ)
		v.AuxInt = arm64BFAuxInt(lc, 8-lc)
		v.AddArg(x)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64MOVBstore_0 applies the first batch of rewrite
// rules to a MOVBstore value (byte store): offset folding, indexed-form
// conversion, store-of-zero, and elimination of redundant sign/zero
// extensions of the stored value (a byte store only uses the low 8 bits).
// It reports whether v was rewritten; further rules are tried by the _10 and
// later batches.
// NOTE(review): generated from gen/ARM64.rules — change the rules, not this file.
func rewriteValueARM64_OpARM64MOVBstore_0(v *Value) bool {
	b := v.Block
	_ = b
	config := b.Func.Config
	_ = config
	// match: (MOVBstore [off1] {sym} (ADDconst [off2] ptr) val mem)
	// cond: is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)
	// result: (MOVBstore [off1+off2] {sym} ptr val mem)
	for {
		off1 := v.AuxInt
		sym := v.Aux
		_ = v.Args[2]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64ADDconst {
			break
		}
		off2 := v_0.AuxInt
		ptr := v_0.Args[0]
		val := v.Args[1]
		mem := v.Args[2]
		// Fold a constant pointer adjustment into the store offset; the
		// SB/Flag_shared check avoids offsetting the static base in
		// shared (PIC) mode.
		if !(is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
			break
		}
		v.reset(OpARM64MOVBstore)
		v.AuxInt = off1 + off2
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(val)
		v.AddArg(mem)
		return true
	}
	// match: (MOVBstore [off] {sym} (ADD ptr idx) val mem)
	// cond: off == 0 && sym == nil
	// result: (MOVBstoreidx ptr idx val mem)
	for {
		off := v.AuxInt
		sym := v.Aux
		_ = v.Args[2]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64ADD {
			break
		}
		_ = v_0.Args[1]
		ptr := v_0.Args[0]
		idx := v_0.Args[1]
		val := v.Args[1]
		mem := v.Args[2]
		if !(off == 0 && sym == nil) {
			break
		}
		// Offset-free store to a sum of registers: use the indexed form.
		v.reset(OpARM64MOVBstoreidx)
		v.AddArg(ptr)
		v.AddArg(idx)
		v.AddArg(val)
		v.AddArg(mem)
		return true
	}
	// match: (MOVBstore [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) val mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)
	// result: (MOVBstore [off1+off2] {mergeSym(sym1,sym2)} ptr val mem)
	for {
		off1 := v.AuxInt
		sym1 := v.Aux
		_ = v.Args[2]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDaddr {
			break
		}
		off2 := v_0.AuxInt
		sym2 := v_0.Aux
		ptr := v_0.Args[0]
		val := v.Args[1]
		mem := v.Args[2]
		if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
			break
		}
		// Fold a symbolic address computation into the store.
		v.reset(OpARM64MOVBstore)
		v.AuxInt = off1 + off2
		v.Aux = mergeSym(sym1, sym2)
		v.AddArg(ptr)
		v.AddArg(val)
		v.AddArg(mem)
		return true
	}
	// match: (MOVBstore [off] {sym} ptr (MOVDconst [0]) mem)
	// cond:
	// result: (MOVBstorezero [off] {sym} ptr mem)
	for {
		off := v.AuxInt
		sym := v.Aux
		_ = v.Args[2]
		ptr := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		if v_1.AuxInt != 0 {
			break
		}
		mem := v.Args[2]
		// Storing constant zero: use the dedicated store-zero op.
		v.reset(OpARM64MOVBstorezero)
		v.AuxInt = off
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// The six rules below all strip a sign/zero extension from the stored
	// value: a byte store writes only the low 8 bits, so the extension is
	// irrelevant.
	// match: (MOVBstore [off] {sym} ptr (MOVBreg x) mem)
	// cond:
	// result: (MOVBstore [off] {sym} ptr x mem)
	for {
		off := v.AuxInt
		sym := v.Aux
		_ = v.Args[2]
		ptr := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVBreg {
			break
		}
		x := v_1.Args[0]
		mem := v.Args[2]
		v.reset(OpARM64MOVBstore)
		v.AuxInt = off
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(x)
		v.AddArg(mem)
		return true
	}
	// match: (MOVBstore [off] {sym} ptr (MOVBUreg x) mem)
	// cond:
	// result: (MOVBstore [off] {sym} ptr x mem)
	for {
		off := v.AuxInt
		sym := v.Aux
		_ = v.Args[2]
		ptr := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVBUreg {
			break
		}
		x := v_1.Args[0]
		mem := v.Args[2]
		v.reset(OpARM64MOVBstore)
		v.AuxInt = off
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(x)
		v.AddArg(mem)
		return true
	}
	// match: (MOVBstore [off] {sym} ptr (MOVHreg x) mem)
	// cond:
	// result: (MOVBstore [off] {sym} ptr x mem)
	for {
		off := v.AuxInt
		sym := v.Aux
		_ = v.Args[2]
		ptr := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVHreg {
			break
		}
		x := v_1.Args[0]
		mem := v.Args[2]
		v.reset(OpARM64MOVBstore)
		v.AuxInt = off
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(x)
		v.AddArg(mem)
		return true
	}
	// match: (MOVBstore [off] {sym} ptr (MOVHUreg x) mem)
	// cond:
	// result: (MOVBstore [off] {sym} ptr x mem)
	for {
		off := v.AuxInt
		sym := v.Aux
		_ = v.Args[2]
		ptr := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVHUreg {
			break
		}
		x := v_1.Args[0]
		mem := v.Args[2]
		v.reset(OpARM64MOVBstore)
		v.AuxInt = off
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(x)
		v.AddArg(mem)
		return true
	}
	// match: (MOVBstore [off] {sym} ptr (MOVWreg x) mem)
	// cond:
	// result: (MOVBstore [off] {sym} ptr x mem)
	for {
		off := v.AuxInt
		sym := v.Aux
		_ = v.Args[2]
		ptr := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVWreg {
			break
		}
		x := v_1.Args[0]
		mem := v.Args[2]
		v.reset(OpARM64MOVBstore)
		v.AuxInt = off
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(x)
		v.AddArg(mem)
		return true
	}
	// match: (MOVBstore [off] {sym} ptr (MOVWUreg x) mem)
	// cond:
	// result: (MOVBstore [off] {sym} ptr x mem)
	for {
		off := v.AuxInt
		sym := v.Aux
		_ = v.Args[2]
		ptr := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVWUreg {
			break
		}
		x := v_1.Args[0]
		mem := v.Args[2]
		v.reset(OpARM64MOVBstore)
		v.AuxInt = off
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(x)
		v.AddArg(mem)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64MOVBstore_10 applies the second batch of
// MOVBstore rewrite rules: store-merging. Each rule recognizes a byte store
// of bits 8..15 of a value w adjacent (at offset i, or at index+1) to a byte
// store of bits 0..7 of the same w, and merges the pair into a single
// halfword store (MOVHstore / MOVHstoreidx). The variants differ only in how
// the "high byte" is expressed: SRLconst [8], UBFX, SRLconst of a MOVDreg,
// or a pair of SRLconst [j] / [j-8]. The x.Uses == 1 && clobber(x)
// conditions ensure the inner store has no other users and is removed.
// It reports whether v was rewritten.
// NOTE(review): generated from gen/ARM64.rules — change the rules, not this file.
func rewriteValueARM64_OpARM64MOVBstore_10(v *Value) bool {
	// match: (MOVBstore [i] {s} ptr0 (SRLconst [8] w) x:(MOVBstore [i-1] {s} ptr1 w mem))
	// cond: x.Uses == 1 && isSamePtr(ptr0, ptr1) && clobber(x)
	// result: (MOVHstore [i-1] {s} ptr0 w mem)
	for {
		i := v.AuxInt
		s := v.Aux
		_ = v.Args[2]
		ptr0 := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64SRLconst {
			break
		}
		if v_1.AuxInt != 8 {
			break
		}
		w := v_1.Args[0]
		x := v.Args[2]
		if x.Op != OpARM64MOVBstore {
			break
		}
		if x.AuxInt != i-1 {
			break
		}
		if x.Aux != s {
			break
		}
		_ = x.Args[2]
		ptr1 := x.Args[0]
		if w != x.Args[1] {
			break
		}
		mem := x.Args[2]
		if !(x.Uses == 1 && isSamePtr(ptr0, ptr1) && clobber(x)) {
			break
		}
		v.reset(OpARM64MOVHstore)
		v.AuxInt = i - 1
		v.Aux = s
		v.AddArg(ptr0)
		v.AddArg(w)
		v.AddArg(mem)
		return true
	}
	// match: (MOVBstore [1] {s} (ADD ptr0 idx0) (SRLconst [8] w) x:(MOVBstoreidx ptr1 idx1 w mem))
	// cond: x.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x)
	// result: (MOVHstoreidx ptr1 idx1 w mem)
	for {
		if v.AuxInt != 1 {
			break
		}
		s := v.Aux
		_ = v.Args[2]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64ADD {
			break
		}
		_ = v_0.Args[1]
		ptr0 := v_0.Args[0]
		idx0 := v_0.Args[1]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64SRLconst {
			break
		}
		if v_1.AuxInt != 8 {
			break
		}
		w := v_1.Args[0]
		x := v.Args[2]
		if x.Op != OpARM64MOVBstoreidx {
			break
		}
		_ = x.Args[3]
		ptr1 := x.Args[0]
		idx1 := x.Args[1]
		if w != x.Args[2] {
			break
		}
		mem := x.Args[3]
		// ADD is commutative, so accept ptr/idx matched in either order.
		if !(x.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x)) {
			break
		}
		v.reset(OpARM64MOVHstoreidx)
		v.AddArg(ptr1)
		v.AddArg(idx1)
		v.AddArg(w)
		v.AddArg(mem)
		return true
	}
	// match: (MOVBstore [i] {s} ptr0 (UBFX [arm64BFAuxInt(8, 8)] w) x:(MOVBstore [i-1] {s} ptr1 w mem))
	// cond: x.Uses == 1 && isSamePtr(ptr0, ptr1) && clobber(x)
	// result: (MOVHstore [i-1] {s} ptr0 w mem)
	for {
		i := v.AuxInt
		s := v.Aux
		_ = v.Args[2]
		ptr0 := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64UBFX {
			break
		}
		if v_1.AuxInt != arm64BFAuxInt(8, 8) {
			break
		}
		w := v_1.Args[0]
		x := v.Args[2]
		if x.Op != OpARM64MOVBstore {
			break
		}
		if x.AuxInt != i-1 {
			break
		}
		if x.Aux != s {
			break
		}
		_ = x.Args[2]
		ptr1 := x.Args[0]
		if w != x.Args[1] {
			break
		}
		mem := x.Args[2]
		if !(x.Uses == 1 && isSamePtr(ptr0, ptr1) && clobber(x)) {
			break
		}
		v.reset(OpARM64MOVHstore)
		v.AuxInt = i - 1
		v.Aux = s
		v.AddArg(ptr0)
		v.AddArg(w)
		v.AddArg(mem)
		return true
	}
	// match: (MOVBstore [1] {s} (ADD ptr0 idx0) (UBFX [arm64BFAuxInt(8, 8)] w) x:(MOVBstoreidx ptr1 idx1 w mem))
	// cond: x.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x)
	// result: (MOVHstoreidx ptr1 idx1 w mem)
	for {
		if v.AuxInt != 1 {
			break
		}
		s := v.Aux
		_ = v.Args[2]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64ADD {
			break
		}
		_ = v_0.Args[1]
		ptr0 := v_0.Args[0]
		idx0 := v_0.Args[1]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64UBFX {
			break
		}
		if v_1.AuxInt != arm64BFAuxInt(8, 8) {
			break
		}
		w := v_1.Args[0]
		x := v.Args[2]
		if x.Op != OpARM64MOVBstoreidx {
			break
		}
		_ = x.Args[3]
		ptr1 := x.Args[0]
		idx1 := x.Args[1]
		if w != x.Args[2] {
			break
		}
		mem := x.Args[3]
		if !(x.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x)) {
			break
		}
		v.reset(OpARM64MOVHstoreidx)
		v.AddArg(ptr1)
		v.AddArg(idx1)
		v.AddArg(w)
		v.AddArg(mem)
		return true
	}
	// match: (MOVBstore [i] {s} ptr0 (UBFX [arm64BFAuxInt(8, 24)] w) x:(MOVBstore [i-1] {s} ptr1 w mem))
	// cond: x.Uses == 1 && isSamePtr(ptr0, ptr1) && clobber(x)
	// result: (MOVHstore [i-1] {s} ptr0 w mem)
	for {
		i := v.AuxInt
		s := v.Aux
		_ = v.Args[2]
		ptr0 := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64UBFX {
			break
		}
		if v_1.AuxInt != arm64BFAuxInt(8, 24) {
			break
		}
		w := v_1.Args[0]
		x := v.Args[2]
		if x.Op != OpARM64MOVBstore {
			break
		}
		if x.AuxInt != i-1 {
			break
		}
		if x.Aux != s {
			break
		}
		_ = x.Args[2]
		ptr1 := x.Args[0]
		if w != x.Args[1] {
			break
		}
		mem := x.Args[2]
		if !(x.Uses == 1 && isSamePtr(ptr0, ptr1) && clobber(x)) {
			break
		}
		v.reset(OpARM64MOVHstore)
		v.AuxInt = i - 1
		v.Aux = s
		v.AddArg(ptr0)
		v.AddArg(w)
		v.AddArg(mem)
		return true
	}
	// match: (MOVBstore [1] {s} (ADD ptr0 idx0) (UBFX [arm64BFAuxInt(8, 24)] w) x:(MOVBstoreidx ptr1 idx1 w mem))
	// cond: x.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x)
	// result: (MOVHstoreidx ptr1 idx1 w mem)
	for {
		if v.AuxInt != 1 {
			break
		}
		s := v.Aux
		_ = v.Args[2]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64ADD {
			break
		}
		_ = v_0.Args[1]
		ptr0 := v_0.Args[0]
		idx0 := v_0.Args[1]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64UBFX {
			break
		}
		if v_1.AuxInt != arm64BFAuxInt(8, 24) {
			break
		}
		w := v_1.Args[0]
		x := v.Args[2]
		if x.Op != OpARM64MOVBstoreidx {
			break
		}
		_ = x.Args[3]
		ptr1 := x.Args[0]
		idx1 := x.Args[1]
		if w != x.Args[2] {
			break
		}
		mem := x.Args[3]
		if !(x.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x)) {
			break
		}
		v.reset(OpARM64MOVHstoreidx)
		v.AddArg(ptr1)
		v.AddArg(idx1)
		v.AddArg(w)
		v.AddArg(mem)
		return true
	}
	// match: (MOVBstore [i] {s} ptr0 (SRLconst [8] (MOVDreg w)) x:(MOVBstore [i-1] {s} ptr1 w mem))
	// cond: x.Uses == 1 && isSamePtr(ptr0, ptr1) && clobber(x)
	// result: (MOVHstore [i-1] {s} ptr0 w mem)
	for {
		i := v.AuxInt
		s := v.Aux
		_ = v.Args[2]
		ptr0 := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64SRLconst {
			break
		}
		if v_1.AuxInt != 8 {
			break
		}
		v_1_0 := v_1.Args[0]
		if v_1_0.Op != OpARM64MOVDreg {
			break
		}
		w := v_1_0.Args[0]
		x := v.Args[2]
		if x.Op != OpARM64MOVBstore {
			break
		}
		if x.AuxInt != i-1 {
			break
		}
		if x.Aux != s {
			break
		}
		_ = x.Args[2]
		ptr1 := x.Args[0]
		if w != x.Args[1] {
			break
		}
		mem := x.Args[2]
		if !(x.Uses == 1 && isSamePtr(ptr0, ptr1) && clobber(x)) {
			break
		}
		v.reset(OpARM64MOVHstore)
		v.AuxInt = i - 1
		v.Aux = s
		v.AddArg(ptr0)
		v.AddArg(w)
		v.AddArg(mem)
		return true
	}
	// match: (MOVBstore [1] {s} (ADD ptr0 idx0) (SRLconst [8] (MOVDreg w)) x:(MOVBstoreidx ptr1 idx1 w mem))
	// cond: x.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x)
	// result: (MOVHstoreidx ptr1 idx1 w mem)
	for {
		if v.AuxInt != 1 {
			break
		}
		s := v.Aux
		_ = v.Args[2]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64ADD {
			break
		}
		_ = v_0.Args[1]
		ptr0 := v_0.Args[0]
		idx0 := v_0.Args[1]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64SRLconst {
			break
		}
		if v_1.AuxInt != 8 {
			break
		}
		v_1_0 := v_1.Args[0]
		if v_1_0.Op != OpARM64MOVDreg {
			break
		}
		w := v_1_0.Args[0]
		x := v.Args[2]
		if x.Op != OpARM64MOVBstoreidx {
			break
		}
		_ = x.Args[3]
		ptr1 := x.Args[0]
		idx1 := x.Args[1]
		if w != x.Args[2] {
			break
		}
		mem := x.Args[3]
		if !(x.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x)) {
			break
		}
		v.reset(OpARM64MOVHstoreidx)
		v.AddArg(ptr1)
		v.AddArg(idx1)
		v.AddArg(w)
		v.AddArg(mem)
		return true
	}
	// match: (MOVBstore [i] {s} ptr0 (SRLconst [j] w) x:(MOVBstore [i-1] {s} ptr1 w0:(SRLconst [j-8] w) mem))
	// cond: x.Uses == 1 && isSamePtr(ptr0, ptr1) && clobber(x)
	// result: (MOVHstore [i-1] {s} ptr0 w0 mem)
	for {
		i := v.AuxInt
		s := v.Aux
		_ = v.Args[2]
		ptr0 := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64SRLconst {
			break
		}
		j := v_1.AuxInt
		w := v_1.Args[0]
		x := v.Args[2]
		if x.Op != OpARM64MOVBstore {
			break
		}
		if x.AuxInt != i-1 {
			break
		}
		if x.Aux != s {
			break
		}
		_ = x.Args[2]
		ptr1 := x.Args[0]
		w0 := x.Args[1]
		if w0.Op != OpARM64SRLconst {
			break
		}
		// The two stored bytes are consecutive slices (shifts j and j-8)
		// of the same value w, so together they are the halfword w0.
		if w0.AuxInt != j-8 {
			break
		}
		if w != w0.Args[0] {
			break
		}
		mem := x.Args[2]
		if !(x.Uses == 1 && isSamePtr(ptr0, ptr1) && clobber(x)) {
			break
		}
		v.reset(OpARM64MOVHstore)
		v.AuxInt = i - 1
		v.Aux = s
		v.AddArg(ptr0)
		v.AddArg(w0)
		v.AddArg(mem)
		return true
	}
	// match: (MOVBstore [1] {s} (ADD ptr0 idx0) (SRLconst [j] w) x:(MOVBstoreidx ptr1 idx1 w0:(SRLconst [j-8] w) mem))
	// cond: x.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x)
	// result: (MOVHstoreidx ptr1 idx1 w0 mem)
	for {
		if v.AuxInt != 1 {
			break
		}
		s := v.Aux
		_ = v.Args[2]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64ADD {
			break
		}
		_ = v_0.Args[1]
		ptr0 := v_0.Args[0]
		idx0 := v_0.Args[1]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64SRLconst {
			break
		}
		j := v_1.AuxInt
		w := v_1.Args[0]
		x := v.Args[2]
		if x.Op != OpARM64MOVBstoreidx {
			break
		}
		_ = x.Args[3]
		ptr1 := x.Args[0]
		idx1 := x.Args[1]
		w0 := x.Args[2]
		if w0.Op != OpARM64SRLconst {
			break
		}
		if w0.AuxInt != j-8 {
			break
		}
		if w != w0.Args[0] {
			break
		}
		mem := x.Args[3]
		if !(x.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x)) {
			break
		}
		v.reset(OpARM64MOVHstoreidx)
		v.AddArg(ptr1)
		v.AddArg(idx1)
		v.AddArg(w0)
		v.AddArg(mem)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64MOVBstore_20 is one slice of the generated rewrite
// rules for the ARM64 MOVBstore op (code generated from gen/ARM64.rules; see
// the header of this file — do not edit by hand).
//
// Each rule below matches a chain of adjacent single-byte stores of
// shifted/extracted pieces of the same value w and, when the generated cond
// holds (single use of each intermediate store, same base pointer, matching
// shift/bitfield offsets), replaces the whole chain with a single wider store:
// MOVHstore, MOVWstore or MOVDstore, or their register-indexed (…storeidx)
// forms. Where the bytes were stored in descending significance order, a
// REV/REVW (byte-reverse) of w is inserted before the wide store.
//
// It reports whether any rewrite was applied; on success v is mutated in
// place via v.reset and the clobber()ed intermediate stores are dead.
func rewriteValueARM64_OpARM64MOVBstore_20(v *Value) bool {
	b := v.Block
	_ = b // b is only used by rules that create new values (b.NewValue0)
	// Rule: fold two adjacent byte stores of UBFX extracts into one halfword store.
	// match: (MOVBstore [i] {s} ptr0 (UBFX [bfc] w) x:(MOVBstore [i-1] {s} ptr1 w0:(UBFX [bfc2] w) mem))
	// cond: x.Uses == 1 && isSamePtr(ptr0, ptr1) && getARM64BFwidth(bfc) == 32 - getARM64BFlsb(bfc) && getARM64BFwidth(bfc2) == 32 - getARM64BFlsb(bfc2) && getARM64BFlsb(bfc2) == getARM64BFlsb(bfc) - 8 && clobber(x)
	// result: (MOVHstore [i-1] {s} ptr0 w0 mem)
	for {
		i := v.AuxInt
		s := v.Aux
		_ = v.Args[2]
		ptr0 := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64UBFX {
			break
		}
		bfc := v_1.AuxInt
		w := v_1.Args[0]
		x := v.Args[2]
		if x.Op != OpARM64MOVBstore {
			break
		}
		if x.AuxInt != i-1 {
			break
		}
		if x.Aux != s {
			break
		}
		_ = x.Args[2]
		ptr1 := x.Args[0]
		w0 := x.Args[1]
		if w0.Op != OpARM64UBFX {
			break
		}
		bfc2 := w0.AuxInt
		if w != w0.Args[0] {
			break
		}
		mem := x.Args[2]
		if !(x.Uses == 1 && isSamePtr(ptr0, ptr1) && getARM64BFwidth(bfc) == 32-getARM64BFlsb(bfc) && getARM64BFwidth(bfc2) == 32-getARM64BFlsb(bfc2) && getARM64BFlsb(bfc2) == getARM64BFlsb(bfc)-8 && clobber(x)) {
			break
		}
		v.reset(OpARM64MOVHstore)
		v.AuxInt = i - 1
		v.Aux = s
		v.AddArg(ptr0)
		v.AddArg(w0)
		v.AddArg(mem)
		return true
	}
	// Rule: same UBFX fold, register-indexed variant -> halfword indexed store.
	// match: (MOVBstore [1] {s} (ADD ptr0 idx0) (UBFX [bfc] w) x:(MOVBstoreidx ptr1 idx1 w0:(UBFX [bfc2] w) mem))
	// cond: x.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && getARM64BFwidth(bfc) == 32 - getARM64BFlsb(bfc) && getARM64BFwidth(bfc2) == 32 - getARM64BFlsb(bfc2) && getARM64BFlsb(bfc2) == getARM64BFlsb(bfc) - 8 && clobber(x)
	// result: (MOVHstoreidx ptr1 idx1 w0 mem)
	for {
		if v.AuxInt != 1 {
			break
		}
		s := v.Aux
		_ = v.Args[2]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64ADD {
			break
		}
		_ = v_0.Args[1]
		ptr0 := v_0.Args[0]
		idx0 := v_0.Args[1]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64UBFX {
			break
		}
		bfc := v_1.AuxInt
		w := v_1.Args[0]
		x := v.Args[2]
		if x.Op != OpARM64MOVBstoreidx {
			break
		}
		_ = x.Args[3]
		ptr1 := x.Args[0]
		idx1 := x.Args[1]
		w0 := x.Args[2]
		if w0.Op != OpARM64UBFX {
			break
		}
		bfc2 := w0.AuxInt
		if w != w0.Args[0] {
			break
		}
		mem := x.Args[3]
		if !(x.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && getARM64BFwidth(bfc) == 32-getARM64BFlsb(bfc) && getARM64BFwidth(bfc2) == 32-getARM64BFlsb(bfc2) && getARM64BFlsb(bfc2) == getARM64BFlsb(bfc)-8 && clobber(x)) {
			break
		}
		v.reset(OpARM64MOVHstoreidx)
		v.AddArg(ptr1)
		v.AddArg(idx1)
		v.AddArg(w0)
		v.AddArg(mem)
		return true
	}
	// Rule: fold two adjacent byte stores of SRLconst(MOVDreg w) into one halfword store.
	// match: (MOVBstore [i] {s} ptr0 (SRLconst [j] (MOVDreg w)) x:(MOVBstore [i-1] {s} ptr1 w0:(SRLconst [j-8] (MOVDreg w)) mem))
	// cond: x.Uses == 1 && isSamePtr(ptr0, ptr1) && clobber(x)
	// result: (MOVHstore [i-1] {s} ptr0 w0 mem)
	for {
		i := v.AuxInt
		s := v.Aux
		_ = v.Args[2]
		ptr0 := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64SRLconst {
			break
		}
		j := v_1.AuxInt
		v_1_0 := v_1.Args[0]
		if v_1_0.Op != OpARM64MOVDreg {
			break
		}
		w := v_1_0.Args[0]
		x := v.Args[2]
		if x.Op != OpARM64MOVBstore {
			break
		}
		if x.AuxInt != i-1 {
			break
		}
		if x.Aux != s {
			break
		}
		_ = x.Args[2]
		ptr1 := x.Args[0]
		w0 := x.Args[1]
		if w0.Op != OpARM64SRLconst {
			break
		}
		if w0.AuxInt != j-8 {
			break
		}
		w0_0 := w0.Args[0]
		if w0_0.Op != OpARM64MOVDreg {
			break
		}
		if w != w0_0.Args[0] {
			break
		}
		mem := x.Args[2]
		if !(x.Uses == 1 && isSamePtr(ptr0, ptr1) && clobber(x)) {
			break
		}
		v.reset(OpARM64MOVHstore)
		v.AuxInt = i - 1
		v.Aux = s
		v.AddArg(ptr0)
		v.AddArg(w0)
		v.AddArg(mem)
		return true
	}
	// Rule: same SRLconst(MOVDreg) fold, register-indexed variant.
	// match: (MOVBstore [1] {s} (ADD ptr0 idx0) (SRLconst [j] (MOVDreg w)) x:(MOVBstoreidx ptr1 idx1 w0:(SRLconst [j-8] (MOVDreg w)) mem))
	// cond: x.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x)
	// result: (MOVHstoreidx ptr1 idx1 w0 mem)
	for {
		if v.AuxInt != 1 {
			break
		}
		s := v.Aux
		_ = v.Args[2]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64ADD {
			break
		}
		_ = v_0.Args[1]
		ptr0 := v_0.Args[0]
		idx0 := v_0.Args[1]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64SRLconst {
			break
		}
		j := v_1.AuxInt
		v_1_0 := v_1.Args[0]
		if v_1_0.Op != OpARM64MOVDreg {
			break
		}
		w := v_1_0.Args[0]
		x := v.Args[2]
		if x.Op != OpARM64MOVBstoreidx {
			break
		}
		_ = x.Args[3]
		ptr1 := x.Args[0]
		idx1 := x.Args[1]
		w0 := x.Args[2]
		if w0.Op != OpARM64SRLconst {
			break
		}
		if w0.AuxInt != j-8 {
			break
		}
		w0_0 := w0.Args[0]
		if w0_0.Op != OpARM64MOVDreg {
			break
		}
		if w != w0_0.Args[0] {
			break
		}
		mem := x.Args[3]
		if !(x.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x)) {
			break
		}
		v.reset(OpARM64MOVHstoreidx)
		v.AddArg(ptr1)
		v.AddArg(idx1)
		v.AddArg(w0)
		v.AddArg(mem)
		return true
	}
	// Rule: fold eight byte stores of w>>0..w>>56 at i..i-7 into one
	// doubleword store of REV(w) (bytes were written most-significant first).
	// match: (MOVBstore [i] {s} ptr w x0:(MOVBstore [i-1] {s} ptr (SRLconst [8] w) x1:(MOVBstore [i-2] {s} ptr (SRLconst [16] w) x2:(MOVBstore [i-3] {s} ptr (SRLconst [24] w) x3:(MOVBstore [i-4] {s} ptr (SRLconst [32] w) x4:(MOVBstore [i-5] {s} ptr (SRLconst [40] w) x5:(MOVBstore [i-6] {s} ptr (SRLconst [48] w) x6:(MOVBstore [i-7] {s} ptr (SRLconst [56] w) mem))))))))
	// cond: x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && x4.Uses == 1 && x5.Uses == 1 && x6.Uses == 1 && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(x4) && clobber(x5) && clobber(x6)
	// result: (MOVDstore [i-7] {s} ptr (REV <w.Type> w) mem)
	for {
		i := v.AuxInt
		s := v.Aux
		_ = v.Args[2]
		ptr := v.Args[0]
		w := v.Args[1]
		x0 := v.Args[2]
		if x0.Op != OpARM64MOVBstore {
			break
		}
		if x0.AuxInt != i-1 {
			break
		}
		if x0.Aux != s {
			break
		}
		_ = x0.Args[2]
		if ptr != x0.Args[0] {
			break
		}
		x0_1 := x0.Args[1]
		if x0_1.Op != OpARM64SRLconst {
			break
		}
		if x0_1.AuxInt != 8 {
			break
		}
		if w != x0_1.Args[0] {
			break
		}
		x1 := x0.Args[2]
		if x1.Op != OpARM64MOVBstore {
			break
		}
		if x1.AuxInt != i-2 {
			break
		}
		if x1.Aux != s {
			break
		}
		_ = x1.Args[2]
		if ptr != x1.Args[0] {
			break
		}
		x1_1 := x1.Args[1]
		if x1_1.Op != OpARM64SRLconst {
			break
		}
		if x1_1.AuxInt != 16 {
			break
		}
		if w != x1_1.Args[0] {
			break
		}
		x2 := x1.Args[2]
		if x2.Op != OpARM64MOVBstore {
			break
		}
		if x2.AuxInt != i-3 {
			break
		}
		if x2.Aux != s {
			break
		}
		_ = x2.Args[2]
		if ptr != x2.Args[0] {
			break
		}
		x2_1 := x2.Args[1]
		if x2_1.Op != OpARM64SRLconst {
			break
		}
		if x2_1.AuxInt != 24 {
			break
		}
		if w != x2_1.Args[0] {
			break
		}
		x3 := x2.Args[2]
		if x3.Op != OpARM64MOVBstore {
			break
		}
		if x3.AuxInt != i-4 {
			break
		}
		if x3.Aux != s {
			break
		}
		_ = x3.Args[2]
		if ptr != x3.Args[0] {
			break
		}
		x3_1 := x3.Args[1]
		if x3_1.Op != OpARM64SRLconst {
			break
		}
		if x3_1.AuxInt != 32 {
			break
		}
		if w != x3_1.Args[0] {
			break
		}
		x4 := x3.Args[2]
		if x4.Op != OpARM64MOVBstore {
			break
		}
		if x4.AuxInt != i-5 {
			break
		}
		if x4.Aux != s {
			break
		}
		_ = x4.Args[2]
		if ptr != x4.Args[0] {
			break
		}
		x4_1 := x4.Args[1]
		if x4_1.Op != OpARM64SRLconst {
			break
		}
		if x4_1.AuxInt != 40 {
			break
		}
		if w != x4_1.Args[0] {
			break
		}
		x5 := x4.Args[2]
		if x5.Op != OpARM64MOVBstore {
			break
		}
		if x5.AuxInt != i-6 {
			break
		}
		if x5.Aux != s {
			break
		}
		_ = x5.Args[2]
		if ptr != x5.Args[0] {
			break
		}
		x5_1 := x5.Args[1]
		if x5_1.Op != OpARM64SRLconst {
			break
		}
		if x5_1.AuxInt != 48 {
			break
		}
		if w != x5_1.Args[0] {
			break
		}
		x6 := x5.Args[2]
		if x6.Op != OpARM64MOVBstore {
			break
		}
		if x6.AuxInt != i-7 {
			break
		}
		if x6.Aux != s {
			break
		}
		_ = x6.Args[2]
		if ptr != x6.Args[0] {
			break
		}
		x6_1 := x6.Args[1]
		if x6_1.Op != OpARM64SRLconst {
			break
		}
		if x6_1.AuxInt != 56 {
			break
		}
		if w != x6_1.Args[0] {
			break
		}
		mem := x6.Args[2]
		if !(x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && x4.Uses == 1 && x5.Uses == 1 && x6.Uses == 1 && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(x4) && clobber(x5) && clobber(x6)) {
			break
		}
		v.reset(OpARM64MOVDstore)
		v.AuxInt = i - 7
		v.Aux = s
		v.AddArg(ptr)
		v0 := b.NewValue0(x6.Pos, OpARM64REV, w.Type)
		v0.AddArg(w)
		v.AddArg(v0)
		v.AddArg(mem)
		return true
	}
	// Rule: same eight-byte REV fold when the innermost store is
	// register-indexed -> doubleword indexed store of REV(w).
	// match: (MOVBstore [7] {s} p w x0:(MOVBstore [6] {s} p (SRLconst [8] w) x1:(MOVBstore [5] {s} p (SRLconst [16] w) x2:(MOVBstore [4] {s} p (SRLconst [24] w) x3:(MOVBstore [3] {s} p (SRLconst [32] w) x4:(MOVBstore [2] {s} p (SRLconst [40] w) x5:(MOVBstore [1] {s} p1:(ADD ptr1 idx1) (SRLconst [48] w) x6:(MOVBstoreidx ptr0 idx0 (SRLconst [56] w) mem))))))))
	// cond: x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && x4.Uses == 1 && x5.Uses == 1 && x6.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && isSamePtr(p1, p) && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(x4) && clobber(x5) && clobber(x6)
	// result: (MOVDstoreidx ptr0 idx0 (REV <w.Type> w) mem)
	for {
		if v.AuxInt != 7 {
			break
		}
		s := v.Aux
		_ = v.Args[2]
		p := v.Args[0]
		w := v.Args[1]
		x0 := v.Args[2]
		if x0.Op != OpARM64MOVBstore {
			break
		}
		if x0.AuxInt != 6 {
			break
		}
		if x0.Aux != s {
			break
		}
		_ = x0.Args[2]
		if p != x0.Args[0] {
			break
		}
		x0_1 := x0.Args[1]
		if x0_1.Op != OpARM64SRLconst {
			break
		}
		if x0_1.AuxInt != 8 {
			break
		}
		if w != x0_1.Args[0] {
			break
		}
		x1 := x0.Args[2]
		if x1.Op != OpARM64MOVBstore {
			break
		}
		if x1.AuxInt != 5 {
			break
		}
		if x1.Aux != s {
			break
		}
		_ = x1.Args[2]
		if p != x1.Args[0] {
			break
		}
		x1_1 := x1.Args[1]
		if x1_1.Op != OpARM64SRLconst {
			break
		}
		if x1_1.AuxInt != 16 {
			break
		}
		if w != x1_1.Args[0] {
			break
		}
		x2 := x1.Args[2]
		if x2.Op != OpARM64MOVBstore {
			break
		}
		if x2.AuxInt != 4 {
			break
		}
		if x2.Aux != s {
			break
		}
		_ = x2.Args[2]
		if p != x2.Args[0] {
			break
		}
		x2_1 := x2.Args[1]
		if x2_1.Op != OpARM64SRLconst {
			break
		}
		if x2_1.AuxInt != 24 {
			break
		}
		if w != x2_1.Args[0] {
			break
		}
		x3 := x2.Args[2]
		if x3.Op != OpARM64MOVBstore {
			break
		}
		if x3.AuxInt != 3 {
			break
		}
		if x3.Aux != s {
			break
		}
		_ = x3.Args[2]
		if p != x3.Args[0] {
			break
		}
		x3_1 := x3.Args[1]
		if x3_1.Op != OpARM64SRLconst {
			break
		}
		if x3_1.AuxInt != 32 {
			break
		}
		if w != x3_1.Args[0] {
			break
		}
		x4 := x3.Args[2]
		if x4.Op != OpARM64MOVBstore {
			break
		}
		if x4.AuxInt != 2 {
			break
		}
		if x4.Aux != s {
			break
		}
		_ = x4.Args[2]
		if p != x4.Args[0] {
			break
		}
		x4_1 := x4.Args[1]
		if x4_1.Op != OpARM64SRLconst {
			break
		}
		if x4_1.AuxInt != 40 {
			break
		}
		if w != x4_1.Args[0] {
			break
		}
		x5 := x4.Args[2]
		if x5.Op != OpARM64MOVBstore {
			break
		}
		if x5.AuxInt != 1 {
			break
		}
		if x5.Aux != s {
			break
		}
		_ = x5.Args[2]
		p1 := x5.Args[0]
		if p1.Op != OpARM64ADD {
			break
		}
		_ = p1.Args[1]
		ptr1 := p1.Args[0]
		idx1 := p1.Args[1]
		x5_1 := x5.Args[1]
		if x5_1.Op != OpARM64SRLconst {
			break
		}
		if x5_1.AuxInt != 48 {
			break
		}
		if w != x5_1.Args[0] {
			break
		}
		x6 := x5.Args[2]
		if x6.Op != OpARM64MOVBstoreidx {
			break
		}
		_ = x6.Args[3]
		ptr0 := x6.Args[0]
		idx0 := x6.Args[1]
		x6_2 := x6.Args[2]
		if x6_2.Op != OpARM64SRLconst {
			break
		}
		if x6_2.AuxInt != 56 {
			break
		}
		if w != x6_2.Args[0] {
			break
		}
		mem := x6.Args[3]
		if !(x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && x3.Uses == 1 && x4.Uses == 1 && x5.Uses == 1 && x6.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && isSamePtr(p1, p) && clobber(x0) && clobber(x1) && clobber(x2) && clobber(x3) && clobber(x4) && clobber(x5) && clobber(x6)) {
			break
		}
		v.reset(OpARM64MOVDstoreidx)
		v.AddArg(ptr0)
		v.AddArg(idx0)
		v0 := b.NewValue0(x5.Pos, OpARM64REV, w.Type)
		v0.AddArg(w)
		v.AddArg(v0)
		v.AddArg(mem)
		return true
	}
	// Rule: fold four byte stores of UBFX extracts (8/16/24-bit lsb) into one
	// word store of REVW(w) (32-bit byte-reverse).
	// match: (MOVBstore [i] {s} ptr w x0:(MOVBstore [i-1] {s} ptr (UBFX [arm64BFAuxInt(8, 24)] w) x1:(MOVBstore [i-2] {s} ptr (UBFX [arm64BFAuxInt(16, 16)] w) x2:(MOVBstore [i-3] {s} ptr (UBFX [arm64BFAuxInt(24, 8)] w) mem))))
	// cond: x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && clobber(x0) && clobber(x1) && clobber(x2)
	// result: (MOVWstore [i-3] {s} ptr (REVW <w.Type> w) mem)
	for {
		i := v.AuxInt
		s := v.Aux
		_ = v.Args[2]
		ptr := v.Args[0]
		w := v.Args[1]
		x0 := v.Args[2]
		if x0.Op != OpARM64MOVBstore {
			break
		}
		if x0.AuxInt != i-1 {
			break
		}
		if x0.Aux != s {
			break
		}
		_ = x0.Args[2]
		if ptr != x0.Args[0] {
			break
		}
		x0_1 := x0.Args[1]
		if x0_1.Op != OpARM64UBFX {
			break
		}
		if x0_1.AuxInt != arm64BFAuxInt(8, 24) {
			break
		}
		if w != x0_1.Args[0] {
			break
		}
		x1 := x0.Args[2]
		if x1.Op != OpARM64MOVBstore {
			break
		}
		if x1.AuxInt != i-2 {
			break
		}
		if x1.Aux != s {
			break
		}
		_ = x1.Args[2]
		if ptr != x1.Args[0] {
			break
		}
		x1_1 := x1.Args[1]
		if x1_1.Op != OpARM64UBFX {
			break
		}
		if x1_1.AuxInt != arm64BFAuxInt(16, 16) {
			break
		}
		if w != x1_1.Args[0] {
			break
		}
		x2 := x1.Args[2]
		if x2.Op != OpARM64MOVBstore {
			break
		}
		if x2.AuxInt != i-3 {
			break
		}
		if x2.Aux != s {
			break
		}
		_ = x2.Args[2]
		if ptr != x2.Args[0] {
			break
		}
		x2_1 := x2.Args[1]
		if x2_1.Op != OpARM64UBFX {
			break
		}
		if x2_1.AuxInt != arm64BFAuxInt(24, 8) {
			break
		}
		if w != x2_1.Args[0] {
			break
		}
		mem := x2.Args[2]
		if !(x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && clobber(x0) && clobber(x1) && clobber(x2)) {
			break
		}
		v.reset(OpARM64MOVWstore)
		v.AuxInt = i - 3
		v.Aux = s
		v.AddArg(ptr)
		v0 := b.NewValue0(x2.Pos, OpARM64REVW, w.Type)
		v0.AddArg(w)
		v.AddArg(v0)
		v.AddArg(mem)
		return true
	}
	// Rule: same four-byte UBFX/REVW fold when the innermost store is
	// register-indexed -> word indexed store of REVW(w).
	// match: (MOVBstore [3] {s} p w x0:(MOVBstore [2] {s} p (UBFX [arm64BFAuxInt(8, 24)] w) x1:(MOVBstore [1] {s} p1:(ADD ptr1 idx1) (UBFX [arm64BFAuxInt(16, 16)] w) x2:(MOVBstoreidx ptr0 idx0 (UBFX [arm64BFAuxInt(24, 8)] w) mem))))
	// cond: x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && isSamePtr(p1, p) && clobber(x0) && clobber(x1) && clobber(x2)
	// result: (MOVWstoreidx ptr0 idx0 (REVW <w.Type> w) mem)
	for {
		if v.AuxInt != 3 {
			break
		}
		s := v.Aux
		_ = v.Args[2]
		p := v.Args[0]
		w := v.Args[1]
		x0 := v.Args[2]
		if x0.Op != OpARM64MOVBstore {
			break
		}
		if x0.AuxInt != 2 {
			break
		}
		if x0.Aux != s {
			break
		}
		_ = x0.Args[2]
		if p != x0.Args[0] {
			break
		}
		x0_1 := x0.Args[1]
		if x0_1.Op != OpARM64UBFX {
			break
		}
		if x0_1.AuxInt != arm64BFAuxInt(8, 24) {
			break
		}
		if w != x0_1.Args[0] {
			break
		}
		x1 := x0.Args[2]
		if x1.Op != OpARM64MOVBstore {
			break
		}
		if x1.AuxInt != 1 {
			break
		}
		if x1.Aux != s {
			break
		}
		_ = x1.Args[2]
		p1 := x1.Args[0]
		if p1.Op != OpARM64ADD {
			break
		}
		_ = p1.Args[1]
		ptr1 := p1.Args[0]
		idx1 := p1.Args[1]
		x1_1 := x1.Args[1]
		if x1_1.Op != OpARM64UBFX {
			break
		}
		if x1_1.AuxInt != arm64BFAuxInt(16, 16) {
			break
		}
		if w != x1_1.Args[0] {
			break
		}
		x2 := x1.Args[2]
		if x2.Op != OpARM64MOVBstoreidx {
			break
		}
		_ = x2.Args[3]
		ptr0 := x2.Args[0]
		idx0 := x2.Args[1]
		x2_2 := x2.Args[2]
		if x2_2.Op != OpARM64UBFX {
			break
		}
		if x2_2.AuxInt != arm64BFAuxInt(24, 8) {
			break
		}
		if w != x2_2.Args[0] {
			break
		}
		mem := x2.Args[3]
		if !(x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && isSamePtr(p1, p) && clobber(x0) && clobber(x1) && clobber(x2)) {
			break
		}
		v.reset(OpARM64MOVWstoreidx)
		v.AddArg(ptr0)
		v.AddArg(idx0)
		v0 := b.NewValue0(x1.Pos, OpARM64REVW, w.Type)
		v0.AddArg(w)
		v.AddArg(v0)
		v.AddArg(mem)
		return true
	}
	// Rule: fold four byte stores of SRLconst(MOVDreg w) by 8/16/24 into one
	// word store of REVW(w).
	// match: (MOVBstore [i] {s} ptr w x0:(MOVBstore [i-1] {s} ptr (SRLconst [8] (MOVDreg w)) x1:(MOVBstore [i-2] {s} ptr (SRLconst [16] (MOVDreg w)) x2:(MOVBstore [i-3] {s} ptr (SRLconst [24] (MOVDreg w)) mem))))
	// cond: x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && clobber(x0) && clobber(x1) && clobber(x2)
	// result: (MOVWstore [i-3] {s} ptr (REVW <w.Type> w) mem)
	for {
		i := v.AuxInt
		s := v.Aux
		_ = v.Args[2]
		ptr := v.Args[0]
		w := v.Args[1]
		x0 := v.Args[2]
		if x0.Op != OpARM64MOVBstore {
			break
		}
		if x0.AuxInt != i-1 {
			break
		}
		if x0.Aux != s {
			break
		}
		_ = x0.Args[2]
		if ptr != x0.Args[0] {
			break
		}
		x0_1 := x0.Args[1]
		if x0_1.Op != OpARM64SRLconst {
			break
		}
		if x0_1.AuxInt != 8 {
			break
		}
		x0_1_0 := x0_1.Args[0]
		if x0_1_0.Op != OpARM64MOVDreg {
			break
		}
		if w != x0_1_0.Args[0] {
			break
		}
		x1 := x0.Args[2]
		if x1.Op != OpARM64MOVBstore {
			break
		}
		if x1.AuxInt != i-2 {
			break
		}
		if x1.Aux != s {
			break
		}
		_ = x1.Args[2]
		if ptr != x1.Args[0] {
			break
		}
		x1_1 := x1.Args[1]
		if x1_1.Op != OpARM64SRLconst {
			break
		}
		if x1_1.AuxInt != 16 {
			break
		}
		x1_1_0 := x1_1.Args[0]
		if x1_1_0.Op != OpARM64MOVDreg {
			break
		}
		if w != x1_1_0.Args[0] {
			break
		}
		x2 := x1.Args[2]
		if x2.Op != OpARM64MOVBstore {
			break
		}
		if x2.AuxInt != i-3 {
			break
		}
		if x2.Aux != s {
			break
		}
		_ = x2.Args[2]
		if ptr != x2.Args[0] {
			break
		}
		x2_1 := x2.Args[1]
		if x2_1.Op != OpARM64SRLconst {
			break
		}
		if x2_1.AuxInt != 24 {
			break
		}
		x2_1_0 := x2_1.Args[0]
		if x2_1_0.Op != OpARM64MOVDreg {
			break
		}
		if w != x2_1_0.Args[0] {
			break
		}
		mem := x2.Args[2]
		if !(x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && clobber(x0) && clobber(x1) && clobber(x2)) {
			break
		}
		v.reset(OpARM64MOVWstore)
		v.AuxInt = i - 3
		v.Aux = s
		v.AddArg(ptr)
		v0 := b.NewValue0(x2.Pos, OpARM64REVW, w.Type)
		v0.AddArg(w)
		v.AddArg(v0)
		v.AddArg(mem)
		return true
	}
	// Rule: same four-byte SRLconst(MOVDreg)/REVW fold, register-indexed variant.
	// match: (MOVBstore [3] {s} p w x0:(MOVBstore [2] {s} p (SRLconst [8] (MOVDreg w)) x1:(MOVBstore [1] {s} p1:(ADD ptr1 idx1) (SRLconst [16] (MOVDreg w)) x2:(MOVBstoreidx ptr0 idx0 (SRLconst [24] (MOVDreg w)) mem))))
	// cond: x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && isSamePtr(p1, p) && clobber(x0) && clobber(x1) && clobber(x2)
	// result: (MOVWstoreidx ptr0 idx0 (REVW <w.Type> w) mem)
	for {
		if v.AuxInt != 3 {
			break
		}
		s := v.Aux
		_ = v.Args[2]
		p := v.Args[0]
		w := v.Args[1]
		x0 := v.Args[2]
		if x0.Op != OpARM64MOVBstore {
			break
		}
		if x0.AuxInt != 2 {
			break
		}
		if x0.Aux != s {
			break
		}
		_ = x0.Args[2]
		if p != x0.Args[0] {
			break
		}
		x0_1 := x0.Args[1]
		if x0_1.Op != OpARM64SRLconst {
			break
		}
		if x0_1.AuxInt != 8 {
			break
		}
		x0_1_0 := x0_1.Args[0]
		if x0_1_0.Op != OpARM64MOVDreg {
			break
		}
		if w != x0_1_0.Args[0] {
			break
		}
		x1 := x0.Args[2]
		if x1.Op != OpARM64MOVBstore {
			break
		}
		if x1.AuxInt != 1 {
			break
		}
		if x1.Aux != s {
			break
		}
		_ = x1.Args[2]
		p1 := x1.Args[0]
		if p1.Op != OpARM64ADD {
			break
		}
		_ = p1.Args[1]
		ptr1 := p1.Args[0]
		idx1 := p1.Args[1]
		x1_1 := x1.Args[1]
		if x1_1.Op != OpARM64SRLconst {
			break
		}
		if x1_1.AuxInt != 16 {
			break
		}
		x1_1_0 := x1_1.Args[0]
		if x1_1_0.Op != OpARM64MOVDreg {
			break
		}
		if w != x1_1_0.Args[0] {
			break
		}
		x2 := x1.Args[2]
		if x2.Op != OpARM64MOVBstoreidx {
			break
		}
		_ = x2.Args[3]
		ptr0 := x2.Args[0]
		idx0 := x2.Args[1]
		x2_2 := x2.Args[2]
		if x2_2.Op != OpARM64SRLconst {
			break
		}
		if x2_2.AuxInt != 24 {
			break
		}
		x2_2_0 := x2_2.Args[0]
		if x2_2_0.Op != OpARM64MOVDreg {
			break
		}
		if w != x2_2_0.Args[0] {
			break
		}
		mem := x2.Args[3]
		if !(x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && isSamePtr(p1, p) && clobber(x0) && clobber(x1) && clobber(x2)) {
			break
		}
		v.reset(OpARM64MOVWstoreidx)
		v.AddArg(ptr0)
		v.AddArg(idx0)
		v0 := b.NewValue0(x1.Pos, OpARM64REVW, w.Type)
		v0.AddArg(w)
		v.AddArg(v0)
		v.AddArg(mem)
		return true
	}
	// No rule in this slice matched.
	return false
}
 11066  func rewriteValueARM64_OpARM64MOVBstore_30(v *Value) bool {
 11067  	b := v.Block
 11068  	_ = b
 11069  	// match: (MOVBstore [i] {s} ptr w x0:(MOVBstore [i-1] {s} ptr (SRLconst [8] w) x1:(MOVBstore [i-2] {s} ptr (SRLconst [16] w) x2:(MOVBstore [i-3] {s} ptr (SRLconst [24] w) mem))))
 11070  	// cond: x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && clobber(x0) && clobber(x1) && clobber(x2)
 11071  	// result: (MOVWstore [i-3] {s} ptr (REVW <w.Type> w) mem)
 11072  	for {
 11073  		i := v.AuxInt
 11074  		s := v.Aux
 11075  		_ = v.Args[2]
 11076  		ptr := v.Args[0]
 11077  		w := v.Args[1]
 11078  		x0 := v.Args[2]
 11079  		if x0.Op != OpARM64MOVBstore {
 11080  			break
 11081  		}
 11082  		if x0.AuxInt != i-1 {
 11083  			break
 11084  		}
 11085  		if x0.Aux != s {
 11086  			break
 11087  		}
 11088  		_ = x0.Args[2]
 11089  		if ptr != x0.Args[0] {
 11090  			break
 11091  		}
 11092  		x0_1 := x0.Args[1]
 11093  		if x0_1.Op != OpARM64SRLconst {
 11094  			break
 11095  		}
 11096  		if x0_1.AuxInt != 8 {
 11097  			break
 11098  		}
 11099  		if w != x0_1.Args[0] {
 11100  			break
 11101  		}
 11102  		x1 := x0.Args[2]
 11103  		if x1.Op != OpARM64MOVBstore {
 11104  			break
 11105  		}
 11106  		if x1.AuxInt != i-2 {
 11107  			break
 11108  		}
 11109  		if x1.Aux != s {
 11110  			break
 11111  		}
 11112  		_ = x1.Args[2]
 11113  		if ptr != x1.Args[0] {
 11114  			break
 11115  		}
 11116  		x1_1 := x1.Args[1]
 11117  		if x1_1.Op != OpARM64SRLconst {
 11118  			break
 11119  		}
 11120  		if x1_1.AuxInt != 16 {
 11121  			break
 11122  		}
 11123  		if w != x1_1.Args[0] {
 11124  			break
 11125  		}
 11126  		x2 := x1.Args[2]
 11127  		if x2.Op != OpARM64MOVBstore {
 11128  			break
 11129  		}
 11130  		if x2.AuxInt != i-3 {
 11131  			break
 11132  		}
 11133  		if x2.Aux != s {
 11134  			break
 11135  		}
 11136  		_ = x2.Args[2]
 11137  		if ptr != x2.Args[0] {
 11138  			break
 11139  		}
 11140  		x2_1 := x2.Args[1]
 11141  		if x2_1.Op != OpARM64SRLconst {
 11142  			break
 11143  		}
 11144  		if x2_1.AuxInt != 24 {
 11145  			break
 11146  		}
 11147  		if w != x2_1.Args[0] {
 11148  			break
 11149  		}
 11150  		mem := x2.Args[2]
 11151  		if !(x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && clobber(x0) && clobber(x1) && clobber(x2)) {
 11152  			break
 11153  		}
 11154  		v.reset(OpARM64MOVWstore)
 11155  		v.AuxInt = i - 3
 11156  		v.Aux = s
 11157  		v.AddArg(ptr)
 11158  		v0 := b.NewValue0(x2.Pos, OpARM64REVW, w.Type)
 11159  		v0.AddArg(w)
 11160  		v.AddArg(v0)
 11161  		v.AddArg(mem)
 11162  		return true
 11163  	}
 11164  	// match: (MOVBstore [3] {s} p w x0:(MOVBstore [2] {s} p (SRLconst [8] w) x1:(MOVBstore [1] {s} p1:(ADD ptr1 idx1) (SRLconst [16] w) x2:(MOVBstoreidx ptr0 idx0 (SRLconst [24] w) mem))))
 11165  	// cond: x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && isSamePtr(p1, p) && clobber(x0) && clobber(x1) && clobber(x2)
 11166  	// result: (MOVWstoreidx ptr0 idx0 (REVW <w.Type> w) mem)
 11167  	for {
 11168  		if v.AuxInt != 3 {
 11169  			break
 11170  		}
 11171  		s := v.Aux
 11172  		_ = v.Args[2]
 11173  		p := v.Args[0]
 11174  		w := v.Args[1]
 11175  		x0 := v.Args[2]
 11176  		if x0.Op != OpARM64MOVBstore {
 11177  			break
 11178  		}
 11179  		if x0.AuxInt != 2 {
 11180  			break
 11181  		}
 11182  		if x0.Aux != s {
 11183  			break
 11184  		}
 11185  		_ = x0.Args[2]
 11186  		if p != x0.Args[0] {
 11187  			break
 11188  		}
 11189  		x0_1 := x0.Args[1]
 11190  		if x0_1.Op != OpARM64SRLconst {
 11191  			break
 11192  		}
 11193  		if x0_1.AuxInt != 8 {
 11194  			break
 11195  		}
 11196  		if w != x0_1.Args[0] {
 11197  			break
 11198  		}
 11199  		x1 := x0.Args[2]
 11200  		if x1.Op != OpARM64MOVBstore {
 11201  			break
 11202  		}
 11203  		if x1.AuxInt != 1 {
 11204  			break
 11205  		}
 11206  		if x1.Aux != s {
 11207  			break
 11208  		}
 11209  		_ = x1.Args[2]
 11210  		p1 := x1.Args[0]
 11211  		if p1.Op != OpARM64ADD {
 11212  			break
 11213  		}
 11214  		_ = p1.Args[1]
 11215  		ptr1 := p1.Args[0]
 11216  		idx1 := p1.Args[1]
 11217  		x1_1 := x1.Args[1]
 11218  		if x1_1.Op != OpARM64SRLconst {
 11219  			break
 11220  		}
 11221  		if x1_1.AuxInt != 16 {
 11222  			break
 11223  		}
 11224  		if w != x1_1.Args[0] {
 11225  			break
 11226  		}
 11227  		x2 := x1.Args[2]
 11228  		if x2.Op != OpARM64MOVBstoreidx {
 11229  			break
 11230  		}
 11231  		_ = x2.Args[3]
 11232  		ptr0 := x2.Args[0]
 11233  		idx0 := x2.Args[1]
 11234  		x2_2 := x2.Args[2]
 11235  		if x2_2.Op != OpARM64SRLconst {
 11236  			break
 11237  		}
 11238  		if x2_2.AuxInt != 24 {
 11239  			break
 11240  		}
 11241  		if w != x2_2.Args[0] {
 11242  			break
 11243  		}
 11244  		mem := x2.Args[3]
 11245  		if !(x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && isSamePtr(p1, p) && clobber(x0) && clobber(x1) && clobber(x2)) {
 11246  			break
 11247  		}
 11248  		v.reset(OpARM64MOVWstoreidx)
 11249  		v.AddArg(ptr0)
 11250  		v.AddArg(idx0)
 11251  		v0 := b.NewValue0(x1.Pos, OpARM64REVW, w.Type)
 11252  		v0.AddArg(w)
 11253  		v.AddArg(v0)
 11254  		v.AddArg(mem)
 11255  		return true
 11256  	}
 11257  	// match: (MOVBstore [i] {s} ptr w x:(MOVBstore [i-1] {s} ptr (SRLconst [8] w) mem))
 11258  	// cond: x.Uses == 1 && clobber(x)
 11259  	// result: (MOVHstore [i-1] {s} ptr (REV16W <w.Type> w) mem)
 11260  	for {
 11261  		i := v.AuxInt
 11262  		s := v.Aux
 11263  		_ = v.Args[2]
 11264  		ptr := v.Args[0]
 11265  		w := v.Args[1]
 11266  		x := v.Args[2]
 11267  		if x.Op != OpARM64MOVBstore {
 11268  			break
 11269  		}
 11270  		if x.AuxInt != i-1 {
 11271  			break
 11272  		}
 11273  		if x.Aux != s {
 11274  			break
 11275  		}
 11276  		_ = x.Args[2]
 11277  		if ptr != x.Args[0] {
 11278  			break
 11279  		}
 11280  		x_1 := x.Args[1]
 11281  		if x_1.Op != OpARM64SRLconst {
 11282  			break
 11283  		}
 11284  		if x_1.AuxInt != 8 {
 11285  			break
 11286  		}
 11287  		if w != x_1.Args[0] {
 11288  			break
 11289  		}
 11290  		mem := x.Args[2]
 11291  		if !(x.Uses == 1 && clobber(x)) {
 11292  			break
 11293  		}
 11294  		v.reset(OpARM64MOVHstore)
 11295  		v.AuxInt = i - 1
 11296  		v.Aux = s
 11297  		v.AddArg(ptr)
 11298  		v0 := b.NewValue0(x.Pos, OpARM64REV16W, w.Type)
 11299  		v0.AddArg(w)
 11300  		v.AddArg(v0)
 11301  		v.AddArg(mem)
 11302  		return true
 11303  	}
 11304  	// match: (MOVBstore [1] {s} (ADD ptr1 idx1) w x:(MOVBstoreidx ptr0 idx0 (SRLconst [8] w) mem))
 11305  	// cond: x.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x)
 11306  	// result: (MOVHstoreidx ptr0 idx0 (REV16W <w.Type> w) mem)
 11307  	for {
 11308  		if v.AuxInt != 1 {
 11309  			break
 11310  		}
 11311  		s := v.Aux
 11312  		_ = v.Args[2]
 11313  		v_0 := v.Args[0]
 11314  		if v_0.Op != OpARM64ADD {
 11315  			break
 11316  		}
 11317  		_ = v_0.Args[1]
 11318  		ptr1 := v_0.Args[0]
 11319  		idx1 := v_0.Args[1]
 11320  		w := v.Args[1]
 11321  		x := v.Args[2]
 11322  		if x.Op != OpARM64MOVBstoreidx {
 11323  			break
 11324  		}
 11325  		_ = x.Args[3]
 11326  		ptr0 := x.Args[0]
 11327  		idx0 := x.Args[1]
 11328  		x_2 := x.Args[2]
 11329  		if x_2.Op != OpARM64SRLconst {
 11330  			break
 11331  		}
 11332  		if x_2.AuxInt != 8 {
 11333  			break
 11334  		}
 11335  		if w != x_2.Args[0] {
 11336  			break
 11337  		}
 11338  		mem := x.Args[3]
 11339  		if !(x.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x)) {
 11340  			break
 11341  		}
 11342  		v.reset(OpARM64MOVHstoreidx)
 11343  		v.AddArg(ptr0)
 11344  		v.AddArg(idx0)
 11345  		v0 := b.NewValue0(v.Pos, OpARM64REV16W, w.Type)
 11346  		v0.AddArg(w)
 11347  		v.AddArg(v0)
 11348  		v.AddArg(mem)
 11349  		return true
 11350  	}
 11351  	// match: (MOVBstore [i] {s} ptr w x:(MOVBstore [i-1] {s} ptr (UBFX [arm64BFAuxInt(8, 8)] w) mem))
 11352  	// cond: x.Uses == 1 && clobber(x)
 11353  	// result: (MOVHstore [i-1] {s} ptr (REV16W <w.Type> w) mem)
 11354  	for {
 11355  		i := v.AuxInt
 11356  		s := v.Aux
 11357  		_ = v.Args[2]
 11358  		ptr := v.Args[0]
 11359  		w := v.Args[1]
 11360  		x := v.Args[2]
 11361  		if x.Op != OpARM64MOVBstore {
 11362  			break
 11363  		}
 11364  		if x.AuxInt != i-1 {
 11365  			break
 11366  		}
 11367  		if x.Aux != s {
 11368  			break
 11369  		}
 11370  		_ = x.Args[2]
 11371  		if ptr != x.Args[0] {
 11372  			break
 11373  		}
 11374  		x_1 := x.Args[1]
 11375  		if x_1.Op != OpARM64UBFX {
 11376  			break
 11377  		}
 11378  		if x_1.AuxInt != arm64BFAuxInt(8, 8) {
 11379  			break
 11380  		}
 11381  		if w != x_1.Args[0] {
 11382  			break
 11383  		}
 11384  		mem := x.Args[2]
 11385  		if !(x.Uses == 1 && clobber(x)) {
 11386  			break
 11387  		}
 11388  		v.reset(OpARM64MOVHstore)
 11389  		v.AuxInt = i - 1
 11390  		v.Aux = s
 11391  		v.AddArg(ptr)
 11392  		v0 := b.NewValue0(x.Pos, OpARM64REV16W, w.Type)
 11393  		v0.AddArg(w)
 11394  		v.AddArg(v0)
 11395  		v.AddArg(mem)
 11396  		return true
 11397  	}
 11398  	// match: (MOVBstore [1] {s} (ADD ptr1 idx1) w x:(MOVBstoreidx ptr0 idx0 (UBFX [arm64BFAuxInt(8, 8)] w) mem))
 11399  	// cond: x.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x)
 11400  	// result: (MOVHstoreidx ptr0 idx0 (REV16W <w.Type> w) mem)
 11401  	for {
 11402  		if v.AuxInt != 1 {
 11403  			break
 11404  		}
 11405  		s := v.Aux
 11406  		_ = v.Args[2]
 11407  		v_0 := v.Args[0]
 11408  		if v_0.Op != OpARM64ADD {
 11409  			break
 11410  		}
 11411  		_ = v_0.Args[1]
 11412  		ptr1 := v_0.Args[0]
 11413  		idx1 := v_0.Args[1]
 11414  		w := v.Args[1]
 11415  		x := v.Args[2]
 11416  		if x.Op != OpARM64MOVBstoreidx {
 11417  			break
 11418  		}
 11419  		_ = x.Args[3]
 11420  		ptr0 := x.Args[0]
 11421  		idx0 := x.Args[1]
 11422  		x_2 := x.Args[2]
 11423  		if x_2.Op != OpARM64UBFX {
 11424  			break
 11425  		}
 11426  		if x_2.AuxInt != arm64BFAuxInt(8, 8) {
 11427  			break
 11428  		}
 11429  		if w != x_2.Args[0] {
 11430  			break
 11431  		}
 11432  		mem := x.Args[3]
 11433  		if !(x.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x)) {
 11434  			break
 11435  		}
 11436  		v.reset(OpARM64MOVHstoreidx)
 11437  		v.AddArg(ptr0)
 11438  		v.AddArg(idx0)
 11439  		v0 := b.NewValue0(v.Pos, OpARM64REV16W, w.Type)
 11440  		v0.AddArg(w)
 11441  		v.AddArg(v0)
 11442  		v.AddArg(mem)
 11443  		return true
 11444  	}
 11445  	// match: (MOVBstore [i] {s} ptr w x:(MOVBstore [i-1] {s} ptr (SRLconst [8] (MOVDreg w)) mem))
 11446  	// cond: x.Uses == 1 && clobber(x)
 11447  	// result: (MOVHstore [i-1] {s} ptr (REV16W <w.Type> w) mem)
 11448  	for {
 11449  		i := v.AuxInt
 11450  		s := v.Aux
 11451  		_ = v.Args[2]
 11452  		ptr := v.Args[0]
 11453  		w := v.Args[1]
 11454  		x := v.Args[2]
 11455  		if x.Op != OpARM64MOVBstore {
 11456  			break
 11457  		}
 11458  		if x.AuxInt != i-1 {
 11459  			break
 11460  		}
 11461  		if x.Aux != s {
 11462  			break
 11463  		}
 11464  		_ = x.Args[2]
 11465  		if ptr != x.Args[0] {
 11466  			break
 11467  		}
 11468  		x_1 := x.Args[1]
 11469  		if x_1.Op != OpARM64SRLconst {
 11470  			break
 11471  		}
 11472  		if x_1.AuxInt != 8 {
 11473  			break
 11474  		}
 11475  		x_1_0 := x_1.Args[0]
 11476  		if x_1_0.Op != OpARM64MOVDreg {
 11477  			break
 11478  		}
 11479  		if w != x_1_0.Args[0] {
 11480  			break
 11481  		}
 11482  		mem := x.Args[2]
 11483  		if !(x.Uses == 1 && clobber(x)) {
 11484  			break
 11485  		}
 11486  		v.reset(OpARM64MOVHstore)
 11487  		v.AuxInt = i - 1
 11488  		v.Aux = s
 11489  		v.AddArg(ptr)
 11490  		v0 := b.NewValue0(x.Pos, OpARM64REV16W, w.Type)
 11491  		v0.AddArg(w)
 11492  		v.AddArg(v0)
 11493  		v.AddArg(mem)
 11494  		return true
 11495  	}
 11496  	// match: (MOVBstore [1] {s} (ADD ptr1 idx1) w x:(MOVBstoreidx ptr0 idx0 (SRLconst [8] (MOVDreg w)) mem))
 11497  	// cond: x.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x)
 11498  	// result: (MOVHstoreidx ptr0 idx0 (REV16W <w.Type> w) mem)
 11499  	for {
 11500  		if v.AuxInt != 1 {
 11501  			break
 11502  		}
 11503  		s := v.Aux
 11504  		_ = v.Args[2]
 11505  		v_0 := v.Args[0]
 11506  		if v_0.Op != OpARM64ADD {
 11507  			break
 11508  		}
 11509  		_ = v_0.Args[1]
 11510  		ptr1 := v_0.Args[0]
 11511  		idx1 := v_0.Args[1]
 11512  		w := v.Args[1]
 11513  		x := v.Args[2]
 11514  		if x.Op != OpARM64MOVBstoreidx {
 11515  			break
 11516  		}
 11517  		_ = x.Args[3]
 11518  		ptr0 := x.Args[0]
 11519  		idx0 := x.Args[1]
 11520  		x_2 := x.Args[2]
 11521  		if x_2.Op != OpARM64SRLconst {
 11522  			break
 11523  		}
 11524  		if x_2.AuxInt != 8 {
 11525  			break
 11526  		}
 11527  		x_2_0 := x_2.Args[0]
 11528  		if x_2_0.Op != OpARM64MOVDreg {
 11529  			break
 11530  		}
 11531  		if w != x_2_0.Args[0] {
 11532  			break
 11533  		}
 11534  		mem := x.Args[3]
 11535  		if !(x.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x)) {
 11536  			break
 11537  		}
 11538  		v.reset(OpARM64MOVHstoreidx)
 11539  		v.AddArg(ptr0)
 11540  		v.AddArg(idx0)
 11541  		v0 := b.NewValue0(v.Pos, OpARM64REV16W, w.Type)
 11542  		v0.AddArg(w)
 11543  		v.AddArg(v0)
 11544  		v.AddArg(mem)
 11545  		return true
 11546  	}
 11547  	// match: (MOVBstore [i] {s} ptr w x:(MOVBstore [i-1] {s} ptr (UBFX [arm64BFAuxInt(8, 24)] w) mem))
 11548  	// cond: x.Uses == 1 && clobber(x)
 11549  	// result: (MOVHstore [i-1] {s} ptr (REV16W <w.Type> w) mem)
 11550  	for {
 11551  		i := v.AuxInt
 11552  		s := v.Aux
 11553  		_ = v.Args[2]
 11554  		ptr := v.Args[0]
 11555  		w := v.Args[1]
 11556  		x := v.Args[2]
 11557  		if x.Op != OpARM64MOVBstore {
 11558  			break
 11559  		}
 11560  		if x.AuxInt != i-1 {
 11561  			break
 11562  		}
 11563  		if x.Aux != s {
 11564  			break
 11565  		}
 11566  		_ = x.Args[2]
 11567  		if ptr != x.Args[0] {
 11568  			break
 11569  		}
 11570  		x_1 := x.Args[1]
 11571  		if x_1.Op != OpARM64UBFX {
 11572  			break
 11573  		}
 11574  		if x_1.AuxInt != arm64BFAuxInt(8, 24) {
 11575  			break
 11576  		}
 11577  		if w != x_1.Args[0] {
 11578  			break
 11579  		}
 11580  		mem := x.Args[2]
 11581  		if !(x.Uses == 1 && clobber(x)) {
 11582  			break
 11583  		}
 11584  		v.reset(OpARM64MOVHstore)
 11585  		v.AuxInt = i - 1
 11586  		v.Aux = s
 11587  		v.AddArg(ptr)
 11588  		v0 := b.NewValue0(x.Pos, OpARM64REV16W, w.Type)
 11589  		v0.AddArg(w)
 11590  		v.AddArg(v0)
 11591  		v.AddArg(mem)
 11592  		return true
 11593  	}
 11594  	// match: (MOVBstore [1] {s} (ADD ptr1 idx1) w x:(MOVBstoreidx ptr0 idx0 (UBFX [arm64BFAuxInt(8, 24)] w) mem))
 11595  	// cond: x.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x)
 11596  	// result: (MOVHstoreidx ptr0 idx0 (REV16W <w.Type> w) mem)
 11597  	for {
 11598  		if v.AuxInt != 1 {
 11599  			break
 11600  		}
 11601  		s := v.Aux
 11602  		_ = v.Args[2]
 11603  		v_0 := v.Args[0]
 11604  		if v_0.Op != OpARM64ADD {
 11605  			break
 11606  		}
 11607  		_ = v_0.Args[1]
 11608  		ptr1 := v_0.Args[0]
 11609  		idx1 := v_0.Args[1]
 11610  		w := v.Args[1]
 11611  		x := v.Args[2]
 11612  		if x.Op != OpARM64MOVBstoreidx {
 11613  			break
 11614  		}
 11615  		_ = x.Args[3]
 11616  		ptr0 := x.Args[0]
 11617  		idx0 := x.Args[1]
 11618  		x_2 := x.Args[2]
 11619  		if x_2.Op != OpARM64UBFX {
 11620  			break
 11621  		}
 11622  		if x_2.AuxInt != arm64BFAuxInt(8, 24) {
 11623  			break
 11624  		}
 11625  		if w != x_2.Args[0] {
 11626  			break
 11627  		}
 11628  		mem := x.Args[3]
 11629  		if !(x.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x)) {
 11630  			break
 11631  		}
 11632  		v.reset(OpARM64MOVHstoreidx)
 11633  		v.AddArg(ptr0)
 11634  		v.AddArg(idx0)
 11635  		v0 := b.NewValue0(v.Pos, OpARM64REV16W, w.Type)
 11636  		v0.AddArg(w)
 11637  		v.AddArg(v0)
 11638  		v.AddArg(mem)
 11639  		return true
 11640  	}
 11641  	return false
 11642  }
 11643  func rewriteValueARM64_OpARM64MOVBstore_40(v *Value) bool {
 11644  	b := v.Block
 11645  	_ = b
 11646  	// match: (MOVBstore [i] {s} ptr w x:(MOVBstore [i-1] {s} ptr (SRLconst [8] (MOVDreg w)) mem))
 11647  	// cond: x.Uses == 1 && clobber(x)
 11648  	// result: (MOVHstore [i-1] {s} ptr (REV16W <w.Type> w) mem)
 11649  	for {
 11650  		i := v.AuxInt
 11651  		s := v.Aux
 11652  		_ = v.Args[2]
 11653  		ptr := v.Args[0]
 11654  		w := v.Args[1]
 11655  		x := v.Args[2]
 11656  		if x.Op != OpARM64MOVBstore {
 11657  			break
 11658  		}
 11659  		if x.AuxInt != i-1 {
 11660  			break
 11661  		}
 11662  		if x.Aux != s {
 11663  			break
 11664  		}
 11665  		_ = x.Args[2]
 11666  		if ptr != x.Args[0] {
 11667  			break
 11668  		}
 11669  		x_1 := x.Args[1]
 11670  		if x_1.Op != OpARM64SRLconst {
 11671  			break
 11672  		}
 11673  		if x_1.AuxInt != 8 {
 11674  			break
 11675  		}
 11676  		x_1_0 := x_1.Args[0]
 11677  		if x_1_0.Op != OpARM64MOVDreg {
 11678  			break
 11679  		}
 11680  		if w != x_1_0.Args[0] {
 11681  			break
 11682  		}
 11683  		mem := x.Args[2]
 11684  		if !(x.Uses == 1 && clobber(x)) {
 11685  			break
 11686  		}
 11687  		v.reset(OpARM64MOVHstore)
 11688  		v.AuxInt = i - 1
 11689  		v.Aux = s
 11690  		v.AddArg(ptr)
 11691  		v0 := b.NewValue0(x.Pos, OpARM64REV16W, w.Type)
 11692  		v0.AddArg(w)
 11693  		v.AddArg(v0)
 11694  		v.AddArg(mem)
 11695  		return true
 11696  	}
 11697  	// match: (MOVBstore [1] {s} (ADD ptr1 idx1) w x:(MOVBstoreidx ptr0 idx0 (SRLconst [8] (MOVDreg w)) mem))
 11698  	// cond: x.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x)
 11699  	// result: (MOVHstoreidx ptr0 idx0 (REV16W <w.Type> w) mem)
 11700  	for {
 11701  		if v.AuxInt != 1 {
 11702  			break
 11703  		}
 11704  		s := v.Aux
 11705  		_ = v.Args[2]
 11706  		v_0 := v.Args[0]
 11707  		if v_0.Op != OpARM64ADD {
 11708  			break
 11709  		}
 11710  		_ = v_0.Args[1]
 11711  		ptr1 := v_0.Args[0]
 11712  		idx1 := v_0.Args[1]
 11713  		w := v.Args[1]
 11714  		x := v.Args[2]
 11715  		if x.Op != OpARM64MOVBstoreidx {
 11716  			break
 11717  		}
 11718  		_ = x.Args[3]
 11719  		ptr0 := x.Args[0]
 11720  		idx0 := x.Args[1]
 11721  		x_2 := x.Args[2]
 11722  		if x_2.Op != OpARM64SRLconst {
 11723  			break
 11724  		}
 11725  		if x_2.AuxInt != 8 {
 11726  			break
 11727  		}
 11728  		x_2_0 := x_2.Args[0]
 11729  		if x_2_0.Op != OpARM64MOVDreg {
 11730  			break
 11731  		}
 11732  		if w != x_2_0.Args[0] {
 11733  			break
 11734  		}
 11735  		mem := x.Args[3]
 11736  		if !(x.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x)) {
 11737  			break
 11738  		}
 11739  		v.reset(OpARM64MOVHstoreidx)
 11740  		v.AddArg(ptr0)
 11741  		v.AddArg(idx0)
 11742  		v0 := b.NewValue0(v.Pos, OpARM64REV16W, w.Type)
 11743  		v0.AddArg(w)
 11744  		v.AddArg(v0)
 11745  		v.AddArg(mem)
 11746  		return true
 11747  	}
 11748  	return false
 11749  }
 11750  func rewriteValueARM64_OpARM64MOVBstoreidx_0(v *Value) bool {
 11751  	// match: (MOVBstoreidx ptr (MOVDconst [c]) val mem)
 11752  	// cond:
 11753  	// result: (MOVBstore [c] ptr val mem)
 11754  	for {
 11755  		_ = v.Args[3]
 11756  		ptr := v.Args[0]
 11757  		v_1 := v.Args[1]
 11758  		if v_1.Op != OpARM64MOVDconst {
 11759  			break
 11760  		}
 11761  		c := v_1.AuxInt
 11762  		val := v.Args[2]
 11763  		mem := v.Args[3]
 11764  		v.reset(OpARM64MOVBstore)
 11765  		v.AuxInt = c
 11766  		v.AddArg(ptr)
 11767  		v.AddArg(val)
 11768  		v.AddArg(mem)
 11769  		return true
 11770  	}
 11771  	// match: (MOVBstoreidx (MOVDconst [c]) idx val mem)
 11772  	// cond:
 11773  	// result: (MOVBstore [c] idx val mem)
 11774  	for {
 11775  		_ = v.Args[3]
 11776  		v_0 := v.Args[0]
 11777  		if v_0.Op != OpARM64MOVDconst {
 11778  			break
 11779  		}
 11780  		c := v_0.AuxInt
 11781  		idx := v.Args[1]
 11782  		val := v.Args[2]
 11783  		mem := v.Args[3]
 11784  		v.reset(OpARM64MOVBstore)
 11785  		v.AuxInt = c
 11786  		v.AddArg(idx)
 11787  		v.AddArg(val)
 11788  		v.AddArg(mem)
 11789  		return true
 11790  	}
 11791  	// match: (MOVBstoreidx ptr idx (MOVDconst [0]) mem)
 11792  	// cond:
 11793  	// result: (MOVBstorezeroidx ptr idx mem)
 11794  	for {
 11795  		_ = v.Args[3]
 11796  		ptr := v.Args[0]
 11797  		idx := v.Args[1]
 11798  		v_2 := v.Args[2]
 11799  		if v_2.Op != OpARM64MOVDconst {
 11800  			break
 11801  		}
 11802  		if v_2.AuxInt != 0 {
 11803  			break
 11804  		}
 11805  		mem := v.Args[3]
 11806  		v.reset(OpARM64MOVBstorezeroidx)
 11807  		v.AddArg(ptr)
 11808  		v.AddArg(idx)
 11809  		v.AddArg(mem)
 11810  		return true
 11811  	}
 11812  	// match: (MOVBstoreidx ptr idx (MOVBreg x) mem)
 11813  	// cond:
 11814  	// result: (MOVBstoreidx ptr idx x mem)
 11815  	for {
 11816  		_ = v.Args[3]
 11817  		ptr := v.Args[0]
 11818  		idx := v.Args[1]
 11819  		v_2 := v.Args[2]
 11820  		if v_2.Op != OpARM64MOVBreg {
 11821  			break
 11822  		}
 11823  		x := v_2.Args[0]
 11824  		mem := v.Args[3]
 11825  		v.reset(OpARM64MOVBstoreidx)
 11826  		v.AddArg(ptr)
 11827  		v.AddArg(idx)
 11828  		v.AddArg(x)
 11829  		v.AddArg(mem)
 11830  		return true
 11831  	}
 11832  	// match: (MOVBstoreidx ptr idx (MOVBUreg x) mem)
 11833  	// cond:
 11834  	// result: (MOVBstoreidx ptr idx x mem)
 11835  	for {
 11836  		_ = v.Args[3]
 11837  		ptr := v.Args[0]
 11838  		idx := v.Args[1]
 11839  		v_2 := v.Args[2]
 11840  		if v_2.Op != OpARM64MOVBUreg {
 11841  			break
 11842  		}
 11843  		x := v_2.Args[0]
 11844  		mem := v.Args[3]
 11845  		v.reset(OpARM64MOVBstoreidx)
 11846  		v.AddArg(ptr)
 11847  		v.AddArg(idx)
 11848  		v.AddArg(x)
 11849  		v.AddArg(mem)
 11850  		return true
 11851  	}
 11852  	// match: (MOVBstoreidx ptr idx (MOVHreg x) mem)
 11853  	// cond:
 11854  	// result: (MOVBstoreidx ptr idx x mem)
 11855  	for {
 11856  		_ = v.Args[3]
 11857  		ptr := v.Args[0]
 11858  		idx := v.Args[1]
 11859  		v_2 := v.Args[2]
 11860  		if v_2.Op != OpARM64MOVHreg {
 11861  			break
 11862  		}
 11863  		x := v_2.Args[0]
 11864  		mem := v.Args[3]
 11865  		v.reset(OpARM64MOVBstoreidx)
 11866  		v.AddArg(ptr)
 11867  		v.AddArg(idx)
 11868  		v.AddArg(x)
 11869  		v.AddArg(mem)
 11870  		return true
 11871  	}
 11872  	// match: (MOVBstoreidx ptr idx (MOVHUreg x) mem)
 11873  	// cond:
 11874  	// result: (MOVBstoreidx ptr idx x mem)
 11875  	for {
 11876  		_ = v.Args[3]
 11877  		ptr := v.Args[0]
 11878  		idx := v.Args[1]
 11879  		v_2 := v.Args[2]
 11880  		if v_2.Op != OpARM64MOVHUreg {
 11881  			break
 11882  		}
 11883  		x := v_2.Args[0]
 11884  		mem := v.Args[3]
 11885  		v.reset(OpARM64MOVBstoreidx)
 11886  		v.AddArg(ptr)
 11887  		v.AddArg(idx)
 11888  		v.AddArg(x)
 11889  		v.AddArg(mem)
 11890  		return true
 11891  	}
 11892  	// match: (MOVBstoreidx ptr idx (MOVWreg x) mem)
 11893  	// cond:
 11894  	// result: (MOVBstoreidx ptr idx x mem)
 11895  	for {
 11896  		_ = v.Args[3]
 11897  		ptr := v.Args[0]
 11898  		idx := v.Args[1]
 11899  		v_2 := v.Args[2]
 11900  		if v_2.Op != OpARM64MOVWreg {
 11901  			break
 11902  		}
 11903  		x := v_2.Args[0]
 11904  		mem := v.Args[3]
 11905  		v.reset(OpARM64MOVBstoreidx)
 11906  		v.AddArg(ptr)
 11907  		v.AddArg(idx)
 11908  		v.AddArg(x)
 11909  		v.AddArg(mem)
 11910  		return true
 11911  	}
 11912  	// match: (MOVBstoreidx ptr idx (MOVWUreg x) mem)
 11913  	// cond:
 11914  	// result: (MOVBstoreidx ptr idx x mem)
 11915  	for {
 11916  		_ = v.Args[3]
 11917  		ptr := v.Args[0]
 11918  		idx := v.Args[1]
 11919  		v_2 := v.Args[2]
 11920  		if v_2.Op != OpARM64MOVWUreg {
 11921  			break
 11922  		}
 11923  		x := v_2.Args[0]
 11924  		mem := v.Args[3]
 11925  		v.reset(OpARM64MOVBstoreidx)
 11926  		v.AddArg(ptr)
 11927  		v.AddArg(idx)
 11928  		v.AddArg(x)
 11929  		v.AddArg(mem)
 11930  		return true
 11931  	}
 11932  	// match: (MOVBstoreidx ptr (ADDconst [1] idx) (SRLconst [8] w) x:(MOVBstoreidx ptr idx w mem))
 11933  	// cond: x.Uses == 1 && clobber(x)
 11934  	// result: (MOVHstoreidx ptr idx w mem)
 11935  	for {
 11936  		_ = v.Args[3]
 11937  		ptr := v.Args[0]
 11938  		v_1 := v.Args[1]
 11939  		if v_1.Op != OpARM64ADDconst {
 11940  			break
 11941  		}
 11942  		if v_1.AuxInt != 1 {
 11943  			break
 11944  		}
 11945  		idx := v_1.Args[0]
 11946  		v_2 := v.Args[2]
 11947  		if v_2.Op != OpARM64SRLconst {
 11948  			break
 11949  		}
 11950  		if v_2.AuxInt != 8 {
 11951  			break
 11952  		}
 11953  		w := v_2.Args[0]
 11954  		x := v.Args[3]
 11955  		if x.Op != OpARM64MOVBstoreidx {
 11956  			break
 11957  		}
 11958  		_ = x.Args[3]
 11959  		if ptr != x.Args[0] {
 11960  			break
 11961  		}
 11962  		if idx != x.Args[1] {
 11963  			break
 11964  		}
 11965  		if w != x.Args[2] {
 11966  			break
 11967  		}
 11968  		mem := x.Args[3]
 11969  		if !(x.Uses == 1 && clobber(x)) {
 11970  			break
 11971  		}
 11972  		v.reset(OpARM64MOVHstoreidx)
 11973  		v.AddArg(ptr)
 11974  		v.AddArg(idx)
 11975  		v.AddArg(w)
 11976  		v.AddArg(mem)
 11977  		return true
 11978  	}
 11979  	return false
 11980  }
 11981  func rewriteValueARM64_OpARM64MOVBstoreidx_10(v *Value) bool {
 11982  	b := v.Block
 11983  	_ = b
 11984  	// match: (MOVBstoreidx ptr (ADDconst [3] idx) w x0:(MOVBstoreidx ptr (ADDconst [2] idx) (UBFX [arm64BFAuxInt(8, 24)] w) x1:(MOVBstoreidx ptr (ADDconst [1] idx) (UBFX [arm64BFAuxInt(16, 16)] w) x2:(MOVBstoreidx ptr idx (UBFX [arm64BFAuxInt(24, 8)] w) mem))))
 11985  	// cond: x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && clobber(x0) && clobber(x1) && clobber(x2)
 11986  	// result: (MOVWstoreidx ptr idx (REVW <w.Type> w) mem)
 11987  	for {
 11988  		_ = v.Args[3]
 11989  		ptr := v.Args[0]
 11990  		v_1 := v.Args[1]
 11991  		if v_1.Op != OpARM64ADDconst {
 11992  			break
 11993  		}
 11994  		if v_1.AuxInt != 3 {
 11995  			break
 11996  		}
 11997  		idx := v_1.Args[0]
 11998  		w := v.Args[2]
 11999  		x0 := v.Args[3]
 12000  		if x0.Op != OpARM64MOVBstoreidx {
 12001  			break
 12002  		}
 12003  		_ = x0.Args[3]
 12004  		if ptr != x0.Args[0] {
 12005  			break
 12006  		}
 12007  		x0_1 := x0.Args[1]
 12008  		if x0_1.Op != OpARM64ADDconst {
 12009  			break
 12010  		}
 12011  		if x0_1.AuxInt != 2 {
 12012  			break
 12013  		}
 12014  		if idx != x0_1.Args[0] {
 12015  			break
 12016  		}
 12017  		x0_2 := x0.Args[2]
 12018  		if x0_2.Op != OpARM64UBFX {
 12019  			break
 12020  		}
 12021  		if x0_2.AuxInt != arm64BFAuxInt(8, 24) {
 12022  			break
 12023  		}
 12024  		if w != x0_2.Args[0] {
 12025  			break
 12026  		}
 12027  		x1 := x0.Args[3]
 12028  		if x1.Op != OpARM64MOVBstoreidx {
 12029  			break
 12030  		}
 12031  		_ = x1.Args[3]
 12032  		if ptr != x1.Args[0] {
 12033  			break
 12034  		}
 12035  		x1_1 := x1.Args[1]
 12036  		if x1_1.Op != OpARM64ADDconst {
 12037  			break
 12038  		}
 12039  		if x1_1.AuxInt != 1 {
 12040  			break
 12041  		}
 12042  		if idx != x1_1.Args[0] {
 12043  			break
 12044  		}
 12045  		x1_2 := x1.Args[2]
 12046  		if x1_2.Op != OpARM64UBFX {
 12047  			break
 12048  		}
 12049  		if x1_2.AuxInt != arm64BFAuxInt(16, 16) {
 12050  			break
 12051  		}
 12052  		if w != x1_2.Args[0] {
 12053  			break
 12054  		}
 12055  		x2 := x1.Args[3]
 12056  		if x2.Op != OpARM64MOVBstoreidx {
 12057  			break
 12058  		}
 12059  		_ = x2.Args[3]
 12060  		if ptr != x2.Args[0] {
 12061  			break
 12062  		}
 12063  		if idx != x2.Args[1] {
 12064  			break
 12065  		}
 12066  		x2_2 := x2.Args[2]
 12067  		if x2_2.Op != OpARM64UBFX {
 12068  			break
 12069  		}
 12070  		if x2_2.AuxInt != arm64BFAuxInt(24, 8) {
 12071  			break
 12072  		}
 12073  		if w != x2_2.Args[0] {
 12074  			break
 12075  		}
 12076  		mem := x2.Args[3]
 12077  		if !(x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && clobber(x0) && clobber(x1) && clobber(x2)) {
 12078  			break
 12079  		}
 12080  		v.reset(OpARM64MOVWstoreidx)
 12081  		v.AddArg(ptr)
 12082  		v.AddArg(idx)
 12083  		v0 := b.NewValue0(v.Pos, OpARM64REVW, w.Type)
 12084  		v0.AddArg(w)
 12085  		v.AddArg(v0)
 12086  		v.AddArg(mem)
 12087  		return true
 12088  	}
 12089  	// match: (MOVBstoreidx ptr idx w x0:(MOVBstoreidx ptr (ADDconst [1] idx) (UBFX [arm64BFAuxInt(8, 24)] w) x1:(MOVBstoreidx ptr (ADDconst [2] idx) (UBFX [arm64BFAuxInt(16, 16)] w) x2:(MOVBstoreidx ptr (ADDconst [3] idx) (UBFX [arm64BFAuxInt(24, 8)] w) mem))))
 12090  	// cond: x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && clobber(x0) && clobber(x1) && clobber(x2)
 12091  	// result: (MOVWstoreidx ptr idx w mem)
 12092  	for {
 12093  		_ = v.Args[3]
 12094  		ptr := v.Args[0]
 12095  		idx := v.Args[1]
 12096  		w := v.Args[2]
 12097  		x0 := v.Args[3]
 12098  		if x0.Op != OpARM64MOVBstoreidx {
 12099  			break
 12100  		}
 12101  		_ = x0.Args[3]
 12102  		if ptr != x0.Args[0] {
 12103  			break
 12104  		}
 12105  		x0_1 := x0.Args[1]
 12106  		if x0_1.Op != OpARM64ADDconst {
 12107  			break
 12108  		}
 12109  		if x0_1.AuxInt != 1 {
 12110  			break
 12111  		}
 12112  		if idx != x0_1.Args[0] {
 12113  			break
 12114  		}
 12115  		x0_2 := x0.Args[2]
 12116  		if x0_2.Op != OpARM64UBFX {
 12117  			break
 12118  		}
 12119  		if x0_2.AuxInt != arm64BFAuxInt(8, 24) {
 12120  			break
 12121  		}
 12122  		if w != x0_2.Args[0] {
 12123  			break
 12124  		}
 12125  		x1 := x0.Args[3]
 12126  		if x1.Op != OpARM64MOVBstoreidx {
 12127  			break
 12128  		}
 12129  		_ = x1.Args[3]
 12130  		if ptr != x1.Args[0] {
 12131  			break
 12132  		}
 12133  		x1_1 := x1.Args[1]
 12134  		if x1_1.Op != OpARM64ADDconst {
 12135  			break
 12136  		}
 12137  		if x1_1.AuxInt != 2 {
 12138  			break
 12139  		}
 12140  		if idx != x1_1.Args[0] {
 12141  			break
 12142  		}
 12143  		x1_2 := x1.Args[2]
 12144  		if x1_2.Op != OpARM64UBFX {
 12145  			break
 12146  		}
 12147  		if x1_2.AuxInt != arm64BFAuxInt(16, 16) {
 12148  			break
 12149  		}
 12150  		if w != x1_2.Args[0] {
 12151  			break
 12152  		}
 12153  		x2 := x1.Args[3]
 12154  		if x2.Op != OpARM64MOVBstoreidx {
 12155  			break
 12156  		}
 12157  		_ = x2.Args[3]
 12158  		if ptr != x2.Args[0] {
 12159  			break
 12160  		}
 12161  		x2_1 := x2.Args[1]
 12162  		if x2_1.Op != OpARM64ADDconst {
 12163  			break
 12164  		}
 12165  		if x2_1.AuxInt != 3 {
 12166  			break
 12167  		}
 12168  		if idx != x2_1.Args[0] {
 12169  			break
 12170  		}
 12171  		x2_2 := x2.Args[2]
 12172  		if x2_2.Op != OpARM64UBFX {
 12173  			break
 12174  		}
 12175  		if x2_2.AuxInt != arm64BFAuxInt(24, 8) {
 12176  			break
 12177  		}
 12178  		if w != x2_2.Args[0] {
 12179  			break
 12180  		}
 12181  		mem := x2.Args[3]
 12182  		if !(x0.Uses == 1 && x1.Uses == 1 && x2.Uses == 1 && clobber(x0) && clobber(x1) && clobber(x2)) {
 12183  			break
 12184  		}
 12185  		v.reset(OpARM64MOVWstoreidx)
 12186  		v.AddArg(ptr)
 12187  		v.AddArg(idx)
 12188  		v.AddArg(w)
 12189  		v.AddArg(mem)
 12190  		return true
 12191  	}
 12192  	// match: (MOVBstoreidx ptr (ADDconst [1] idx) w x:(MOVBstoreidx ptr idx (UBFX [arm64BFAuxInt(8, 8)] w) mem))
 12193  	// cond: x.Uses == 1 && clobber(x)
 12194  	// result: (MOVHstoreidx ptr idx (REV16W <w.Type> w) mem)
 12195  	for {
 12196  		_ = v.Args[3]
 12197  		ptr := v.Args[0]
 12198  		v_1 := v.Args[1]
 12199  		if v_1.Op != OpARM64ADDconst {
 12200  			break
 12201  		}
 12202  		if v_1.AuxInt != 1 {
 12203  			break
 12204  		}
 12205  		idx := v_1.Args[0]
 12206  		w := v.Args[2]
 12207  		x := v.Args[3]
 12208  		if x.Op != OpARM64MOVBstoreidx {
 12209  			break
 12210  		}
 12211  		_ = x.Args[3]
 12212  		if ptr != x.Args[0] {
 12213  			break
 12214  		}
 12215  		if idx != x.Args[1] {
 12216  			break
 12217  		}
 12218  		x_2 := x.Args[2]
 12219  		if x_2.Op != OpARM64UBFX {
 12220  			break
 12221  		}
 12222  		if x_2.AuxInt != arm64BFAuxInt(8, 8) {
 12223  			break
 12224  		}
 12225  		if w != x_2.Args[0] {
 12226  			break
 12227  		}
 12228  		mem := x.Args[3]
 12229  		if !(x.Uses == 1 && clobber(x)) {
 12230  			break
 12231  		}
 12232  		v.reset(OpARM64MOVHstoreidx)
 12233  		v.AddArg(ptr)
 12234  		v.AddArg(idx)
 12235  		v0 := b.NewValue0(v.Pos, OpARM64REV16W, w.Type)
 12236  		v0.AddArg(w)
 12237  		v.AddArg(v0)
 12238  		v.AddArg(mem)
 12239  		return true
 12240  	}
 12241  	// match: (MOVBstoreidx ptr idx w x:(MOVBstoreidx ptr (ADDconst [1] idx) (UBFX [arm64BFAuxInt(8, 8)] w) mem))
 12242  	// cond: x.Uses == 1 && clobber(x)
 12243  	// result: (MOVHstoreidx ptr idx w mem)
 12244  	for {
 12245  		_ = v.Args[3]
 12246  		ptr := v.Args[0]
 12247  		idx := v.Args[1]
 12248  		w := v.Args[2]
 12249  		x := v.Args[3]
 12250  		if x.Op != OpARM64MOVBstoreidx {
 12251  			break
 12252  		}
 12253  		_ = x.Args[3]
 12254  		if ptr != x.Args[0] {
 12255  			break
 12256  		}
 12257  		x_1 := x.Args[1]
 12258  		if x_1.Op != OpARM64ADDconst {
 12259  			break
 12260  		}
 12261  		if x_1.AuxInt != 1 {
 12262  			break
 12263  		}
 12264  		if idx != x_1.Args[0] {
 12265  			break
 12266  		}
 12267  		x_2 := x.Args[2]
 12268  		if x_2.Op != OpARM64UBFX {
 12269  			break
 12270  		}
 12271  		if x_2.AuxInt != arm64BFAuxInt(8, 8) {
 12272  			break
 12273  		}
 12274  		if w != x_2.Args[0] {
 12275  			break
 12276  		}
 12277  		mem := x.Args[3]
 12278  		if !(x.Uses == 1 && clobber(x)) {
 12279  			break
 12280  		}
 12281  		v.reset(OpARM64MOVHstoreidx)
 12282  		v.AddArg(ptr)
 12283  		v.AddArg(idx)
 12284  		v.AddArg(w)
 12285  		v.AddArg(mem)
 12286  		return true
 12287  	}
 12288  	return false
 12289  }
// rewriteValueARM64_OpARM64MOVBstorezero_0 applies the generated rewrite
// rules for MOVBstorezero: fold an ADDconst offset into the aux offset,
// merge a MOVDaddr symbol/offset, switch to register-indexed form when
// the address is a plain ADD with zero aux, and combine two adjacent
// byte zero-stores into one halfword zero-store. Reports whether a
// rewrite fired. Generated from gen/ARM64.rules; DO NOT EDIT by hand.
func rewriteValueARM64_OpARM64MOVBstorezero_0(v *Value) bool {
	b := v.Block
	_ = b
	config := b.Func.Config
	_ = config
	// match: (MOVBstorezero [off1] {sym} (ADDconst [off2] ptr) mem)
	// cond: is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)
	// result: (MOVBstorezero [off1+off2] {sym} ptr mem)
	for {
		off1 := v.AuxInt
		sym := v.Aux
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64ADDconst {
			break
		}
		off2 := v_0.AuxInt
		ptr := v_0.Args[0]
		mem := v.Args[1]
		if !(is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
			break
		}
		v.reset(OpARM64MOVBstorezero)
		v.AuxInt = off1 + off2
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (MOVBstorezero [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)
	// result: (MOVBstorezero [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
	for {
		off1 := v.AuxInt
		sym1 := v.Aux
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDaddr {
			break
		}
		off2 := v_0.AuxInt
		sym2 := v_0.Aux
		ptr := v_0.Args[0]
		mem := v.Args[1]
		if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
			break
		}
		v.reset(OpARM64MOVBstorezero)
		v.AuxInt = off1 + off2
		v.Aux = mergeSym(sym1, sym2)
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (MOVBstorezero [off] {sym} (ADD ptr idx) mem)
	// cond: off == 0 && sym == nil
	// result: (MOVBstorezeroidx ptr idx mem)
	for {
		off := v.AuxInt
		sym := v.Aux
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64ADD {
			break
		}
		_ = v_0.Args[1]
		ptr := v_0.Args[0]
		idx := v_0.Args[1]
		mem := v.Args[1]
		if !(off == 0 && sym == nil) {
			break
		}
		v.reset(OpARM64MOVBstorezeroidx)
		v.AddArg(ptr)
		v.AddArg(idx)
		v.AddArg(mem)
		return true
	}
	// match: (MOVBstorezero [i] {s} ptr0 x:(MOVBstorezero [j] {s} ptr1 mem))
	// cond: x.Uses == 1 && areAdjacentOffsets(i,j,1) && is32Bit(min(i,j)) && isSamePtr(ptr0, ptr1) && clobber(x)
	// result: (MOVHstorezero [min(i,j)] {s} ptr0 mem)
	for {
		i := v.AuxInt
		s := v.Aux
		_ = v.Args[1]
		ptr0 := v.Args[0]
		x := v.Args[1]
		if x.Op != OpARM64MOVBstorezero {
			break
		}
		j := x.AuxInt
		if x.Aux != s {
			break
		}
		_ = x.Args[1]
		ptr1 := x.Args[0]
		mem := x.Args[1]
		if !(x.Uses == 1 && areAdjacentOffsets(i, j, 1) && is32Bit(min(i, j)) && isSamePtr(ptr0, ptr1) && clobber(x)) {
			break
		}
		v.reset(OpARM64MOVHstorezero)
		v.AuxInt = min(i, j)
		v.Aux = s
		v.AddArg(ptr0)
		v.AddArg(mem)
		return true
	}
	// match: (MOVBstorezero [1] {s} (ADD ptr0 idx0) x:(MOVBstorezeroidx ptr1 idx1 mem))
	// cond: x.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x)
	// result: (MOVHstorezeroidx ptr1 idx1 mem)
	for {
		if v.AuxInt != 1 {
			break
		}
		s := v.Aux
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64ADD {
			break
		}
		_ = v_0.Args[1]
		ptr0 := v_0.Args[0]
		idx0 := v_0.Args[1]
		x := v.Args[1]
		if x.Op != OpARM64MOVBstorezeroidx {
			break
		}
		_ = x.Args[2]
		ptr1 := x.Args[0]
		idx1 := x.Args[1]
		mem := x.Args[2]
		if !(x.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x)) {
			break
		}
		v.reset(OpARM64MOVHstorezeroidx)
		v.AddArg(ptr1)
		v.AddArg(idx1)
		v.AddArg(mem)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64MOVBstorezeroidx_0 applies the generated
// rewrite rules for MOVBstorezeroidx: collapse a constant index (in
// either operand position) back to offset form, and merge a pair of
// byte zero-stores at idx and idx+1 into a halfword zero-store.
// Reports whether a rewrite fired. Generated from gen/ARM64.rules;
// DO NOT EDIT by hand.
func rewriteValueARM64_OpARM64MOVBstorezeroidx_0(v *Value) bool {
	// match: (MOVBstorezeroidx ptr (MOVDconst [c]) mem)
	// cond:
	// result: (MOVBstorezero [c] ptr mem)
	for {
		_ = v.Args[2]
		ptr := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		mem := v.Args[2]
		v.reset(OpARM64MOVBstorezero)
		v.AuxInt = c
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (MOVBstorezeroidx (MOVDconst [c]) idx mem)
	// cond:
	// result: (MOVBstorezero [c] idx mem)
	for {
		_ = v.Args[2]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		idx := v.Args[1]
		mem := v.Args[2]
		v.reset(OpARM64MOVBstorezero)
		v.AuxInt = c
		v.AddArg(idx)
		v.AddArg(mem)
		return true
	}
	// match: (MOVBstorezeroidx ptr (ADDconst [1] idx) x:(MOVBstorezeroidx ptr idx mem))
	// cond: x.Uses == 1 && clobber(x)
	// result: (MOVHstorezeroidx ptr idx mem)
	for {
		_ = v.Args[2]
		ptr := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64ADDconst {
			break
		}
		if v_1.AuxInt != 1 {
			break
		}
		idx := v_1.Args[0]
		x := v.Args[2]
		if x.Op != OpARM64MOVBstorezeroidx {
			break
		}
		_ = x.Args[2]
		if ptr != x.Args[0] {
			break
		}
		if idx != x.Args[1] {
			break
		}
		mem := x.Args[2]
		if !(x.Uses == 1 && clobber(x)) {
			break
		}
		v.reset(OpARM64MOVHstorezeroidx)
		v.AddArg(ptr)
		v.AddArg(idx)
		v.AddArg(mem)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64MOVDload_0 applies the generated rewrite rules
// for MOVDload: forward an FMOVDstore value through an FP<->GP move, fold
// ADDconst/MOVDaddr offsets, switch to (shifted) register-indexed forms,
// load 0 directly when the memory was just zeroed at the same address,
// and constant-fold loads from read-only symbol data. Reports whether a
// rewrite fired. Generated from gen/ARM64.rules; DO NOT EDIT by hand.
func rewriteValueARM64_OpARM64MOVDload_0(v *Value) bool {
	b := v.Block
	_ = b
	config := b.Func.Config
	_ = config
	// match: (MOVDload [off] {sym} ptr (FMOVDstore [off] {sym} ptr val _))
	// cond:
	// result: (FMOVDfpgp val)
	for {
		off := v.AuxInt
		sym := v.Aux
		_ = v.Args[1]
		ptr := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64FMOVDstore {
			break
		}
		if v_1.AuxInt != off {
			break
		}
		if v_1.Aux != sym {
			break
		}
		_ = v_1.Args[2]
		if ptr != v_1.Args[0] {
			break
		}
		val := v_1.Args[1]
		v.reset(OpARM64FMOVDfpgp)
		v.AddArg(val)
		return true
	}
	// match: (MOVDload [off1] {sym} (ADDconst [off2] ptr) mem)
	// cond: is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)
	// result: (MOVDload [off1+off2] {sym} ptr mem)
	for {
		off1 := v.AuxInt
		sym := v.Aux
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64ADDconst {
			break
		}
		off2 := v_0.AuxInt
		ptr := v_0.Args[0]
		mem := v.Args[1]
		if !(is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
			break
		}
		v.reset(OpARM64MOVDload)
		v.AuxInt = off1 + off2
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (MOVDload [off] {sym} (ADD ptr idx) mem)
	// cond: off == 0 && sym == nil
	// result: (MOVDloadidx ptr idx mem)
	for {
		off := v.AuxInt
		sym := v.Aux
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64ADD {
			break
		}
		_ = v_0.Args[1]
		ptr := v_0.Args[0]
		idx := v_0.Args[1]
		mem := v.Args[1]
		if !(off == 0 && sym == nil) {
			break
		}
		v.reset(OpARM64MOVDloadidx)
		v.AddArg(ptr)
		v.AddArg(idx)
		v.AddArg(mem)
		return true
	}
	// match: (MOVDload [off] {sym} (ADDshiftLL [3] ptr idx) mem)
	// cond: off == 0 && sym == nil
	// result: (MOVDloadidx8 ptr idx mem)
	for {
		off := v.AuxInt
		sym := v.Aux
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64ADDshiftLL {
			break
		}
		if v_0.AuxInt != 3 {
			break
		}
		_ = v_0.Args[1]
		ptr := v_0.Args[0]
		idx := v_0.Args[1]
		mem := v.Args[1]
		if !(off == 0 && sym == nil) {
			break
		}
		v.reset(OpARM64MOVDloadidx8)
		v.AddArg(ptr)
		v.AddArg(idx)
		v.AddArg(mem)
		return true
	}
	// match: (MOVDload [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)
	// result: (MOVDload [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
	for {
		off1 := v.AuxInt
		sym1 := v.Aux
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDaddr {
			break
		}
		off2 := v_0.AuxInt
		sym2 := v_0.Aux
		ptr := v_0.Args[0]
		mem := v.Args[1]
		if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
			break
		}
		v.reset(OpARM64MOVDload)
		v.AuxInt = off1 + off2
		v.Aux = mergeSym(sym1, sym2)
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (MOVDload [off] {sym} ptr (MOVDstorezero [off2] {sym2} ptr2 _))
	// cond: sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)
	// result: (MOVDconst [0])
	for {
		off := v.AuxInt
		sym := v.Aux
		_ = v.Args[1]
		ptr := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDstorezero {
			break
		}
		off2 := v_1.AuxInt
		sym2 := v_1.Aux
		_ = v_1.Args[1]
		ptr2 := v_1.Args[0]
		if !(sym == sym2 && off == off2 && isSamePtr(ptr, ptr2)) {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 0
		return true
	}
	// match: (MOVDload [off] {sym} (SB) _)
	// cond: symIsRO(sym)
	// result: (MOVDconst [int64(read64(sym, off, config.BigEndian))])
	for {
		off := v.AuxInt
		sym := v.Aux
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpSB {
			break
		}
		if !(symIsRO(sym)) {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = int64(read64(sym, off, config.BigEndian))
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64MOVDloadidx_0 applies the generated rewrite
// rules for MOVDloadidx: collapse a constant index (either operand) back
// to offset form, recognize an index scaled by SLLconst [3] as the
// 8-byte-scaled idx8 form, and load 0 directly when the memory operand
// is a zero-store at the same indexed address. Reports whether a rewrite
// fired. Generated from gen/ARM64.rules; DO NOT EDIT by hand.
func rewriteValueARM64_OpARM64MOVDloadidx_0(v *Value) bool {
	// match: (MOVDloadidx ptr (MOVDconst [c]) mem)
	// cond:
	// result: (MOVDload [c] ptr mem)
	for {
		_ = v.Args[2]
		ptr := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		mem := v.Args[2]
		v.reset(OpARM64MOVDload)
		v.AuxInt = c
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (MOVDloadidx (MOVDconst [c]) ptr mem)
	// cond:
	// result: (MOVDload [c] ptr mem)
	for {
		_ = v.Args[2]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		ptr := v.Args[1]
		mem := v.Args[2]
		v.reset(OpARM64MOVDload)
		v.AuxInt = c
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (MOVDloadidx ptr (SLLconst [3] idx) mem)
	// cond:
	// result: (MOVDloadidx8 ptr idx mem)
	for {
		_ = v.Args[2]
		ptr := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64SLLconst {
			break
		}
		if v_1.AuxInt != 3 {
			break
		}
		idx := v_1.Args[0]
		mem := v.Args[2]
		v.reset(OpARM64MOVDloadidx8)
		v.AddArg(ptr)
		v.AddArg(idx)
		v.AddArg(mem)
		return true
	}
	// match: (MOVDloadidx (SLLconst [3] idx) ptr mem)
	// cond:
	// result: (MOVDloadidx8 ptr idx mem)
	for {
		_ = v.Args[2]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64SLLconst {
			break
		}
		if v_0.AuxInt != 3 {
			break
		}
		idx := v_0.Args[0]
		ptr := v.Args[1]
		mem := v.Args[2]
		v.reset(OpARM64MOVDloadidx8)
		v.AddArg(ptr)
		v.AddArg(idx)
		v.AddArg(mem)
		return true
	}
	// match: (MOVDloadidx ptr idx (MOVDstorezeroidx ptr2 idx2 _))
	// cond: (isSamePtr(ptr, ptr2) && isSamePtr(idx, idx2) || isSamePtr(ptr, idx2) && isSamePtr(idx, ptr2))
	// result: (MOVDconst [0])
	for {
		_ = v.Args[2]
		ptr := v.Args[0]
		idx := v.Args[1]
		v_2 := v.Args[2]
		if v_2.Op != OpARM64MOVDstorezeroidx {
			break
		}
		_ = v_2.Args[2]
		ptr2 := v_2.Args[0]
		idx2 := v_2.Args[1]
		if !(isSamePtr(ptr, ptr2) && isSamePtr(idx, idx2) || isSamePtr(ptr, idx2) && isSamePtr(idx, ptr2)) {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 0
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64MOVDloadidx8_0 applies the generated rewrite
// rules for MOVDloadidx8 (index scaled by 8): fold a constant index into
// an offset-form load with offset c<<3, and load 0 directly when the
// memory operand is a scaled zero-store at the same address. Reports
// whether a rewrite fired. Generated from gen/ARM64.rules; DO NOT EDIT
// by hand.
func rewriteValueARM64_OpARM64MOVDloadidx8_0(v *Value) bool {
	// match: (MOVDloadidx8 ptr (MOVDconst [c]) mem)
	// cond:
	// result: (MOVDload [c<<3] ptr mem)
	for {
		_ = v.Args[2]
		ptr := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		mem := v.Args[2]
		v.reset(OpARM64MOVDload)
		v.AuxInt = c << 3
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (MOVDloadidx8 ptr idx (MOVDstorezeroidx8 ptr2 idx2 _))
	// cond: isSamePtr(ptr, ptr2) && isSamePtr(idx, idx2)
	// result: (MOVDconst [0])
	for {
		_ = v.Args[2]
		ptr := v.Args[0]
		idx := v.Args[1]
		v_2 := v.Args[2]
		if v_2.Op != OpARM64MOVDstorezeroidx8 {
			break
		}
		_ = v_2.Args[2]
		ptr2 := v_2.Args[0]
		idx2 := v_2.Args[1]
		if !(isSamePtr(ptr, ptr2) && isSamePtr(idx, idx2)) {
			break
		}
		v.reset(OpARM64MOVDconst)
		v.AuxInt = 0
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64MOVDreg_0 applies the generated rewrite rules
// for MOVDreg: turn a single-use register move into a MOVDnop, and
// propagate a constant operand through the move. Reports whether a
// rewrite fired. Generated from gen/ARM64.rules; DO NOT EDIT by hand.
func rewriteValueARM64_OpARM64MOVDreg_0(v *Value) bool {
	// match: (MOVDreg x)
	// cond: x.Uses == 1
	// result: (MOVDnop x)
	for {
		x := v.Args[0]
		if !(x.Uses == 1) {
			break
		}
		v.reset(OpARM64MOVDnop)
		v.AddArg(x)
		return true
	}
	// match: (MOVDreg (MOVDconst [c]))
	// cond:
	// result: (MOVDconst [c])
	for {
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		v.reset(OpARM64MOVDconst)
		v.AuxInt = c
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64MOVDstore_0 applies the generated rewrite
// rules for MOVDstore: store an FP<->GP moved value with FMOVDstore
// directly, fold ADDconst/MOVDaddr offsets, switch to (shifted)
// register-indexed forms, and turn a store of constant zero into
// MOVDstorezero. Reports whether a rewrite fired. Generated from
// gen/ARM64.rules; DO NOT EDIT by hand.
func rewriteValueARM64_OpARM64MOVDstore_0(v *Value) bool {
	b := v.Block
	_ = b
	config := b.Func.Config
	_ = config
	// match: (MOVDstore [off] {sym} ptr (FMOVDfpgp val) mem)
	// cond:
	// result: (FMOVDstore [off] {sym} ptr val mem)
	for {
		off := v.AuxInt
		sym := v.Aux
		_ = v.Args[2]
		ptr := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64FMOVDfpgp {
			break
		}
		val := v_1.Args[0]
		mem := v.Args[2]
		v.reset(OpARM64FMOVDstore)
		v.AuxInt = off
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(val)
		v.AddArg(mem)
		return true
	}
	// match: (MOVDstore [off1] {sym} (ADDconst [off2] ptr) val mem)
	// cond: is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)
	// result: (MOVDstore [off1+off2] {sym} ptr val mem)
	for {
		off1 := v.AuxInt
		sym := v.Aux
		_ = v.Args[2]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64ADDconst {
			break
		}
		off2 := v_0.AuxInt
		ptr := v_0.Args[0]
		val := v.Args[1]
		mem := v.Args[2]
		if !(is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
			break
		}
		v.reset(OpARM64MOVDstore)
		v.AuxInt = off1 + off2
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(val)
		v.AddArg(mem)
		return true
	}
	// match: (MOVDstore [off] {sym} (ADD ptr idx) val mem)
	// cond: off == 0 && sym == nil
	// result: (MOVDstoreidx ptr idx val mem)
	for {
		off := v.AuxInt
		sym := v.Aux
		_ = v.Args[2]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64ADD {
			break
		}
		_ = v_0.Args[1]
		ptr := v_0.Args[0]
		idx := v_0.Args[1]
		val := v.Args[1]
		mem := v.Args[2]
		if !(off == 0 && sym == nil) {
			break
		}
		v.reset(OpARM64MOVDstoreidx)
		v.AddArg(ptr)
		v.AddArg(idx)
		v.AddArg(val)
		v.AddArg(mem)
		return true
	}
	// match: (MOVDstore [off] {sym} (ADDshiftLL [3] ptr idx) val mem)
	// cond: off == 0 && sym == nil
	// result: (MOVDstoreidx8 ptr idx val mem)
	for {
		off := v.AuxInt
		sym := v.Aux
		_ = v.Args[2]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64ADDshiftLL {
			break
		}
		if v_0.AuxInt != 3 {
			break
		}
		_ = v_0.Args[1]
		ptr := v_0.Args[0]
		idx := v_0.Args[1]
		val := v.Args[1]
		mem := v.Args[2]
		if !(off == 0 && sym == nil) {
			break
		}
		v.reset(OpARM64MOVDstoreidx8)
		v.AddArg(ptr)
		v.AddArg(idx)
		v.AddArg(val)
		v.AddArg(mem)
		return true
	}
	// match: (MOVDstore [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) val mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)
	// result: (MOVDstore [off1+off2] {mergeSym(sym1,sym2)} ptr val mem)
	for {
		off1 := v.AuxInt
		sym1 := v.Aux
		_ = v.Args[2]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDaddr {
			break
		}
		off2 := v_0.AuxInt
		sym2 := v_0.Aux
		ptr := v_0.Args[0]
		val := v.Args[1]
		mem := v.Args[2]
		if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
			break
		}
		v.reset(OpARM64MOVDstore)
		v.AuxInt = off1 + off2
		v.Aux = mergeSym(sym1, sym2)
		v.AddArg(ptr)
		v.AddArg(val)
		v.AddArg(mem)
		return true
	}
	// match: (MOVDstore [off] {sym} ptr (MOVDconst [0]) mem)
	// cond:
	// result: (MOVDstorezero [off] {sym} ptr mem)
	for {
		off := v.AuxInt
		sym := v.Aux
		_ = v.Args[2]
		ptr := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		if v_1.AuxInt != 0 {
			break
		}
		mem := v.Args[2]
		v.reset(OpARM64MOVDstorezero)
		v.AuxInt = off
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64MOVDstoreidx_0 applies the generated rewrite
// rules for MOVDstoreidx: collapse a constant index (either operand)
// back to offset form, recognize an index scaled by SLLconst [3] as the
// 8-byte-scaled idx8 form, and turn a store of constant zero into
// MOVDstorezeroidx. Reports whether a rewrite fired. Generated from
// gen/ARM64.rules; DO NOT EDIT by hand.
func rewriteValueARM64_OpARM64MOVDstoreidx_0(v *Value) bool {
	// match: (MOVDstoreidx ptr (MOVDconst [c]) val mem)
	// cond:
	// result: (MOVDstore [c] ptr val mem)
	for {
		_ = v.Args[3]
		ptr := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		val := v.Args[2]
		mem := v.Args[3]
		v.reset(OpARM64MOVDstore)
		v.AuxInt = c
		v.AddArg(ptr)
		v.AddArg(val)
		v.AddArg(mem)
		return true
	}
	// match: (MOVDstoreidx (MOVDconst [c]) idx val mem)
	// cond:
	// result: (MOVDstore [c] idx val mem)
	for {
		_ = v.Args[3]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		idx := v.Args[1]
		val := v.Args[2]
		mem := v.Args[3]
		v.reset(OpARM64MOVDstore)
		v.AuxInt = c
		v.AddArg(idx)
		v.AddArg(val)
		v.AddArg(mem)
		return true
	}
	// match: (MOVDstoreidx ptr (SLLconst [3] idx) val mem)
	// cond:
	// result: (MOVDstoreidx8 ptr idx val mem)
	for {
		_ = v.Args[3]
		ptr := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64SLLconst {
			break
		}
		if v_1.AuxInt != 3 {
			break
		}
		idx := v_1.Args[0]
		val := v.Args[2]
		mem := v.Args[3]
		v.reset(OpARM64MOVDstoreidx8)
		v.AddArg(ptr)
		v.AddArg(idx)
		v.AddArg(val)
		v.AddArg(mem)
		return true
	}
	// match: (MOVDstoreidx (SLLconst [3] idx) ptr val mem)
	// cond:
	// result: (MOVDstoreidx8 ptr idx val mem)
	for {
		_ = v.Args[3]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64SLLconst {
			break
		}
		if v_0.AuxInt != 3 {
			break
		}
		idx := v_0.Args[0]
		ptr := v.Args[1]
		val := v.Args[2]
		mem := v.Args[3]
		v.reset(OpARM64MOVDstoreidx8)
		v.AddArg(ptr)
		v.AddArg(idx)
		v.AddArg(val)
		v.AddArg(mem)
		return true
	}
	// match: (MOVDstoreidx ptr idx (MOVDconst [0]) mem)
	// cond:
	// result: (MOVDstorezeroidx ptr idx mem)
	for {
		_ = v.Args[3]
		ptr := v.Args[0]
		idx := v.Args[1]
		v_2 := v.Args[2]
		if v_2.Op != OpARM64MOVDconst {
			break
		}
		if v_2.AuxInt != 0 {
			break
		}
		mem := v.Args[3]
		v.reset(OpARM64MOVDstorezeroidx)
		v.AddArg(ptr)
		v.AddArg(idx)
		v.AddArg(mem)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64MOVDstoreidx8_0 applies the generated rewrite
// rules for MOVDstoreidx8 (index scaled by 8): fold a constant index into
// an offset-form store with offset c<<3, and turn a store of constant
// zero into MOVDstorezeroidx8. Reports whether a rewrite fired. Generated
// from gen/ARM64.rules; DO NOT EDIT by hand.
func rewriteValueARM64_OpARM64MOVDstoreidx8_0(v *Value) bool {
	// match: (MOVDstoreidx8 ptr (MOVDconst [c]) val mem)
	// cond:
	// result: (MOVDstore [c<<3] ptr val mem)
	for {
		_ = v.Args[3]
		ptr := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		c := v_1.AuxInt
		val := v.Args[2]
		mem := v.Args[3]
		v.reset(OpARM64MOVDstore)
		v.AuxInt = c << 3
		v.AddArg(ptr)
		v.AddArg(val)
		v.AddArg(mem)
		return true
	}
	// match: (MOVDstoreidx8 ptr idx (MOVDconst [0]) mem)
	// cond:
	// result: (MOVDstorezeroidx8 ptr idx mem)
	for {
		_ = v.Args[3]
		ptr := v.Args[0]
		idx := v.Args[1]
		v_2 := v.Args[2]
		if v_2.Op != OpARM64MOVDconst {
			break
		}
		if v_2.AuxInt != 0 {
			break
		}
		mem := v.Args[3]
		v.reset(OpARM64MOVDstorezeroidx8)
		v.AddArg(ptr)
		v.AddArg(idx)
		v.AddArg(mem)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64MOVDstorezero_0 applies the generated rewrite
// rules for MOVDstorezero: fold ADDconst/MOVDaddr offsets, switch to
// (shifted) register-indexed forms, and merge two adjacent doubleword
// zero-stores (offset, plain-indexed, or 8-scaled-indexed pairs) into a
// single 16-byte MOVQstorezero. Reports whether a rewrite fired.
// Generated from gen/ARM64.rules; DO NOT EDIT by hand.
func rewriteValueARM64_OpARM64MOVDstorezero_0(v *Value) bool {
	b := v.Block
	_ = b
	config := b.Func.Config
	_ = config
	// match: (MOVDstorezero [off1] {sym} (ADDconst [off2] ptr) mem)
	// cond: is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)
	// result: (MOVDstorezero [off1+off2] {sym} ptr mem)
	for {
		off1 := v.AuxInt
		sym := v.Aux
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64ADDconst {
			break
		}
		off2 := v_0.AuxInt
		ptr := v_0.Args[0]
		mem := v.Args[1]
		if !(is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
			break
		}
		v.reset(OpARM64MOVDstorezero)
		v.AuxInt = off1 + off2
		v.Aux = sym
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (MOVDstorezero [off1] {sym1} (MOVDaddr [off2] {sym2} ptr) mem)
	// cond: canMergeSym(sym1,sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)
	// result: (MOVDstorezero [off1+off2] {mergeSym(sym1,sym2)} ptr mem)
	for {
		off1 := v.AuxInt
		sym1 := v.Aux
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDaddr {
			break
		}
		off2 := v_0.AuxInt
		sym2 := v_0.Aux
		ptr := v_0.Args[0]
		mem := v.Args[1]
		if !(canMergeSym(sym1, sym2) && is32Bit(off1+off2) && (ptr.Op != OpSB || !config.ctxt.Flag_shared)) {
			break
		}
		v.reset(OpARM64MOVDstorezero)
		v.AuxInt = off1 + off2
		v.Aux = mergeSym(sym1, sym2)
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (MOVDstorezero [off] {sym} (ADD ptr idx) mem)
	// cond: off == 0 && sym == nil
	// result: (MOVDstorezeroidx ptr idx mem)
	for {
		off := v.AuxInt
		sym := v.Aux
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64ADD {
			break
		}
		_ = v_0.Args[1]
		ptr := v_0.Args[0]
		idx := v_0.Args[1]
		mem := v.Args[1]
		if !(off == 0 && sym == nil) {
			break
		}
		v.reset(OpARM64MOVDstorezeroidx)
		v.AddArg(ptr)
		v.AddArg(idx)
		v.AddArg(mem)
		return true
	}
	// match: (MOVDstorezero [off] {sym} (ADDshiftLL [3] ptr idx) mem)
	// cond: off == 0 && sym == nil
	// result: (MOVDstorezeroidx8 ptr idx mem)
	for {
		off := v.AuxInt
		sym := v.Aux
		_ = v.Args[1]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64ADDshiftLL {
			break
		}
		if v_0.AuxInt != 3 {
			break
		}
		_ = v_0.Args[1]
		ptr := v_0.Args[0]
		idx := v_0.Args[1]
		mem := v.Args[1]
		if !(off == 0 && sym == nil) {
			break
		}
		v.reset(OpARM64MOVDstorezeroidx8)
		v.AddArg(ptr)
		v.AddArg(idx)
		v.AddArg(mem)
		return true
	}
	// match: (MOVDstorezero [i] {s} ptr0 x:(MOVDstorezero [j] {s} ptr1 mem))
	// cond: x.Uses == 1 && areAdjacentOffsets(i,j,8) && is32Bit(min(i,j)) && isSamePtr(ptr0, ptr1) && clobber(x)
	// result: (MOVQstorezero [min(i,j)] {s} ptr0 mem)
	for {
		i := v.AuxInt
		s := v.Aux
		_ = v.Args[1]
		ptr0 := v.Args[0]
		x := v.Args[1]
		if x.Op != OpARM64MOVDstorezero {
			break
		}
		j := x.AuxInt
		if x.Aux != s {
			break
		}
		_ = x.Args[1]
		ptr1 := x.Args[0]
		mem := x.Args[1]
		if !(x.Uses == 1 && areAdjacentOffsets(i, j, 8) && is32Bit(min(i, j)) && isSamePtr(ptr0, ptr1) && clobber(x)) {
			break
		}
		v.reset(OpARM64MOVQstorezero)
		v.AuxInt = min(i, j)
		v.Aux = s
		v.AddArg(ptr0)
		v.AddArg(mem)
		return true
	}
	// match: (MOVDstorezero [8] {s} p0:(ADD ptr0 idx0) x:(MOVDstorezeroidx ptr1 idx1 mem))
	// cond: x.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x)
	// result: (MOVQstorezero [0] {s} p0 mem)
	for {
		if v.AuxInt != 8 {
			break
		}
		s := v.Aux
		_ = v.Args[1]
		p0 := v.Args[0]
		if p0.Op != OpARM64ADD {
			break
		}
		_ = p0.Args[1]
		ptr0 := p0.Args[0]
		idx0 := p0.Args[1]
		x := v.Args[1]
		if x.Op != OpARM64MOVDstorezeroidx {
			break
		}
		_ = x.Args[2]
		ptr1 := x.Args[0]
		idx1 := x.Args[1]
		mem := x.Args[2]
		if !(x.Uses == 1 && s == nil && (isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) || isSamePtr(ptr0, idx1) && isSamePtr(idx0, ptr1)) && clobber(x)) {
			break
		}
		v.reset(OpARM64MOVQstorezero)
		v.AuxInt = 0
		v.Aux = s
		v.AddArg(p0)
		v.AddArg(mem)
		return true
	}
	// match: (MOVDstorezero [8] {s} p0:(ADDshiftLL [3] ptr0 idx0) x:(MOVDstorezeroidx8 ptr1 idx1 mem))
	// cond: x.Uses == 1 && s == nil && isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) && clobber(x)
	// result: (MOVQstorezero [0] {s} p0 mem)
	for {
		if v.AuxInt != 8 {
			break
		}
		s := v.Aux
		_ = v.Args[1]
		p0 := v.Args[0]
		if p0.Op != OpARM64ADDshiftLL {
			break
		}
		if p0.AuxInt != 3 {
			break
		}
		_ = p0.Args[1]
		ptr0 := p0.Args[0]
		idx0 := p0.Args[1]
		x := v.Args[1]
		if x.Op != OpARM64MOVDstorezeroidx8 {
			break
		}
		_ = x.Args[2]
		ptr1 := x.Args[0]
		idx1 := x.Args[1]
		mem := x.Args[2]
		if !(x.Uses == 1 && s == nil && isSamePtr(ptr0, ptr1) && isSamePtr(idx0, idx1) && clobber(x)) {
			break
		}
		v.reset(OpARM64MOVQstorezero)
		v.AuxInt = 0
		v.Aux = s
		v.AddArg(p0)
		v.AddArg(mem)
		return true
	}
	return false
}
// rewriteValueARM64_OpARM64MOVDstorezeroidx_0 applies peephole rewrite rules
// to a MOVDstorezeroidx value v (store a zero doubleword to the address
// ptr+idx). It reports whether v was rewritten in place via v.reset.
//
// The rules implemented (generated from gen/ARM64.rules) are:
//  1. A constant index is folded into the store's immediate offset,
//     producing the offset-addressed MOVDstorezero form; rules 1 and 2 are
//     the two commuted operand orders of the same fold.
//  2. An index of the form idx<<3 (SLLconst [3]) is recognized as an
//     8-scaled index, producing the scaled-index MOVDstorezeroidx8 form;
//     rules 3 and 4 are again the two commuted operand orders.
//
// NOTE(review): this file is generated — do not hand-edit; change
// gen/ARM64.rules and regenerate instead.
func rewriteValueARM64_OpARM64MOVDstorezeroidx_0(v *Value) bool {
	// match: (MOVDstorezeroidx ptr (MOVDconst [c]) mem)
	// cond:
	// result: (MOVDstorezero [c] ptr mem)
	for {
		_ = v.Args[2]
		ptr := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64MOVDconst {
			break
		}
		// Constant index becomes the immediate offset (AuxInt) of the
		// offset-addressed zero store.
		c := v_1.AuxInt
		mem := v.Args[2]
		v.reset(OpARM64MOVDstorezero)
		v.AuxInt = c
		v.AddArg(ptr)
		v.AddArg(mem)
		return true
	}
	// match: (MOVDstorezeroidx (MOVDconst [c]) idx mem)
	// cond:
	// result: (MOVDstorezero [c] idx mem)
	for {
		// Commuted form of the rule above: the constant appears as the
		// first operand, the remaining operand serves as the base.
		_ = v.Args[2]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64MOVDconst {
			break
		}
		c := v_0.AuxInt
		idx := v.Args[1]
		mem := v.Args[2]
		v.reset(OpARM64MOVDstorezero)
		v.AuxInt = c
		v.AddArg(idx)
		v.AddArg(mem)
		return true
	}
	// match: (MOVDstorezeroidx ptr (SLLconst [3] idx) mem)
	// cond:
	// result: (MOVDstorezeroidx8 ptr idx mem)
	for {
		_ = v.Args[2]
		ptr := v.Args[0]
		v_1 := v.Args[1]
		if v_1.Op != OpARM64SLLconst {
			break
		}
		// Only a left shift by exactly 3 (i.e. idx*8) matches the
		// 8-scaled indexed addressing form.
		if v_1.AuxInt != 3 {
			break
		}
		idx := v_1.Args[0]
		mem := v.Args[2]
		v.reset(OpARM64MOVDstorezeroidx8)
		v.AddArg(ptr)
		v.AddArg(idx)
		v.AddArg(mem)
		return true
	}
	// match: (MOVDstorezeroidx (SLLconst [3] idx) ptr mem)
	// cond:
	// result: (MOVDstorezeroidx8 ptr idx mem)
	for {
		// Commuted form of the rule above: the shifted index appears as
		// the first operand; the result still places ptr first.
		_ = v.Args[2]
		v_0 := v.Args[0]
		if v_0.Op != OpARM64SLLconst {
			break
		}
		if v_0.AuxInt != 3 {
			break
		}
		idx := v_0.Args[0]
		ptr := v.Args[1]
		mem := v.Args[2]
		v.reset(OpARM64MOVDstorezeroidx8)
		v.AddArg(ptr)
		v.AddArg(idx)
		v.AddArg(mem)
		return true
	}
	// No rule matched; leave v unchanged.
	return false