Black Lives Matter. Support the Equal Justice Initiative.

Source file src/cmd/compile/internal/ssa/rewriteAMD64splitload.go

Documentation: cmd/compile/internal/ssa

     1  // Code generated from gen/AMD64splitload.rules; DO NOT EDIT.
     2  // generated with: cd gen; go run *.go
     3  
     4  package ssa
     5  
// rewriteValueAMD64splitload dispatches v to the rewrite helper for its
// opcode and reports whether a rewrite was applied. The splitload pass
// turns fused compare-with-load opcodes back into a separate load plus a
// register compare.
// Code generated from gen/AMD64splitload.rules; change the rules, not this file.
func rewriteValueAMD64splitload(v *Value) bool {
	switch v.Op {
	case OpAMD64CMPBconstload:
		return rewriteValueAMD64splitload_OpAMD64CMPBconstload(v)
	case OpAMD64CMPBconstloadidx1:
		return rewriteValueAMD64splitload_OpAMD64CMPBconstloadidx1(v)
	case OpAMD64CMPBload:
		return rewriteValueAMD64splitload_OpAMD64CMPBload(v)
	case OpAMD64CMPBloadidx1:
		return rewriteValueAMD64splitload_OpAMD64CMPBloadidx1(v)
	case OpAMD64CMPLconstload:
		return rewriteValueAMD64splitload_OpAMD64CMPLconstload(v)
	case OpAMD64CMPLconstloadidx1:
		return rewriteValueAMD64splitload_OpAMD64CMPLconstloadidx1(v)
	case OpAMD64CMPLconstloadidx4:
		return rewriteValueAMD64splitload_OpAMD64CMPLconstloadidx4(v)
	case OpAMD64CMPLload:
		return rewriteValueAMD64splitload_OpAMD64CMPLload(v)
	case OpAMD64CMPLloadidx1:
		return rewriteValueAMD64splitload_OpAMD64CMPLloadidx1(v)
	case OpAMD64CMPLloadidx4:
		return rewriteValueAMD64splitload_OpAMD64CMPLloadidx4(v)
	case OpAMD64CMPQconstload:
		return rewriteValueAMD64splitload_OpAMD64CMPQconstload(v)
	case OpAMD64CMPQconstloadidx1:
		return rewriteValueAMD64splitload_OpAMD64CMPQconstloadidx1(v)
	case OpAMD64CMPQconstloadidx8:
		return rewriteValueAMD64splitload_OpAMD64CMPQconstloadidx8(v)
	case OpAMD64CMPQload:
		return rewriteValueAMD64splitload_OpAMD64CMPQload(v)
	case OpAMD64CMPQloadidx1:
		return rewriteValueAMD64splitload_OpAMD64CMPQloadidx1(v)
	case OpAMD64CMPQloadidx8:
		return rewriteValueAMD64splitload_OpAMD64CMPQloadidx8(v)
	case OpAMD64CMPWconstload:
		return rewriteValueAMD64splitload_OpAMD64CMPWconstload(v)
	case OpAMD64CMPWconstloadidx1:
		return rewriteValueAMD64splitload_OpAMD64CMPWconstloadidx1(v)
	case OpAMD64CMPWconstloadidx2:
		return rewriteValueAMD64splitload_OpAMD64CMPWconstloadidx2(v)
	case OpAMD64CMPWload:
		return rewriteValueAMD64splitload_OpAMD64CMPWload(v)
	case OpAMD64CMPWloadidx1:
		return rewriteValueAMD64splitload_OpAMD64CMPWloadidx1(v)
	case OpAMD64CMPWloadidx2:
		return rewriteValueAMD64splitload_OpAMD64CMPWloadidx2(v)
	}
	// No rule for this opcode: leave v unchanged.
	return false
}
// rewriteValueAMD64splitload_OpAMD64CMPBconstload splits a fused
// compare-constant-with-byte-load into an explicit MOVBload followed by a
// register compare: TESTB x,x when the constant is 0, CMPBconst otherwise.
// Code generated from gen/AMD64splitload.rules; change the rules, not this file.
func rewriteValueAMD64splitload_OpAMD64CMPBconstload(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (CMPBconstload {sym} [vo] ptr mem)
	// cond: vo.Val() == 0
	// result: (TESTB x:(MOVBload {sym} [vo.Off32()] ptr mem) x)
	for {
		// vo packs the compared constant (Val) and the load offset (Off).
		vo := auxIntToValAndOff(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		mem := v_1
		if !(vo.Val() == 0) {
			break
		}
		v.reset(OpAMD64TESTB)
		x := b.NewValue0(v.Pos, OpAMD64MOVBload, typ.UInt8)
		x.AuxInt = int32ToAuxInt(vo.Off32())
		x.Aux = symToAux(sym)
		x.AddArg2(ptr, mem)
		// TESTB x,x sets flags from x itself, i.e. compares x against 0.
		v.AddArg2(x, x)
		return true
	}
	// match: (CMPBconstload {sym} [vo] ptr mem)
	// cond: vo.Val() != 0
	// result: (CMPBconst (MOVBload {sym} [vo.Off32()] ptr mem) [vo.Val8()])
	for {
		vo := auxIntToValAndOff(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		mem := v_1
		if !(vo.Val() != 0) {
			break
		}
		v.reset(OpAMD64CMPBconst)
		v.AuxInt = int8ToAuxInt(vo.Val8())
		v0 := b.NewValue0(v.Pos, OpAMD64MOVBload, typ.UInt8)
		v0.AuxInt = int32ToAuxInt(vo.Off32())
		v0.Aux = symToAux(sym)
		v0.AddArg2(ptr, mem)
		v.AddArg(v0)
		return true
	}
	return false
}
// rewriteValueAMD64splitload_OpAMD64CMPBconstloadidx1 splits a fused
// compare-constant-with-indexed-byte-load (ptr+idx addressing) into an
// explicit MOVBloadidx1 followed by TESTB (constant 0) or CMPBconst.
// Code generated from gen/AMD64splitload.rules; change the rules, not this file.
func rewriteValueAMD64splitload_OpAMD64CMPBconstloadidx1(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (CMPBconstloadidx1 {sym} [vo] ptr idx mem)
	// cond: vo.Val() == 0
	// result: (TESTB x:(MOVBloadidx1 {sym} [vo.Off32()] ptr idx mem) x)
	for {
		// vo packs the compared constant (Val) and the load offset (Off).
		vo := auxIntToValAndOff(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		idx := v_1
		mem := v_2
		if !(vo.Val() == 0) {
			break
		}
		v.reset(OpAMD64TESTB)
		x := b.NewValue0(v.Pos, OpAMD64MOVBloadidx1, typ.UInt8)
		x.AuxInt = int32ToAuxInt(vo.Off32())
		x.Aux = symToAux(sym)
		x.AddArg3(ptr, idx, mem)
		// TESTB x,x sets flags from x itself, i.e. compares x against 0.
		v.AddArg2(x, x)
		return true
	}
	// match: (CMPBconstloadidx1 {sym} [vo] ptr idx mem)
	// cond: vo.Val() != 0
	// result: (CMPBconst (MOVBloadidx1 {sym} [vo.Off32()] ptr idx mem) [vo.Val8()])
	for {
		vo := auxIntToValAndOff(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		idx := v_1
		mem := v_2
		if !(vo.Val() != 0) {
			break
		}
		v.reset(OpAMD64CMPBconst)
		v.AuxInt = int8ToAuxInt(vo.Val8())
		v0 := b.NewValue0(v.Pos, OpAMD64MOVBloadidx1, typ.UInt8)
		v0.AuxInt = int32ToAuxInt(vo.Off32())
		v0.Aux = symToAux(sym)
		v0.AddArg3(ptr, idx, mem)
		v.AddArg(v0)
		return true
	}
	return false
}
// rewriteValueAMD64splitload_OpAMD64CMPBload splits a fused
// compare-with-byte-load into an explicit MOVBload feeding a CMPB.
// Always rewrites (single unconditional rule), so it always returns true.
// Code generated from gen/AMD64splitload.rules; change the rules, not this file.
func rewriteValueAMD64splitload_OpAMD64CMPBload(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (CMPBload {sym} [off] ptr x mem)
	// result: (CMPB (MOVBload {sym} [off] ptr mem) x)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		x := v_1
		mem := v_2
		v.reset(OpAMD64CMPB)
		v0 := b.NewValue0(v.Pos, OpAMD64MOVBload, typ.UInt8)
		v0.AuxInt = int32ToAuxInt(off)
		v0.Aux = symToAux(sym)
		v0.AddArg2(ptr, mem)
		v.AddArg2(v0, x)
		return true
	}
}
// rewriteValueAMD64splitload_OpAMD64CMPBloadidx1 splits a fused
// compare-with-indexed-byte-load (ptr+idx addressing) into an explicit
// MOVBloadidx1 feeding a CMPB. Always rewrites, so it always returns true.
// Code generated from gen/AMD64splitload.rules; change the rules, not this file.
func rewriteValueAMD64splitload_OpAMD64CMPBloadidx1(v *Value) bool {
	v_3 := v.Args[3]
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (CMPBloadidx1 {sym} [off] ptr idx x mem)
	// result: (CMPB (MOVBloadidx1 {sym} [off] ptr idx mem) x)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		idx := v_1
		x := v_2
		mem := v_3
		v.reset(OpAMD64CMPB)
		v0 := b.NewValue0(v.Pos, OpAMD64MOVBloadidx1, typ.UInt8)
		v0.AuxInt = int32ToAuxInt(off)
		v0.Aux = symToAux(sym)
		v0.AddArg3(ptr, idx, mem)
		v.AddArg2(v0, x)
		return true
	}
}
// rewriteValueAMD64splitload_OpAMD64CMPLconstload splits a fused
// compare-constant-with-32-bit-load into an explicit MOVLload followed by
// TESTL x,x (constant 0) or CMPLconst (non-zero constant).
// Code generated from gen/AMD64splitload.rules; change the rules, not this file.
func rewriteValueAMD64splitload_OpAMD64CMPLconstload(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (CMPLconstload {sym} [vo] ptr mem)
	// cond: vo.Val() == 0
	// result: (TESTL x:(MOVLload {sym} [vo.Off32()] ptr mem) x)
	for {
		// vo packs the compared constant (Val) and the load offset (Off).
		vo := auxIntToValAndOff(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		mem := v_1
		if !(vo.Val() == 0) {
			break
		}
		v.reset(OpAMD64TESTL)
		x := b.NewValue0(v.Pos, OpAMD64MOVLload, typ.UInt32)
		x.AuxInt = int32ToAuxInt(vo.Off32())
		x.Aux = symToAux(sym)
		x.AddArg2(ptr, mem)
		v.AddArg2(x, x)
		return true
	}
	// match: (CMPLconstload {sym} [vo] ptr mem)
	// cond: vo.Val() != 0
	// result: (CMPLconst (MOVLload {sym} [vo.Off32()] ptr mem) [vo.Val32()])
	for {
		vo := auxIntToValAndOff(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		mem := v_1
		if !(vo.Val() != 0) {
			break
		}
		v.reset(OpAMD64CMPLconst)
		v.AuxInt = int32ToAuxInt(vo.Val32())
		v0 := b.NewValue0(v.Pos, OpAMD64MOVLload, typ.UInt32)
		v0.AuxInt = int32ToAuxInt(vo.Off32())
		v0.Aux = symToAux(sym)
		v0.AddArg2(ptr, mem)
		v.AddArg(v0)
		return true
	}
	return false
}
// rewriteValueAMD64splitload_OpAMD64CMPLconstloadidx1 splits a fused
// compare-constant-with-indexed-32-bit-load (ptr+idx, scale 1) into an
// explicit MOVLloadidx1 followed by TESTL (constant 0) or CMPLconst.
// Code generated from gen/AMD64splitload.rules; change the rules, not this file.
func rewriteValueAMD64splitload_OpAMD64CMPLconstloadidx1(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (CMPLconstloadidx1 {sym} [vo] ptr idx mem)
	// cond: vo.Val() == 0
	// result: (TESTL x:(MOVLloadidx1 {sym} [vo.Off32()] ptr idx mem) x)
	for {
		// vo packs the compared constant (Val) and the load offset (Off).
		vo := auxIntToValAndOff(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		idx := v_1
		mem := v_2
		if !(vo.Val() == 0) {
			break
		}
		v.reset(OpAMD64TESTL)
		x := b.NewValue0(v.Pos, OpAMD64MOVLloadidx1, typ.UInt32)
		x.AuxInt = int32ToAuxInt(vo.Off32())
		x.Aux = symToAux(sym)
		x.AddArg3(ptr, idx, mem)
		v.AddArg2(x, x)
		return true
	}
	// match: (CMPLconstloadidx1 {sym} [vo] ptr idx mem)
	// cond: vo.Val() != 0
	// result: (CMPLconst (MOVLloadidx1 {sym} [vo.Off32()] ptr idx mem) [vo.Val32()])
	for {
		vo := auxIntToValAndOff(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		idx := v_1
		mem := v_2
		if !(vo.Val() != 0) {
			break
		}
		v.reset(OpAMD64CMPLconst)
		v.AuxInt = int32ToAuxInt(vo.Val32())
		v0 := b.NewValue0(v.Pos, OpAMD64MOVLloadidx1, typ.UInt32)
		v0.AuxInt = int32ToAuxInt(vo.Off32())
		v0.Aux = symToAux(sym)
		v0.AddArg3(ptr, idx, mem)
		v.AddArg(v0)
		return true
	}
	return false
}
// rewriteValueAMD64splitload_OpAMD64CMPLconstloadidx4 splits a fused
// compare-constant-with-indexed-32-bit-load (ptr+4*idx addressing) into an
// explicit MOVLloadidx4 followed by TESTL (constant 0) or CMPLconst.
// Code generated from gen/AMD64splitload.rules; change the rules, not this file.
func rewriteValueAMD64splitload_OpAMD64CMPLconstloadidx4(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (CMPLconstloadidx4 {sym} [vo] ptr idx mem)
	// cond: vo.Val() == 0
	// result: (TESTL x:(MOVLloadidx4 {sym} [vo.Off32()] ptr idx mem) x)
	for {
		// vo packs the compared constant (Val) and the load offset (Off).
		vo := auxIntToValAndOff(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		idx := v_1
		mem := v_2
		if !(vo.Val() == 0) {
			break
		}
		v.reset(OpAMD64TESTL)
		x := b.NewValue0(v.Pos, OpAMD64MOVLloadidx4, typ.UInt32)
		x.AuxInt = int32ToAuxInt(vo.Off32())
		x.Aux = symToAux(sym)
		x.AddArg3(ptr, idx, mem)
		v.AddArg2(x, x)
		return true
	}
	// match: (CMPLconstloadidx4 {sym} [vo] ptr idx mem)
	// cond: vo.Val() != 0
	// result: (CMPLconst (MOVLloadidx4 {sym} [vo.Off32()] ptr idx mem) [vo.Val32()])
	for {
		vo := auxIntToValAndOff(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		idx := v_1
		mem := v_2
		if !(vo.Val() != 0) {
			break
		}
		v.reset(OpAMD64CMPLconst)
		v.AuxInt = int32ToAuxInt(vo.Val32())
		v0 := b.NewValue0(v.Pos, OpAMD64MOVLloadidx4, typ.UInt32)
		v0.AuxInt = int32ToAuxInt(vo.Off32())
		v0.Aux = symToAux(sym)
		v0.AddArg3(ptr, idx, mem)
		v.AddArg(v0)
		return true
	}
	return false
}
// rewriteValueAMD64splitload_OpAMD64CMPLload splits a fused
// compare-with-32-bit-load into an explicit MOVLload feeding a CMPL.
// Always rewrites (single unconditional rule), so it always returns true.
// Code generated from gen/AMD64splitload.rules; change the rules, not this file.
func rewriteValueAMD64splitload_OpAMD64CMPLload(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (CMPLload {sym} [off] ptr x mem)
	// result: (CMPL (MOVLload {sym} [off] ptr mem) x)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		x := v_1
		mem := v_2
		v.reset(OpAMD64CMPL)
		v0 := b.NewValue0(v.Pos, OpAMD64MOVLload, typ.UInt32)
		v0.AuxInt = int32ToAuxInt(off)
		v0.Aux = symToAux(sym)
		v0.AddArg2(ptr, mem)
		v.AddArg2(v0, x)
		return true
	}
}
// rewriteValueAMD64splitload_OpAMD64CMPLloadidx1 splits a fused
// compare-with-indexed-32-bit-load (ptr+idx, scale 1) into an explicit
// MOVLloadidx1 feeding a CMPL. Always rewrites, so it always returns true.
// Code generated from gen/AMD64splitload.rules; change the rules, not this file.
func rewriteValueAMD64splitload_OpAMD64CMPLloadidx1(v *Value) bool {
	v_3 := v.Args[3]
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (CMPLloadidx1 {sym} [off] ptr idx x mem)
	// result: (CMPL (MOVLloadidx1 {sym} [off] ptr idx mem) x)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		idx := v_1
		x := v_2
		mem := v_3
		v.reset(OpAMD64CMPL)
		v0 := b.NewValue0(v.Pos, OpAMD64MOVLloadidx1, typ.UInt32)
		v0.AuxInt = int32ToAuxInt(off)
		v0.Aux = symToAux(sym)
		v0.AddArg3(ptr, idx, mem)
		v.AddArg2(v0, x)
		return true
	}
}
// rewriteValueAMD64splitload_OpAMD64CMPLloadidx4 splits a fused
// compare-with-indexed-32-bit-load (ptr+4*idx addressing) into an explicit
// MOVLloadidx4 feeding a CMPL. Always rewrites, so it always returns true.
// Code generated from gen/AMD64splitload.rules; change the rules, not this file.
func rewriteValueAMD64splitload_OpAMD64CMPLloadidx4(v *Value) bool {
	v_3 := v.Args[3]
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (CMPLloadidx4 {sym} [off] ptr idx x mem)
	// result: (CMPL (MOVLloadidx4 {sym} [off] ptr idx mem) x)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		idx := v_1
		x := v_2
		mem := v_3
		v.reset(OpAMD64CMPL)
		v0 := b.NewValue0(v.Pos, OpAMD64MOVLloadidx4, typ.UInt32)
		v0.AuxInt = int32ToAuxInt(off)
		v0.Aux = symToAux(sym)
		v0.AddArg3(ptr, idx, mem)
		v.AddArg2(v0, x)
		return true
	}
}
// rewriteValueAMD64splitload_OpAMD64CMPQconstload splits a fused
// compare-constant-with-64-bit-load into an explicit MOVQload followed by
// TESTQ x,x (constant 0) or CMPQconst (non-zero constant).
// Code generated from gen/AMD64splitload.rules; change the rules, not this file.
func rewriteValueAMD64splitload_OpAMD64CMPQconstload(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (CMPQconstload {sym} [vo] ptr mem)
	// cond: vo.Val() == 0
	// result: (TESTQ x:(MOVQload {sym} [vo.Off32()] ptr mem) x)
	for {
		// vo packs the compared constant (Val) and the load offset (Off).
		vo := auxIntToValAndOff(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		mem := v_1
		if !(vo.Val() == 0) {
			break
		}
		v.reset(OpAMD64TESTQ)
		x := b.NewValue0(v.Pos, OpAMD64MOVQload, typ.UInt64)
		x.AuxInt = int32ToAuxInt(vo.Off32())
		x.Aux = symToAux(sym)
		x.AddArg2(ptr, mem)
		v.AddArg2(x, x)
		return true
	}
	// match: (CMPQconstload {sym} [vo] ptr mem)
	// cond: vo.Val() != 0
	// result: (CMPQconst (MOVQload {sym} [vo.Off32()] ptr mem) [vo.Val32()])
	for {
		vo := auxIntToValAndOff(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		mem := v_1
		if !(vo.Val() != 0) {
			break
		}
		v.reset(OpAMD64CMPQconst)
		v.AuxInt = int32ToAuxInt(vo.Val32())
		v0 := b.NewValue0(v.Pos, OpAMD64MOVQload, typ.UInt64)
		v0.AuxInt = int32ToAuxInt(vo.Off32())
		v0.Aux = symToAux(sym)
		v0.AddArg2(ptr, mem)
		v.AddArg(v0)
		return true
	}
	return false
}
// rewriteValueAMD64splitload_OpAMD64CMPQconstloadidx1 splits a fused
// compare-constant-with-indexed-64-bit-load (ptr+idx, scale 1) into an
// explicit MOVQloadidx1 followed by TESTQ (constant 0) or CMPQconst.
// Code generated from gen/AMD64splitload.rules; change the rules, not this file.
func rewriteValueAMD64splitload_OpAMD64CMPQconstloadidx1(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (CMPQconstloadidx1 {sym} [vo] ptr idx mem)
	// cond: vo.Val() == 0
	// result: (TESTQ x:(MOVQloadidx1 {sym} [vo.Off32()] ptr idx mem) x)
	for {
		// vo packs the compared constant (Val) and the load offset (Off).
		vo := auxIntToValAndOff(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		idx := v_1
		mem := v_2
		if !(vo.Val() == 0) {
			break
		}
		v.reset(OpAMD64TESTQ)
		x := b.NewValue0(v.Pos, OpAMD64MOVQloadidx1, typ.UInt64)
		x.AuxInt = int32ToAuxInt(vo.Off32())
		x.Aux = symToAux(sym)
		x.AddArg3(ptr, idx, mem)
		v.AddArg2(x, x)
		return true
	}
	// match: (CMPQconstloadidx1 {sym} [vo] ptr idx mem)
	// cond: vo.Val() != 0
	// result: (CMPQconst (MOVQloadidx1 {sym} [vo.Off32()] ptr idx mem) [vo.Val32()])
	for {
		vo := auxIntToValAndOff(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		idx := v_1
		mem := v_2
		if !(vo.Val() != 0) {
			break
		}
		v.reset(OpAMD64CMPQconst)
		v.AuxInt = int32ToAuxInt(vo.Val32())
		v0 := b.NewValue0(v.Pos, OpAMD64MOVQloadidx1, typ.UInt64)
		v0.AuxInt = int32ToAuxInt(vo.Off32())
		v0.Aux = symToAux(sym)
		v0.AddArg3(ptr, idx, mem)
		v.AddArg(v0)
		return true
	}
	return false
}
// rewriteValueAMD64splitload_OpAMD64CMPQconstloadidx8 splits a fused
// compare-constant-with-indexed-64-bit-load (ptr+8*idx addressing) into an
// explicit MOVQloadidx8 followed by TESTQ (constant 0) or CMPQconst.
// Code generated from gen/AMD64splitload.rules; change the rules, not this file.
func rewriteValueAMD64splitload_OpAMD64CMPQconstloadidx8(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (CMPQconstloadidx8 {sym} [vo] ptr idx mem)
	// cond: vo.Val() == 0
	// result: (TESTQ x:(MOVQloadidx8 {sym} [vo.Off32()] ptr idx mem) x)
	for {
		// vo packs the compared constant (Val) and the load offset (Off).
		vo := auxIntToValAndOff(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		idx := v_1
		mem := v_2
		if !(vo.Val() == 0) {
			break
		}
		v.reset(OpAMD64TESTQ)
		x := b.NewValue0(v.Pos, OpAMD64MOVQloadidx8, typ.UInt64)
		x.AuxInt = int32ToAuxInt(vo.Off32())
		x.Aux = symToAux(sym)
		x.AddArg3(ptr, idx, mem)
		v.AddArg2(x, x)
		return true
	}
	// match: (CMPQconstloadidx8 {sym} [vo] ptr idx mem)
	// cond: vo.Val() != 0
	// result: (CMPQconst (MOVQloadidx8 {sym} [vo.Off32()] ptr idx mem) [vo.Val32()])
	for {
		vo := auxIntToValAndOff(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		idx := v_1
		mem := v_2
		if !(vo.Val() != 0) {
			break
		}
		v.reset(OpAMD64CMPQconst)
		v.AuxInt = int32ToAuxInt(vo.Val32())
		v0 := b.NewValue0(v.Pos, OpAMD64MOVQloadidx8, typ.UInt64)
		v0.AuxInt = int32ToAuxInt(vo.Off32())
		v0.Aux = symToAux(sym)
		v0.AddArg3(ptr, idx, mem)
		v.AddArg(v0)
		return true
	}
	return false
}
// rewriteValueAMD64splitload_OpAMD64CMPQload splits a fused
// compare-with-64-bit-load into an explicit MOVQload feeding a CMPQ.
// Always rewrites (single unconditional rule), so it always returns true.
// Code generated from gen/AMD64splitload.rules; change the rules, not this file.
func rewriteValueAMD64splitload_OpAMD64CMPQload(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (CMPQload {sym} [off] ptr x mem)
	// result: (CMPQ (MOVQload {sym} [off] ptr mem) x)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		x := v_1
		mem := v_2
		v.reset(OpAMD64CMPQ)
		v0 := b.NewValue0(v.Pos, OpAMD64MOVQload, typ.UInt64)
		v0.AuxInt = int32ToAuxInt(off)
		v0.Aux = symToAux(sym)
		v0.AddArg2(ptr, mem)
		v.AddArg2(v0, x)
		return true
	}
}
// rewriteValueAMD64splitload_OpAMD64CMPQloadidx1 splits a fused
// compare-with-indexed-64-bit-load (ptr+idx, scale 1) into an explicit
// MOVQloadidx1 feeding a CMPQ. Always rewrites, so it always returns true.
// Code generated from gen/AMD64splitload.rules; change the rules, not this file.
func rewriteValueAMD64splitload_OpAMD64CMPQloadidx1(v *Value) bool {
	v_3 := v.Args[3]
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (CMPQloadidx1 {sym} [off] ptr idx x mem)
	// result: (CMPQ (MOVQloadidx1 {sym} [off] ptr idx mem) x)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		idx := v_1
		x := v_2
		mem := v_3
		v.reset(OpAMD64CMPQ)
		v0 := b.NewValue0(v.Pos, OpAMD64MOVQloadidx1, typ.UInt64)
		v0.AuxInt = int32ToAuxInt(off)
		v0.Aux = symToAux(sym)
		v0.AddArg3(ptr, idx, mem)
		v.AddArg2(v0, x)
		return true
	}
}
// rewriteValueAMD64splitload_OpAMD64CMPQloadidx8 splits a fused
// compare-with-indexed-64-bit-load (ptr+8*idx addressing) into an explicit
// MOVQloadidx8 feeding a CMPQ. Always rewrites, so it always returns true.
// Code generated from gen/AMD64splitload.rules; change the rules, not this file.
func rewriteValueAMD64splitload_OpAMD64CMPQloadidx8(v *Value) bool {
	v_3 := v.Args[3]
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (CMPQloadidx8 {sym} [off] ptr idx x mem)
	// result: (CMPQ (MOVQloadidx8 {sym} [off] ptr idx mem) x)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		idx := v_1
		x := v_2
		mem := v_3
		v.reset(OpAMD64CMPQ)
		v0 := b.NewValue0(v.Pos, OpAMD64MOVQloadidx8, typ.UInt64)
		v0.AuxInt = int32ToAuxInt(off)
		v0.Aux = symToAux(sym)
		v0.AddArg3(ptr, idx, mem)
		v.AddArg2(v0, x)
		return true
	}
}
// rewriteValueAMD64splitload_OpAMD64CMPWconstload splits a fused
// compare-constant-with-16-bit-load into an explicit MOVWload followed by
// TESTW x,x (constant 0) or CMPWconst (non-zero constant).
// Code generated from gen/AMD64splitload.rules; change the rules, not this file.
func rewriteValueAMD64splitload_OpAMD64CMPWconstload(v *Value) bool {
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (CMPWconstload {sym} [vo] ptr mem)
	// cond: vo.Val() == 0
	// result: (TESTW x:(MOVWload {sym} [vo.Off32()] ptr mem) x)
	for {
		// vo packs the compared constant (Val) and the load offset (Off).
		vo := auxIntToValAndOff(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		mem := v_1
		if !(vo.Val() == 0) {
			break
		}
		v.reset(OpAMD64TESTW)
		x := b.NewValue0(v.Pos, OpAMD64MOVWload, typ.UInt16)
		x.AuxInt = int32ToAuxInt(vo.Off32())
		x.Aux = symToAux(sym)
		x.AddArg2(ptr, mem)
		v.AddArg2(x, x)
		return true
	}
	// match: (CMPWconstload {sym} [vo] ptr mem)
	// cond: vo.Val() != 0
	// result: (CMPWconst (MOVWload {sym} [vo.Off32()] ptr mem) [vo.Val16()])
	for {
		vo := auxIntToValAndOff(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		mem := v_1
		if !(vo.Val() != 0) {
			break
		}
		v.reset(OpAMD64CMPWconst)
		v.AuxInt = int16ToAuxInt(vo.Val16())
		v0 := b.NewValue0(v.Pos, OpAMD64MOVWload, typ.UInt16)
		v0.AuxInt = int32ToAuxInt(vo.Off32())
		v0.Aux = symToAux(sym)
		v0.AddArg2(ptr, mem)
		v.AddArg(v0)
		return true
	}
	return false
}
// rewriteValueAMD64splitload_OpAMD64CMPWconstloadidx1 splits a fused
// compare-constant-with-indexed-16-bit-load (ptr+idx, scale 1) into an
// explicit MOVWloadidx1 followed by TESTW (constant 0) or CMPWconst.
// Code generated from gen/AMD64splitload.rules; change the rules, not this file.
func rewriteValueAMD64splitload_OpAMD64CMPWconstloadidx1(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (CMPWconstloadidx1 {sym} [vo] ptr idx mem)
	// cond: vo.Val() == 0
	// result: (TESTW x:(MOVWloadidx1 {sym} [vo.Off32()] ptr idx mem) x)
	for {
		// vo packs the compared constant (Val) and the load offset (Off).
		vo := auxIntToValAndOff(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		idx := v_1
		mem := v_2
		if !(vo.Val() == 0) {
			break
		}
		v.reset(OpAMD64TESTW)
		x := b.NewValue0(v.Pos, OpAMD64MOVWloadidx1, typ.UInt16)
		x.AuxInt = int32ToAuxInt(vo.Off32())
		x.Aux = symToAux(sym)
		x.AddArg3(ptr, idx, mem)
		v.AddArg2(x, x)
		return true
	}
	// match: (CMPWconstloadidx1 {sym} [vo] ptr idx mem)
	// cond: vo.Val() != 0
	// result: (CMPWconst (MOVWloadidx1 {sym} [vo.Off32()] ptr idx mem) [vo.Val16()])
	for {
		vo := auxIntToValAndOff(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		idx := v_1
		mem := v_2
		if !(vo.Val() != 0) {
			break
		}
		v.reset(OpAMD64CMPWconst)
		v.AuxInt = int16ToAuxInt(vo.Val16())
		v0 := b.NewValue0(v.Pos, OpAMD64MOVWloadidx1, typ.UInt16)
		v0.AuxInt = int32ToAuxInt(vo.Off32())
		v0.Aux = symToAux(sym)
		v0.AddArg3(ptr, idx, mem)
		v.AddArg(v0)
		return true
	}
	return false
}
// rewriteValueAMD64splitload_OpAMD64CMPWconstloadidx2 splits a fused
// compare-constant-with-indexed-16-bit-load (ptr+2*idx addressing) into an
// explicit MOVWloadidx2 followed by TESTW (constant 0) or CMPWconst.
// Code generated from gen/AMD64splitload.rules; change the rules, not this file.
func rewriteValueAMD64splitload_OpAMD64CMPWconstloadidx2(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (CMPWconstloadidx2 {sym} [vo] ptr idx mem)
	// cond: vo.Val() == 0
	// result: (TESTW x:(MOVWloadidx2 {sym} [vo.Off32()] ptr idx mem) x)
	for {
		// vo packs the compared constant (Val) and the load offset (Off).
		vo := auxIntToValAndOff(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		idx := v_1
		mem := v_2
		if !(vo.Val() == 0) {
			break
		}
		v.reset(OpAMD64TESTW)
		x := b.NewValue0(v.Pos, OpAMD64MOVWloadidx2, typ.UInt16)
		x.AuxInt = int32ToAuxInt(vo.Off32())
		x.Aux = symToAux(sym)
		x.AddArg3(ptr, idx, mem)
		v.AddArg2(x, x)
		return true
	}
	// match: (CMPWconstloadidx2 {sym} [vo] ptr idx mem)
	// cond: vo.Val() != 0
	// result: (CMPWconst (MOVWloadidx2 {sym} [vo.Off32()] ptr idx mem) [vo.Val16()])
	for {
		vo := auxIntToValAndOff(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		idx := v_1
		mem := v_2
		if !(vo.Val() != 0) {
			break
		}
		v.reset(OpAMD64CMPWconst)
		v.AuxInt = int16ToAuxInt(vo.Val16())
		v0 := b.NewValue0(v.Pos, OpAMD64MOVWloadidx2, typ.UInt16)
		v0.AuxInt = int32ToAuxInt(vo.Off32())
		v0.Aux = symToAux(sym)
		v0.AddArg3(ptr, idx, mem)
		v.AddArg(v0)
		return true
	}
	return false
}
// rewriteValueAMD64splitload_OpAMD64CMPWload splits a fused
// compare-with-16-bit-load into an explicit MOVWload feeding a CMPW.
// Always rewrites (single unconditional rule), so it always returns true.
// Code generated from gen/AMD64splitload.rules; change the rules, not this file.
func rewriteValueAMD64splitload_OpAMD64CMPWload(v *Value) bool {
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (CMPWload {sym} [off] ptr x mem)
	// result: (CMPW (MOVWload {sym} [off] ptr mem) x)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		x := v_1
		mem := v_2
		v.reset(OpAMD64CMPW)
		v0 := b.NewValue0(v.Pos, OpAMD64MOVWload, typ.UInt16)
		v0.AuxInt = int32ToAuxInt(off)
		v0.Aux = symToAux(sym)
		v0.AddArg2(ptr, mem)
		v.AddArg2(v0, x)
		return true
	}
}
// rewriteValueAMD64splitload_OpAMD64CMPWloadidx1 splits a fused
// compare-with-indexed-16-bit-load (ptr+idx, scale 1) into an explicit
// MOVWloadidx1 feeding a CMPW. Always rewrites, so it always returns true.
// Code generated from gen/AMD64splitload.rules; change the rules, not this file.
func rewriteValueAMD64splitload_OpAMD64CMPWloadidx1(v *Value) bool {
	v_3 := v.Args[3]
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (CMPWloadidx1 {sym} [off] ptr idx x mem)
	// result: (CMPW (MOVWloadidx1 {sym} [off] ptr idx mem) x)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		idx := v_1
		x := v_2
		mem := v_3
		v.reset(OpAMD64CMPW)
		v0 := b.NewValue0(v.Pos, OpAMD64MOVWloadidx1, typ.UInt16)
		v0.AuxInt = int32ToAuxInt(off)
		v0.Aux = symToAux(sym)
		v0.AddArg3(ptr, idx, mem)
		v.AddArg2(v0, x)
		return true
	}
}
// rewriteValueAMD64splitload_OpAMD64CMPWloadidx2 splits a fused
// compare-with-indexed-16-bit-load (ptr+2*idx addressing) into an explicit
// MOVWloadidx2 feeding a CMPW. Always rewrites, so it always returns true.
// Code generated from gen/AMD64splitload.rules; change the rules, not this file.
func rewriteValueAMD64splitload_OpAMD64CMPWloadidx2(v *Value) bool {
	v_3 := v.Args[3]
	v_2 := v.Args[2]
	v_1 := v.Args[1]
	v_0 := v.Args[0]
	b := v.Block
	typ := &b.Func.Config.Types
	// match: (CMPWloadidx2 {sym} [off] ptr idx x mem)
	// result: (CMPW (MOVWloadidx2 {sym} [off] ptr idx mem) x)
	for {
		off := auxIntToInt32(v.AuxInt)
		sym := auxToSym(v.Aux)
		ptr := v_0
		idx := v_1
		x := v_2
		mem := v_3
		v.reset(OpAMD64CMPW)
		v0 := b.NewValue0(v.Pos, OpAMD64MOVWloadidx2, typ.UInt16)
		v0.AuxInt = int32ToAuxInt(off)
		v0.Aux = symToAux(sym)
		v0.AddArg3(ptr, idx, mem)
		v.AddArg2(v0, x)
		return true
	}
}
// rewriteBlockAMD64splitload applies block rewrite rules for the splitload
// pass. gen/AMD64splitload.rules defines none, so the switch is empty and
// the function always reports false; it exists to keep the generated
// rewrite-pass interface uniform.
func rewriteBlockAMD64splitload(b *Block) bool {
	switch b.Kind {
	}
	return false
}
   854  

View as plain text