1
2
3
4 package ssa
5
6 import "cmd/compile/internal/types"
7
8 func rewriteValueMIPS64(v *Value) bool {
9 switch v.Op {
10 case OpAdd16:
11 v.Op = OpMIPS64ADDV
12 return true
13 case OpAdd32:
14 v.Op = OpMIPS64ADDV
15 return true
16 case OpAdd32F:
17 v.Op = OpMIPS64ADDF
18 return true
19 case OpAdd64:
20 v.Op = OpMIPS64ADDV
21 return true
22 case OpAdd64F:
23 v.Op = OpMIPS64ADDD
24 return true
25 case OpAdd8:
26 v.Op = OpMIPS64ADDV
27 return true
28 case OpAddPtr:
29 v.Op = OpMIPS64ADDV
30 return true
31 case OpAddr:
32 return rewriteValueMIPS64_OpAddr(v)
33 case OpAnd16:
34 v.Op = OpMIPS64AND
35 return true
36 case OpAnd32:
37 v.Op = OpMIPS64AND
38 return true
39 case OpAnd64:
40 v.Op = OpMIPS64AND
41 return true
42 case OpAnd8:
43 v.Op = OpMIPS64AND
44 return true
45 case OpAndB:
46 v.Op = OpMIPS64AND
47 return true
48 case OpAtomicAdd32:
49 v.Op = OpMIPS64LoweredAtomicAdd32
50 return true
51 case OpAtomicAdd64:
52 v.Op = OpMIPS64LoweredAtomicAdd64
53 return true
54 case OpAtomicCompareAndSwap32:
55 v.Op = OpMIPS64LoweredAtomicCas32
56 return true
57 case OpAtomicCompareAndSwap64:
58 v.Op = OpMIPS64LoweredAtomicCas64
59 return true
60 case OpAtomicExchange32:
61 v.Op = OpMIPS64LoweredAtomicExchange32
62 return true
63 case OpAtomicExchange64:
64 v.Op = OpMIPS64LoweredAtomicExchange64
65 return true
66 case OpAtomicLoad32:
67 v.Op = OpMIPS64LoweredAtomicLoad32
68 return true
69 case OpAtomicLoad64:
70 v.Op = OpMIPS64LoweredAtomicLoad64
71 return true
72 case OpAtomicLoad8:
73 v.Op = OpMIPS64LoweredAtomicLoad8
74 return true
75 case OpAtomicLoadPtr:
76 v.Op = OpMIPS64LoweredAtomicLoad64
77 return true
78 case OpAtomicStore32:
79 v.Op = OpMIPS64LoweredAtomicStore32
80 return true
81 case OpAtomicStore64:
82 v.Op = OpMIPS64LoweredAtomicStore64
83 return true
84 case OpAtomicStore8:
85 v.Op = OpMIPS64LoweredAtomicStore8
86 return true
87 case OpAtomicStorePtrNoWB:
88 v.Op = OpMIPS64LoweredAtomicStore64
89 return true
90 case OpAvg64u:
91 return rewriteValueMIPS64_OpAvg64u(v)
92 case OpClosureCall:
93 v.Op = OpMIPS64CALLclosure
94 return true
95 case OpCom16:
96 return rewriteValueMIPS64_OpCom16(v)
97 case OpCom32:
98 return rewriteValueMIPS64_OpCom32(v)
99 case OpCom64:
100 return rewriteValueMIPS64_OpCom64(v)
101 case OpCom8:
102 return rewriteValueMIPS64_OpCom8(v)
103 case OpConst16:
104 return rewriteValueMIPS64_OpConst16(v)
105 case OpConst32:
106 return rewriteValueMIPS64_OpConst32(v)
107 case OpConst32F:
108 return rewriteValueMIPS64_OpConst32F(v)
109 case OpConst64:
110 return rewriteValueMIPS64_OpConst64(v)
111 case OpConst64F:
112 return rewriteValueMIPS64_OpConst64F(v)
113 case OpConst8:
114 return rewriteValueMIPS64_OpConst8(v)
115 case OpConstBool:
116 return rewriteValueMIPS64_OpConstBool(v)
117 case OpConstNil:
118 return rewriteValueMIPS64_OpConstNil(v)
119 case OpCvt32Fto32:
120 v.Op = OpMIPS64TRUNCFW
121 return true
122 case OpCvt32Fto64:
123 v.Op = OpMIPS64TRUNCFV
124 return true
125 case OpCvt32Fto64F:
126 v.Op = OpMIPS64MOVFD
127 return true
128 case OpCvt32to32F:
129 v.Op = OpMIPS64MOVWF
130 return true
131 case OpCvt32to64F:
132 v.Op = OpMIPS64MOVWD
133 return true
134 case OpCvt64Fto32:
135 v.Op = OpMIPS64TRUNCDW
136 return true
137 case OpCvt64Fto32F:
138 v.Op = OpMIPS64MOVDF
139 return true
140 case OpCvt64Fto64:
141 v.Op = OpMIPS64TRUNCDV
142 return true
143 case OpCvt64to32F:
144 v.Op = OpMIPS64MOVVF
145 return true
146 case OpCvt64to64F:
147 v.Op = OpMIPS64MOVVD
148 return true
149 case OpCvtBoolToUint8:
150 v.Op = OpCopy
151 return true
152 case OpDiv16:
153 return rewriteValueMIPS64_OpDiv16(v)
154 case OpDiv16u:
155 return rewriteValueMIPS64_OpDiv16u(v)
156 case OpDiv32:
157 return rewriteValueMIPS64_OpDiv32(v)
158 case OpDiv32F:
159 v.Op = OpMIPS64DIVF
160 return true
161 case OpDiv32u:
162 return rewriteValueMIPS64_OpDiv32u(v)
163 case OpDiv64:
164 return rewriteValueMIPS64_OpDiv64(v)
165 case OpDiv64F:
166 v.Op = OpMIPS64DIVD
167 return true
168 case OpDiv64u:
169 return rewriteValueMIPS64_OpDiv64u(v)
170 case OpDiv8:
171 return rewriteValueMIPS64_OpDiv8(v)
172 case OpDiv8u:
173 return rewriteValueMIPS64_OpDiv8u(v)
174 case OpEq16:
175 return rewriteValueMIPS64_OpEq16(v)
176 case OpEq32:
177 return rewriteValueMIPS64_OpEq32(v)
178 case OpEq32F:
179 return rewriteValueMIPS64_OpEq32F(v)
180 case OpEq64:
181 return rewriteValueMIPS64_OpEq64(v)
182 case OpEq64F:
183 return rewriteValueMIPS64_OpEq64F(v)
184 case OpEq8:
185 return rewriteValueMIPS64_OpEq8(v)
186 case OpEqB:
187 return rewriteValueMIPS64_OpEqB(v)
188 case OpEqPtr:
189 return rewriteValueMIPS64_OpEqPtr(v)
190 case OpGetCallerPC:
191 v.Op = OpMIPS64LoweredGetCallerPC
192 return true
193 case OpGetCallerSP:
194 v.Op = OpMIPS64LoweredGetCallerSP
195 return true
196 case OpGetClosurePtr:
197 v.Op = OpMIPS64LoweredGetClosurePtr
198 return true
199 case OpHmul32:
200 return rewriteValueMIPS64_OpHmul32(v)
201 case OpHmul32u:
202 return rewriteValueMIPS64_OpHmul32u(v)
203 case OpHmul64:
204 return rewriteValueMIPS64_OpHmul64(v)
205 case OpHmul64u:
206 return rewriteValueMIPS64_OpHmul64u(v)
207 case OpInterCall:
208 v.Op = OpMIPS64CALLinter
209 return true
210 case OpIsInBounds:
211 return rewriteValueMIPS64_OpIsInBounds(v)
212 case OpIsNonNil:
213 return rewriteValueMIPS64_OpIsNonNil(v)
214 case OpIsSliceInBounds:
215 return rewriteValueMIPS64_OpIsSliceInBounds(v)
216 case OpLeq16:
217 return rewriteValueMIPS64_OpLeq16(v)
218 case OpLeq16U:
219 return rewriteValueMIPS64_OpLeq16U(v)
220 case OpLeq32:
221 return rewriteValueMIPS64_OpLeq32(v)
222 case OpLeq32F:
223 return rewriteValueMIPS64_OpLeq32F(v)
224 case OpLeq32U:
225 return rewriteValueMIPS64_OpLeq32U(v)
226 case OpLeq64:
227 return rewriteValueMIPS64_OpLeq64(v)
228 case OpLeq64F:
229 return rewriteValueMIPS64_OpLeq64F(v)
230 case OpLeq64U:
231 return rewriteValueMIPS64_OpLeq64U(v)
232 case OpLeq8:
233 return rewriteValueMIPS64_OpLeq8(v)
234 case OpLeq8U:
235 return rewriteValueMIPS64_OpLeq8U(v)
236 case OpLess16:
237 return rewriteValueMIPS64_OpLess16(v)
238 case OpLess16U:
239 return rewriteValueMIPS64_OpLess16U(v)
240 case OpLess32:
241 return rewriteValueMIPS64_OpLess32(v)
242 case OpLess32F:
243 return rewriteValueMIPS64_OpLess32F(v)
244 case OpLess32U:
245 return rewriteValueMIPS64_OpLess32U(v)
246 case OpLess64:
247 return rewriteValueMIPS64_OpLess64(v)
248 case OpLess64F:
249 return rewriteValueMIPS64_OpLess64F(v)
250 case OpLess64U:
251 return rewriteValueMIPS64_OpLess64U(v)
252 case OpLess8:
253 return rewriteValueMIPS64_OpLess8(v)
254 case OpLess8U:
255 return rewriteValueMIPS64_OpLess8U(v)
256 case OpLoad:
257 return rewriteValueMIPS64_OpLoad(v)
258 case OpLocalAddr:
259 return rewriteValueMIPS64_OpLocalAddr(v)
260 case OpLsh16x16:
261 return rewriteValueMIPS64_OpLsh16x16(v)
262 case OpLsh16x32:
263 return rewriteValueMIPS64_OpLsh16x32(v)
264 case OpLsh16x64:
265 return rewriteValueMIPS64_OpLsh16x64(v)
266 case OpLsh16x8:
267 return rewriteValueMIPS64_OpLsh16x8(v)
268 case OpLsh32x16:
269 return rewriteValueMIPS64_OpLsh32x16(v)
270 case OpLsh32x32:
271 return rewriteValueMIPS64_OpLsh32x32(v)
272 case OpLsh32x64:
273 return rewriteValueMIPS64_OpLsh32x64(v)
274 case OpLsh32x8:
275 return rewriteValueMIPS64_OpLsh32x8(v)
276 case OpLsh64x16:
277 return rewriteValueMIPS64_OpLsh64x16(v)
278 case OpLsh64x32:
279 return rewriteValueMIPS64_OpLsh64x32(v)
280 case OpLsh64x64:
281 return rewriteValueMIPS64_OpLsh64x64(v)
282 case OpLsh64x8:
283 return rewriteValueMIPS64_OpLsh64x8(v)
284 case OpLsh8x16:
285 return rewriteValueMIPS64_OpLsh8x16(v)
286 case OpLsh8x32:
287 return rewriteValueMIPS64_OpLsh8x32(v)
288 case OpLsh8x64:
289 return rewriteValueMIPS64_OpLsh8x64(v)
290 case OpLsh8x8:
291 return rewriteValueMIPS64_OpLsh8x8(v)
292 case OpMIPS64ADDV:
293 return rewriteValueMIPS64_OpMIPS64ADDV(v)
294 case OpMIPS64ADDVconst:
295 return rewriteValueMIPS64_OpMIPS64ADDVconst(v)
296 case OpMIPS64AND:
297 return rewriteValueMIPS64_OpMIPS64AND(v)
298 case OpMIPS64ANDconst:
299 return rewriteValueMIPS64_OpMIPS64ANDconst(v)
300 case OpMIPS64LoweredAtomicAdd32:
301 return rewriteValueMIPS64_OpMIPS64LoweredAtomicAdd32(v)
302 case OpMIPS64LoweredAtomicAdd64:
303 return rewriteValueMIPS64_OpMIPS64LoweredAtomicAdd64(v)
304 case OpMIPS64LoweredAtomicStore32:
305 return rewriteValueMIPS64_OpMIPS64LoweredAtomicStore32(v)
306 case OpMIPS64LoweredAtomicStore64:
307 return rewriteValueMIPS64_OpMIPS64LoweredAtomicStore64(v)
308 case OpMIPS64MOVBUload:
309 return rewriteValueMIPS64_OpMIPS64MOVBUload(v)
310 case OpMIPS64MOVBUreg:
311 return rewriteValueMIPS64_OpMIPS64MOVBUreg(v)
312 case OpMIPS64MOVBload:
313 return rewriteValueMIPS64_OpMIPS64MOVBload(v)
314 case OpMIPS64MOVBreg:
315 return rewriteValueMIPS64_OpMIPS64MOVBreg(v)
316 case OpMIPS64MOVBstore:
317 return rewriteValueMIPS64_OpMIPS64MOVBstore(v)
318 case OpMIPS64MOVBstorezero:
319 return rewriteValueMIPS64_OpMIPS64MOVBstorezero(v)
320 case OpMIPS64MOVDload:
321 return rewriteValueMIPS64_OpMIPS64MOVDload(v)
322 case OpMIPS64MOVDstore:
323 return rewriteValueMIPS64_OpMIPS64MOVDstore(v)
324 case OpMIPS64MOVFload:
325 return rewriteValueMIPS64_OpMIPS64MOVFload(v)
326 case OpMIPS64MOVFstore:
327 return rewriteValueMIPS64_OpMIPS64MOVFstore(v)
328 case OpMIPS64MOVHUload:
329 return rewriteValueMIPS64_OpMIPS64MOVHUload(v)
330 case OpMIPS64MOVHUreg:
331 return rewriteValueMIPS64_OpMIPS64MOVHUreg(v)
332 case OpMIPS64MOVHload:
333 return rewriteValueMIPS64_OpMIPS64MOVHload(v)
334 case OpMIPS64MOVHreg:
335 return rewriteValueMIPS64_OpMIPS64MOVHreg(v)
336 case OpMIPS64MOVHstore:
337 return rewriteValueMIPS64_OpMIPS64MOVHstore(v)
338 case OpMIPS64MOVHstorezero:
339 return rewriteValueMIPS64_OpMIPS64MOVHstorezero(v)
340 case OpMIPS64MOVVload:
341 return rewriteValueMIPS64_OpMIPS64MOVVload(v)
342 case OpMIPS64MOVVreg:
343 return rewriteValueMIPS64_OpMIPS64MOVVreg(v)
344 case OpMIPS64MOVVstore:
345 return rewriteValueMIPS64_OpMIPS64MOVVstore(v)
346 case OpMIPS64MOVVstorezero:
347 return rewriteValueMIPS64_OpMIPS64MOVVstorezero(v)
348 case OpMIPS64MOVWUload:
349 return rewriteValueMIPS64_OpMIPS64MOVWUload(v)
350 case OpMIPS64MOVWUreg:
351 return rewriteValueMIPS64_OpMIPS64MOVWUreg(v)
352 case OpMIPS64MOVWload:
353 return rewriteValueMIPS64_OpMIPS64MOVWload(v)
354 case OpMIPS64MOVWreg:
355 return rewriteValueMIPS64_OpMIPS64MOVWreg(v)
356 case OpMIPS64MOVWstore:
357 return rewriteValueMIPS64_OpMIPS64MOVWstore(v)
358 case OpMIPS64MOVWstorezero:
359 return rewriteValueMIPS64_OpMIPS64MOVWstorezero(v)
360 case OpMIPS64NEGV:
361 return rewriteValueMIPS64_OpMIPS64NEGV(v)
362 case OpMIPS64NOR:
363 return rewriteValueMIPS64_OpMIPS64NOR(v)
364 case OpMIPS64NORconst:
365 return rewriteValueMIPS64_OpMIPS64NORconst(v)
366 case OpMIPS64OR:
367 return rewriteValueMIPS64_OpMIPS64OR(v)
368 case OpMIPS64ORconst:
369 return rewriteValueMIPS64_OpMIPS64ORconst(v)
370 case OpMIPS64SGT:
371 return rewriteValueMIPS64_OpMIPS64SGT(v)
372 case OpMIPS64SGTU:
373 return rewriteValueMIPS64_OpMIPS64SGTU(v)
374 case OpMIPS64SGTUconst:
375 return rewriteValueMIPS64_OpMIPS64SGTUconst(v)
376 case OpMIPS64SGTconst:
377 return rewriteValueMIPS64_OpMIPS64SGTconst(v)
378 case OpMIPS64SLLV:
379 return rewriteValueMIPS64_OpMIPS64SLLV(v)
380 case OpMIPS64SLLVconst:
381 return rewriteValueMIPS64_OpMIPS64SLLVconst(v)
382 case OpMIPS64SRAV:
383 return rewriteValueMIPS64_OpMIPS64SRAV(v)
384 case OpMIPS64SRAVconst:
385 return rewriteValueMIPS64_OpMIPS64SRAVconst(v)
386 case OpMIPS64SRLV:
387 return rewriteValueMIPS64_OpMIPS64SRLV(v)
388 case OpMIPS64SRLVconst:
389 return rewriteValueMIPS64_OpMIPS64SRLVconst(v)
390 case OpMIPS64SUBV:
391 return rewriteValueMIPS64_OpMIPS64SUBV(v)
392 case OpMIPS64SUBVconst:
393 return rewriteValueMIPS64_OpMIPS64SUBVconst(v)
394 case OpMIPS64XOR:
395 return rewriteValueMIPS64_OpMIPS64XOR(v)
396 case OpMIPS64XORconst:
397 return rewriteValueMIPS64_OpMIPS64XORconst(v)
398 case OpMod16:
399 return rewriteValueMIPS64_OpMod16(v)
400 case OpMod16u:
401 return rewriteValueMIPS64_OpMod16u(v)
402 case OpMod32:
403 return rewriteValueMIPS64_OpMod32(v)
404 case OpMod32u:
405 return rewriteValueMIPS64_OpMod32u(v)
406 case OpMod64:
407 return rewriteValueMIPS64_OpMod64(v)
408 case OpMod64u:
409 return rewriteValueMIPS64_OpMod64u(v)
410 case OpMod8:
411 return rewriteValueMIPS64_OpMod8(v)
412 case OpMod8u:
413 return rewriteValueMIPS64_OpMod8u(v)
414 case OpMove:
415 return rewriteValueMIPS64_OpMove(v)
416 case OpMul16:
417 return rewriteValueMIPS64_OpMul16(v)
418 case OpMul32:
419 return rewriteValueMIPS64_OpMul32(v)
420 case OpMul32F:
421 v.Op = OpMIPS64MULF
422 return true
423 case OpMul64:
424 return rewriteValueMIPS64_OpMul64(v)
425 case OpMul64F:
426 v.Op = OpMIPS64MULD
427 return true
428 case OpMul64uhilo:
429 v.Op = OpMIPS64MULVU
430 return true
431 case OpMul8:
432 return rewriteValueMIPS64_OpMul8(v)
433 case OpNeg16:
434 v.Op = OpMIPS64NEGV
435 return true
436 case OpNeg32:
437 v.Op = OpMIPS64NEGV
438 return true
439 case OpNeg32F:
440 v.Op = OpMIPS64NEGF
441 return true
442 case OpNeg64:
443 v.Op = OpMIPS64NEGV
444 return true
445 case OpNeg64F:
446 v.Op = OpMIPS64NEGD
447 return true
448 case OpNeg8:
449 v.Op = OpMIPS64NEGV
450 return true
451 case OpNeq16:
452 return rewriteValueMIPS64_OpNeq16(v)
453 case OpNeq32:
454 return rewriteValueMIPS64_OpNeq32(v)
455 case OpNeq32F:
456 return rewriteValueMIPS64_OpNeq32F(v)
457 case OpNeq64:
458 return rewriteValueMIPS64_OpNeq64(v)
459 case OpNeq64F:
460 return rewriteValueMIPS64_OpNeq64F(v)
461 case OpNeq8:
462 return rewriteValueMIPS64_OpNeq8(v)
463 case OpNeqB:
464 v.Op = OpMIPS64XOR
465 return true
466 case OpNeqPtr:
467 return rewriteValueMIPS64_OpNeqPtr(v)
468 case OpNilCheck:
469 v.Op = OpMIPS64LoweredNilCheck
470 return true
471 case OpNot:
472 return rewriteValueMIPS64_OpNot(v)
473 case OpOffPtr:
474 return rewriteValueMIPS64_OpOffPtr(v)
475 case OpOr16:
476 v.Op = OpMIPS64OR
477 return true
478 case OpOr32:
479 v.Op = OpMIPS64OR
480 return true
481 case OpOr64:
482 v.Op = OpMIPS64OR
483 return true
484 case OpOr8:
485 v.Op = OpMIPS64OR
486 return true
487 case OpOrB:
488 v.Op = OpMIPS64OR
489 return true
490 case OpPanicBounds:
491 return rewriteValueMIPS64_OpPanicBounds(v)
492 case OpRotateLeft16:
493 return rewriteValueMIPS64_OpRotateLeft16(v)
494 case OpRotateLeft32:
495 return rewriteValueMIPS64_OpRotateLeft32(v)
496 case OpRotateLeft64:
497 return rewriteValueMIPS64_OpRotateLeft64(v)
498 case OpRotateLeft8:
499 return rewriteValueMIPS64_OpRotateLeft8(v)
500 case OpRound32F:
501 v.Op = OpCopy
502 return true
503 case OpRound64F:
504 v.Op = OpCopy
505 return true
506 case OpRsh16Ux16:
507 return rewriteValueMIPS64_OpRsh16Ux16(v)
508 case OpRsh16Ux32:
509 return rewriteValueMIPS64_OpRsh16Ux32(v)
510 case OpRsh16Ux64:
511 return rewriteValueMIPS64_OpRsh16Ux64(v)
512 case OpRsh16Ux8:
513 return rewriteValueMIPS64_OpRsh16Ux8(v)
514 case OpRsh16x16:
515 return rewriteValueMIPS64_OpRsh16x16(v)
516 case OpRsh16x32:
517 return rewriteValueMIPS64_OpRsh16x32(v)
518 case OpRsh16x64:
519 return rewriteValueMIPS64_OpRsh16x64(v)
520 case OpRsh16x8:
521 return rewriteValueMIPS64_OpRsh16x8(v)
522 case OpRsh32Ux16:
523 return rewriteValueMIPS64_OpRsh32Ux16(v)
524 case OpRsh32Ux32:
525 return rewriteValueMIPS64_OpRsh32Ux32(v)
526 case OpRsh32Ux64:
527 return rewriteValueMIPS64_OpRsh32Ux64(v)
528 case OpRsh32Ux8:
529 return rewriteValueMIPS64_OpRsh32Ux8(v)
530 case OpRsh32x16:
531 return rewriteValueMIPS64_OpRsh32x16(v)
532 case OpRsh32x32:
533 return rewriteValueMIPS64_OpRsh32x32(v)
534 case OpRsh32x64:
535 return rewriteValueMIPS64_OpRsh32x64(v)
536 case OpRsh32x8:
537 return rewriteValueMIPS64_OpRsh32x8(v)
538 case OpRsh64Ux16:
539 return rewriteValueMIPS64_OpRsh64Ux16(v)
540 case OpRsh64Ux32:
541 return rewriteValueMIPS64_OpRsh64Ux32(v)
542 case OpRsh64Ux64:
543 return rewriteValueMIPS64_OpRsh64Ux64(v)
544 case OpRsh64Ux8:
545 return rewriteValueMIPS64_OpRsh64Ux8(v)
546 case OpRsh64x16:
547 return rewriteValueMIPS64_OpRsh64x16(v)
548 case OpRsh64x32:
549 return rewriteValueMIPS64_OpRsh64x32(v)
550 case OpRsh64x64:
551 return rewriteValueMIPS64_OpRsh64x64(v)
552 case OpRsh64x8:
553 return rewriteValueMIPS64_OpRsh64x8(v)
554 case OpRsh8Ux16:
555 return rewriteValueMIPS64_OpRsh8Ux16(v)
556 case OpRsh8Ux32:
557 return rewriteValueMIPS64_OpRsh8Ux32(v)
558 case OpRsh8Ux64:
559 return rewriteValueMIPS64_OpRsh8Ux64(v)
560 case OpRsh8Ux8:
561 return rewriteValueMIPS64_OpRsh8Ux8(v)
562 case OpRsh8x16:
563 return rewriteValueMIPS64_OpRsh8x16(v)
564 case OpRsh8x32:
565 return rewriteValueMIPS64_OpRsh8x32(v)
566 case OpRsh8x64:
567 return rewriteValueMIPS64_OpRsh8x64(v)
568 case OpRsh8x8:
569 return rewriteValueMIPS64_OpRsh8x8(v)
570 case OpSelect0:
571 return rewriteValueMIPS64_OpSelect0(v)
572 case OpSelect1:
573 return rewriteValueMIPS64_OpSelect1(v)
574 case OpSignExt16to32:
575 v.Op = OpMIPS64MOVHreg
576 return true
577 case OpSignExt16to64:
578 v.Op = OpMIPS64MOVHreg
579 return true
580 case OpSignExt32to64:
581 v.Op = OpMIPS64MOVWreg
582 return true
583 case OpSignExt8to16:
584 v.Op = OpMIPS64MOVBreg
585 return true
586 case OpSignExt8to32:
587 v.Op = OpMIPS64MOVBreg
588 return true
589 case OpSignExt8to64:
590 v.Op = OpMIPS64MOVBreg
591 return true
592 case OpSlicemask:
593 return rewriteValueMIPS64_OpSlicemask(v)
594 case OpSqrt:
595 v.Op = OpMIPS64SQRTD
596 return true
597 case OpStaticCall:
598 v.Op = OpMIPS64CALLstatic
599 return true
600 case OpStore:
601 return rewriteValueMIPS64_OpStore(v)
602 case OpSub16:
603 v.Op = OpMIPS64SUBV
604 return true
605 case OpSub32:
606 v.Op = OpMIPS64SUBV
607 return true
608 case OpSub32F:
609 v.Op = OpMIPS64SUBF
610 return true
611 case OpSub64:
612 v.Op = OpMIPS64SUBV
613 return true
614 case OpSub64F:
615 v.Op = OpMIPS64SUBD
616 return true
617 case OpSub8:
618 v.Op = OpMIPS64SUBV
619 return true
620 case OpSubPtr:
621 v.Op = OpMIPS64SUBV
622 return true
623 case OpTrunc16to8:
624 v.Op = OpCopy
625 return true
626 case OpTrunc32to16:
627 v.Op = OpCopy
628 return true
629 case OpTrunc32to8:
630 v.Op = OpCopy
631 return true
632 case OpTrunc64to16:
633 v.Op = OpCopy
634 return true
635 case OpTrunc64to32:
636 v.Op = OpCopy
637 return true
638 case OpTrunc64to8:
639 v.Op = OpCopy
640 return true
641 case OpWB:
642 v.Op = OpMIPS64LoweredWB
643 return true
644 case OpXor16:
645 v.Op = OpMIPS64XOR
646 return true
647 case OpXor32:
648 v.Op = OpMIPS64XOR
649 return true
650 case OpXor64:
651 v.Op = OpMIPS64XOR
652 return true
653 case OpXor8:
654 v.Op = OpMIPS64XOR
655 return true
656 case OpZero:
657 return rewriteValueMIPS64_OpZero(v)
658 case OpZeroExt16to32:
659 v.Op = OpMIPS64MOVHUreg
660 return true
661 case OpZeroExt16to64:
662 v.Op = OpMIPS64MOVHUreg
663 return true
664 case OpZeroExt32to64:
665 v.Op = OpMIPS64MOVWUreg
666 return true
667 case OpZeroExt8to16:
668 v.Op = OpMIPS64MOVBUreg
669 return true
670 case OpZeroExt8to32:
671 v.Op = OpMIPS64MOVBUreg
672 return true
673 case OpZeroExt8to64:
674 v.Op = OpMIPS64MOVBUreg
675 return true
676 }
677 return false
678 }
679 func rewriteValueMIPS64_OpAddr(v *Value) bool {
680 v_0 := v.Args[0]
681
682
683 for {
684 sym := auxToSym(v.Aux)
685 base := v_0
686 v.reset(OpMIPS64MOVVaddr)
687 v.Aux = symToAux(sym)
688 v.AddArg(base)
689 return true
690 }
691 }
692 func rewriteValueMIPS64_OpAvg64u(v *Value) bool {
693 v_1 := v.Args[1]
694 v_0 := v.Args[0]
695 b := v.Block
696
697
698 for {
699 t := v.Type
700 x := v_0
701 y := v_1
702 v.reset(OpMIPS64ADDV)
703 v0 := b.NewValue0(v.Pos, OpMIPS64SRLVconst, t)
704 v0.AuxInt = int64ToAuxInt(1)
705 v1 := b.NewValue0(v.Pos, OpMIPS64SUBV, t)
706 v1.AddArg2(x, y)
707 v0.AddArg(v1)
708 v.AddArg2(v0, y)
709 return true
710 }
711 }
712 func rewriteValueMIPS64_OpCom16(v *Value) bool {
713 v_0 := v.Args[0]
714 b := v.Block
715 typ := &b.Func.Config.Types
716
717
718 for {
719 x := v_0
720 v.reset(OpMIPS64NOR)
721 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
722 v0.AuxInt = int64ToAuxInt(0)
723 v.AddArg2(v0, x)
724 return true
725 }
726 }
727 func rewriteValueMIPS64_OpCom32(v *Value) bool {
728 v_0 := v.Args[0]
729 b := v.Block
730 typ := &b.Func.Config.Types
731
732
733 for {
734 x := v_0
735 v.reset(OpMIPS64NOR)
736 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
737 v0.AuxInt = int64ToAuxInt(0)
738 v.AddArg2(v0, x)
739 return true
740 }
741 }
742 func rewriteValueMIPS64_OpCom64(v *Value) bool {
743 v_0 := v.Args[0]
744 b := v.Block
745 typ := &b.Func.Config.Types
746
747
748 for {
749 x := v_0
750 v.reset(OpMIPS64NOR)
751 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
752 v0.AuxInt = int64ToAuxInt(0)
753 v.AddArg2(v0, x)
754 return true
755 }
756 }
757 func rewriteValueMIPS64_OpCom8(v *Value) bool {
758 v_0 := v.Args[0]
759 b := v.Block
760 typ := &b.Func.Config.Types
761
762
763 for {
764 x := v_0
765 v.reset(OpMIPS64NOR)
766 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
767 v0.AuxInt = int64ToAuxInt(0)
768 v.AddArg2(v0, x)
769 return true
770 }
771 }
772 func rewriteValueMIPS64_OpConst16(v *Value) bool {
773
774
775 for {
776 val := auxIntToInt16(v.AuxInt)
777 v.reset(OpMIPS64MOVVconst)
778 v.AuxInt = int64ToAuxInt(int64(val))
779 return true
780 }
781 }
782 func rewriteValueMIPS64_OpConst32(v *Value) bool {
783
784
785 for {
786 val := auxIntToInt32(v.AuxInt)
787 v.reset(OpMIPS64MOVVconst)
788 v.AuxInt = int64ToAuxInt(int64(val))
789 return true
790 }
791 }
792 func rewriteValueMIPS64_OpConst32F(v *Value) bool {
793
794
795 for {
796 val := auxIntToFloat32(v.AuxInt)
797 v.reset(OpMIPS64MOVFconst)
798 v.AuxInt = float64ToAuxInt(float64(val))
799 return true
800 }
801 }
802 func rewriteValueMIPS64_OpConst64(v *Value) bool {
803
804
805 for {
806 val := auxIntToInt64(v.AuxInt)
807 v.reset(OpMIPS64MOVVconst)
808 v.AuxInt = int64ToAuxInt(int64(val))
809 return true
810 }
811 }
812 func rewriteValueMIPS64_OpConst64F(v *Value) bool {
813
814
815 for {
816 val := auxIntToFloat64(v.AuxInt)
817 v.reset(OpMIPS64MOVDconst)
818 v.AuxInt = float64ToAuxInt(float64(val))
819 return true
820 }
821 }
822 func rewriteValueMIPS64_OpConst8(v *Value) bool {
823
824
825 for {
826 val := auxIntToInt8(v.AuxInt)
827 v.reset(OpMIPS64MOVVconst)
828 v.AuxInt = int64ToAuxInt(int64(val))
829 return true
830 }
831 }
832 func rewriteValueMIPS64_OpConstBool(v *Value) bool {
833
834
835 for {
836 b := auxIntToBool(v.AuxInt)
837 v.reset(OpMIPS64MOVVconst)
838 v.AuxInt = int64ToAuxInt(int64(b2i(b)))
839 return true
840 }
841 }
842 func rewriteValueMIPS64_OpConstNil(v *Value) bool {
843
844
845 for {
846 v.reset(OpMIPS64MOVVconst)
847 v.AuxInt = int64ToAuxInt(0)
848 return true
849 }
850 }
851 func rewriteValueMIPS64_OpDiv16(v *Value) bool {
852 v_1 := v.Args[1]
853 v_0 := v.Args[0]
854 b := v.Block
855 typ := &b.Func.Config.Types
856
857
858 for {
859 x := v_0
860 y := v_1
861 v.reset(OpSelect1)
862 v0 := b.NewValue0(v.Pos, OpMIPS64DIVV, types.NewTuple(typ.Int64, typ.Int64))
863 v1 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
864 v1.AddArg(x)
865 v2 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
866 v2.AddArg(y)
867 v0.AddArg2(v1, v2)
868 v.AddArg(v0)
869 return true
870 }
871 }
872 func rewriteValueMIPS64_OpDiv16u(v *Value) bool {
873 v_1 := v.Args[1]
874 v_0 := v.Args[0]
875 b := v.Block
876 typ := &b.Func.Config.Types
877
878
879 for {
880 x := v_0
881 y := v_1
882 v.reset(OpSelect1)
883 v0 := b.NewValue0(v.Pos, OpMIPS64DIVVU, types.NewTuple(typ.UInt64, typ.UInt64))
884 v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
885 v1.AddArg(x)
886 v2 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
887 v2.AddArg(y)
888 v0.AddArg2(v1, v2)
889 v.AddArg(v0)
890 return true
891 }
892 }
893 func rewriteValueMIPS64_OpDiv32(v *Value) bool {
894 v_1 := v.Args[1]
895 v_0 := v.Args[0]
896 b := v.Block
897 typ := &b.Func.Config.Types
898
899
900 for {
901 x := v_0
902 y := v_1
903 v.reset(OpSelect1)
904 v0 := b.NewValue0(v.Pos, OpMIPS64DIVV, types.NewTuple(typ.Int64, typ.Int64))
905 v1 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
906 v1.AddArg(x)
907 v2 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
908 v2.AddArg(y)
909 v0.AddArg2(v1, v2)
910 v.AddArg(v0)
911 return true
912 }
913 }
914 func rewriteValueMIPS64_OpDiv32u(v *Value) bool {
915 v_1 := v.Args[1]
916 v_0 := v.Args[0]
917 b := v.Block
918 typ := &b.Func.Config.Types
919
920
921 for {
922 x := v_0
923 y := v_1
924 v.reset(OpSelect1)
925 v0 := b.NewValue0(v.Pos, OpMIPS64DIVVU, types.NewTuple(typ.UInt64, typ.UInt64))
926 v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
927 v1.AddArg(x)
928 v2 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
929 v2.AddArg(y)
930 v0.AddArg2(v1, v2)
931 v.AddArg(v0)
932 return true
933 }
934 }
935 func rewriteValueMIPS64_OpDiv64(v *Value) bool {
936 v_1 := v.Args[1]
937 v_0 := v.Args[0]
938 b := v.Block
939 typ := &b.Func.Config.Types
940
941
942 for {
943 x := v_0
944 y := v_1
945 v.reset(OpSelect1)
946 v0 := b.NewValue0(v.Pos, OpMIPS64DIVV, types.NewTuple(typ.Int64, typ.Int64))
947 v0.AddArg2(x, y)
948 v.AddArg(v0)
949 return true
950 }
951 }
952 func rewriteValueMIPS64_OpDiv64u(v *Value) bool {
953 v_1 := v.Args[1]
954 v_0 := v.Args[0]
955 b := v.Block
956 typ := &b.Func.Config.Types
957
958
959 for {
960 x := v_0
961 y := v_1
962 v.reset(OpSelect1)
963 v0 := b.NewValue0(v.Pos, OpMIPS64DIVVU, types.NewTuple(typ.UInt64, typ.UInt64))
964 v0.AddArg2(x, y)
965 v.AddArg(v0)
966 return true
967 }
968 }
969 func rewriteValueMIPS64_OpDiv8(v *Value) bool {
970 v_1 := v.Args[1]
971 v_0 := v.Args[0]
972 b := v.Block
973 typ := &b.Func.Config.Types
974
975
976 for {
977 x := v_0
978 y := v_1
979 v.reset(OpSelect1)
980 v0 := b.NewValue0(v.Pos, OpMIPS64DIVV, types.NewTuple(typ.Int64, typ.Int64))
981 v1 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
982 v1.AddArg(x)
983 v2 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
984 v2.AddArg(y)
985 v0.AddArg2(v1, v2)
986 v.AddArg(v0)
987 return true
988 }
989 }
990 func rewriteValueMIPS64_OpDiv8u(v *Value) bool {
991 v_1 := v.Args[1]
992 v_0 := v.Args[0]
993 b := v.Block
994 typ := &b.Func.Config.Types
995
996
997 for {
998 x := v_0
999 y := v_1
1000 v.reset(OpSelect1)
1001 v0 := b.NewValue0(v.Pos, OpMIPS64DIVVU, types.NewTuple(typ.UInt64, typ.UInt64))
1002 v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
1003 v1.AddArg(x)
1004 v2 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
1005 v2.AddArg(y)
1006 v0.AddArg2(v1, v2)
1007 v.AddArg(v0)
1008 return true
1009 }
1010 }
1011 func rewriteValueMIPS64_OpEq16(v *Value) bool {
1012 v_1 := v.Args[1]
1013 v_0 := v.Args[0]
1014 b := v.Block
1015 typ := &b.Func.Config.Types
1016
1017
1018 for {
1019 x := v_0
1020 y := v_1
1021 v.reset(OpMIPS64SGTU)
1022 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
1023 v0.AuxInt = int64ToAuxInt(1)
1024 v1 := b.NewValue0(v.Pos, OpMIPS64XOR, typ.UInt64)
1025 v2 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
1026 v2.AddArg(x)
1027 v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
1028 v3.AddArg(y)
1029 v1.AddArg2(v2, v3)
1030 v.AddArg2(v0, v1)
1031 return true
1032 }
1033 }
1034 func rewriteValueMIPS64_OpEq32(v *Value) bool {
1035 v_1 := v.Args[1]
1036 v_0 := v.Args[0]
1037 b := v.Block
1038 typ := &b.Func.Config.Types
1039
1040
1041 for {
1042 x := v_0
1043 y := v_1
1044 v.reset(OpMIPS64SGTU)
1045 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
1046 v0.AuxInt = int64ToAuxInt(1)
1047 v1 := b.NewValue0(v.Pos, OpMIPS64XOR, typ.UInt64)
1048 v2 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
1049 v2.AddArg(x)
1050 v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
1051 v3.AddArg(y)
1052 v1.AddArg2(v2, v3)
1053 v.AddArg2(v0, v1)
1054 return true
1055 }
1056 }
1057 func rewriteValueMIPS64_OpEq32F(v *Value) bool {
1058 v_1 := v.Args[1]
1059 v_0 := v.Args[0]
1060 b := v.Block
1061
1062
1063 for {
1064 x := v_0
1065 y := v_1
1066 v.reset(OpMIPS64FPFlagTrue)
1067 v0 := b.NewValue0(v.Pos, OpMIPS64CMPEQF, types.TypeFlags)
1068 v0.AddArg2(x, y)
1069 v.AddArg(v0)
1070 return true
1071 }
1072 }
1073 func rewriteValueMIPS64_OpEq64(v *Value) bool {
1074 v_1 := v.Args[1]
1075 v_0 := v.Args[0]
1076 b := v.Block
1077 typ := &b.Func.Config.Types
1078
1079
1080 for {
1081 x := v_0
1082 y := v_1
1083 v.reset(OpMIPS64SGTU)
1084 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
1085 v0.AuxInt = int64ToAuxInt(1)
1086 v1 := b.NewValue0(v.Pos, OpMIPS64XOR, typ.UInt64)
1087 v1.AddArg2(x, y)
1088 v.AddArg2(v0, v1)
1089 return true
1090 }
1091 }
1092 func rewriteValueMIPS64_OpEq64F(v *Value) bool {
1093 v_1 := v.Args[1]
1094 v_0 := v.Args[0]
1095 b := v.Block
1096
1097
1098 for {
1099 x := v_0
1100 y := v_1
1101 v.reset(OpMIPS64FPFlagTrue)
1102 v0 := b.NewValue0(v.Pos, OpMIPS64CMPEQD, types.TypeFlags)
1103 v0.AddArg2(x, y)
1104 v.AddArg(v0)
1105 return true
1106 }
1107 }
1108 func rewriteValueMIPS64_OpEq8(v *Value) bool {
1109 v_1 := v.Args[1]
1110 v_0 := v.Args[0]
1111 b := v.Block
1112 typ := &b.Func.Config.Types
1113
1114
1115 for {
1116 x := v_0
1117 y := v_1
1118 v.reset(OpMIPS64SGTU)
1119 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
1120 v0.AuxInt = int64ToAuxInt(1)
1121 v1 := b.NewValue0(v.Pos, OpMIPS64XOR, typ.UInt64)
1122 v2 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
1123 v2.AddArg(x)
1124 v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
1125 v3.AddArg(y)
1126 v1.AddArg2(v2, v3)
1127 v.AddArg2(v0, v1)
1128 return true
1129 }
1130 }
1131 func rewriteValueMIPS64_OpEqB(v *Value) bool {
1132 v_1 := v.Args[1]
1133 v_0 := v.Args[0]
1134 b := v.Block
1135 typ := &b.Func.Config.Types
1136
1137
1138 for {
1139 x := v_0
1140 y := v_1
1141 v.reset(OpMIPS64XOR)
1142 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
1143 v0.AuxInt = int64ToAuxInt(1)
1144 v1 := b.NewValue0(v.Pos, OpMIPS64XOR, typ.Bool)
1145 v1.AddArg2(x, y)
1146 v.AddArg2(v0, v1)
1147 return true
1148 }
1149 }
1150 func rewriteValueMIPS64_OpEqPtr(v *Value) bool {
1151 v_1 := v.Args[1]
1152 v_0 := v.Args[0]
1153 b := v.Block
1154 typ := &b.Func.Config.Types
1155
1156
1157 for {
1158 x := v_0
1159 y := v_1
1160 v.reset(OpMIPS64SGTU)
1161 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
1162 v0.AuxInt = int64ToAuxInt(1)
1163 v1 := b.NewValue0(v.Pos, OpMIPS64XOR, typ.UInt64)
1164 v1.AddArg2(x, y)
1165 v.AddArg2(v0, v1)
1166 return true
1167 }
1168 }
1169 func rewriteValueMIPS64_OpHmul32(v *Value) bool {
1170 v_1 := v.Args[1]
1171 v_0 := v.Args[0]
1172 b := v.Block
1173 typ := &b.Func.Config.Types
1174
1175
1176 for {
1177 x := v_0
1178 y := v_1
1179 v.reset(OpMIPS64SRAVconst)
1180 v.AuxInt = int64ToAuxInt(32)
1181 v0 := b.NewValue0(v.Pos, OpSelect1, typ.Int64)
1182 v1 := b.NewValue0(v.Pos, OpMIPS64MULV, types.NewTuple(typ.Int64, typ.Int64))
1183 v2 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
1184 v2.AddArg(x)
1185 v3 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
1186 v3.AddArg(y)
1187 v1.AddArg2(v2, v3)
1188 v0.AddArg(v1)
1189 v.AddArg(v0)
1190 return true
1191 }
1192 }
1193 func rewriteValueMIPS64_OpHmul32u(v *Value) bool {
1194 v_1 := v.Args[1]
1195 v_0 := v.Args[0]
1196 b := v.Block
1197 typ := &b.Func.Config.Types
1198
1199
1200 for {
1201 x := v_0
1202 y := v_1
1203 v.reset(OpMIPS64SRLVconst)
1204 v.AuxInt = int64ToAuxInt(32)
1205 v0 := b.NewValue0(v.Pos, OpSelect1, typ.UInt64)
1206 v1 := b.NewValue0(v.Pos, OpMIPS64MULVU, types.NewTuple(typ.UInt64, typ.UInt64))
1207 v2 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
1208 v2.AddArg(x)
1209 v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
1210 v3.AddArg(y)
1211 v1.AddArg2(v2, v3)
1212 v0.AddArg(v1)
1213 v.AddArg(v0)
1214 return true
1215 }
1216 }
1217 func rewriteValueMIPS64_OpHmul64(v *Value) bool {
1218 v_1 := v.Args[1]
1219 v_0 := v.Args[0]
1220 b := v.Block
1221 typ := &b.Func.Config.Types
1222
1223
1224 for {
1225 x := v_0
1226 y := v_1
1227 v.reset(OpSelect0)
1228 v0 := b.NewValue0(v.Pos, OpMIPS64MULV, types.NewTuple(typ.Int64, typ.Int64))
1229 v0.AddArg2(x, y)
1230 v.AddArg(v0)
1231 return true
1232 }
1233 }
1234 func rewriteValueMIPS64_OpHmul64u(v *Value) bool {
1235 v_1 := v.Args[1]
1236 v_0 := v.Args[0]
1237 b := v.Block
1238 typ := &b.Func.Config.Types
1239
1240
1241 for {
1242 x := v_0
1243 y := v_1
1244 v.reset(OpSelect0)
1245 v0 := b.NewValue0(v.Pos, OpMIPS64MULVU, types.NewTuple(typ.UInt64, typ.UInt64))
1246 v0.AddArg2(x, y)
1247 v.AddArg(v0)
1248 return true
1249 }
1250 }
1251 func rewriteValueMIPS64_OpIsInBounds(v *Value) bool {
1252 v_1 := v.Args[1]
1253 v_0 := v.Args[0]
1254
1255
1256 for {
1257 idx := v_0
1258 len := v_1
1259 v.reset(OpMIPS64SGTU)
1260 v.AddArg2(len, idx)
1261 return true
1262 }
1263 }
1264 func rewriteValueMIPS64_OpIsNonNil(v *Value) bool {
1265 v_0 := v.Args[0]
1266 b := v.Block
1267 typ := &b.Func.Config.Types
1268
1269
1270 for {
1271 ptr := v_0
1272 v.reset(OpMIPS64SGTU)
1273 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
1274 v0.AuxInt = int64ToAuxInt(0)
1275 v.AddArg2(ptr, v0)
1276 return true
1277 }
1278 }
1279 func rewriteValueMIPS64_OpIsSliceInBounds(v *Value) bool {
1280 v_1 := v.Args[1]
1281 v_0 := v.Args[0]
1282 b := v.Block
1283 typ := &b.Func.Config.Types
1284
1285
1286 for {
1287 idx := v_0
1288 len := v_1
1289 v.reset(OpMIPS64XOR)
1290 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
1291 v0.AuxInt = int64ToAuxInt(1)
1292 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
1293 v1.AddArg2(idx, len)
1294 v.AddArg2(v0, v1)
1295 return true
1296 }
1297 }
1298 func rewriteValueMIPS64_OpLeq16(v *Value) bool {
1299 v_1 := v.Args[1]
1300 v_0 := v.Args[0]
1301 b := v.Block
1302 typ := &b.Func.Config.Types
1303
1304
1305 for {
1306 x := v_0
1307 y := v_1
1308 v.reset(OpMIPS64XOR)
1309 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
1310 v0.AuxInt = int64ToAuxInt(1)
1311 v1 := b.NewValue0(v.Pos, OpMIPS64SGT, typ.Bool)
1312 v2 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
1313 v2.AddArg(x)
1314 v3 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
1315 v3.AddArg(y)
1316 v1.AddArg2(v2, v3)
1317 v.AddArg2(v0, v1)
1318 return true
1319 }
1320 }
1321 func rewriteValueMIPS64_OpLeq16U(v *Value) bool {
1322 v_1 := v.Args[1]
1323 v_0 := v.Args[0]
1324 b := v.Block
1325 typ := &b.Func.Config.Types
1326
1327
1328 for {
1329 x := v_0
1330 y := v_1
1331 v.reset(OpMIPS64XOR)
1332 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
1333 v0.AuxInt = int64ToAuxInt(1)
1334 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
1335 v2 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
1336 v2.AddArg(x)
1337 v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
1338 v3.AddArg(y)
1339 v1.AddArg2(v2, v3)
1340 v.AddArg2(v0, v1)
1341 return true
1342 }
1343 }
1344 func rewriteValueMIPS64_OpLeq32(v *Value) bool {
1345 v_1 := v.Args[1]
1346 v_0 := v.Args[0]
1347 b := v.Block
1348 typ := &b.Func.Config.Types
1349
1350
1351 for {
1352 x := v_0
1353 y := v_1
1354 v.reset(OpMIPS64XOR)
1355 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
1356 v0.AuxInt = int64ToAuxInt(1)
1357 v1 := b.NewValue0(v.Pos, OpMIPS64SGT, typ.Bool)
1358 v2 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
1359 v2.AddArg(x)
1360 v3 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
1361 v3.AddArg(y)
1362 v1.AddArg2(v2, v3)
1363 v.AddArg2(v0, v1)
1364 return true
1365 }
1366 }
1367 func rewriteValueMIPS64_OpLeq32F(v *Value) bool {
1368 v_1 := v.Args[1]
1369 v_0 := v.Args[0]
1370 b := v.Block
1371
1372
1373 for {
1374 x := v_0
1375 y := v_1
1376 v.reset(OpMIPS64FPFlagTrue)
1377 v0 := b.NewValue0(v.Pos, OpMIPS64CMPGEF, types.TypeFlags)
1378 v0.AddArg2(y, x)
1379 v.AddArg(v0)
1380 return true
1381 }
1382 }
1383 func rewriteValueMIPS64_OpLeq32U(v *Value) bool {
1384 v_1 := v.Args[1]
1385 v_0 := v.Args[0]
1386 b := v.Block
1387 typ := &b.Func.Config.Types
1388
1389
1390 for {
1391 x := v_0
1392 y := v_1
1393 v.reset(OpMIPS64XOR)
1394 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
1395 v0.AuxInt = int64ToAuxInt(1)
1396 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
1397 v2 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
1398 v2.AddArg(x)
1399 v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
1400 v3.AddArg(y)
1401 v1.AddArg2(v2, v3)
1402 v.AddArg2(v0, v1)
1403 return true
1404 }
1405 }
1406 func rewriteValueMIPS64_OpLeq64(v *Value) bool {
1407 v_1 := v.Args[1]
1408 v_0 := v.Args[0]
1409 b := v.Block
1410 typ := &b.Func.Config.Types
1411
1412
1413 for {
1414 x := v_0
1415 y := v_1
1416 v.reset(OpMIPS64XOR)
1417 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
1418 v0.AuxInt = int64ToAuxInt(1)
1419 v1 := b.NewValue0(v.Pos, OpMIPS64SGT, typ.Bool)
1420 v1.AddArg2(x, y)
1421 v.AddArg2(v0, v1)
1422 return true
1423 }
1424 }
1425 func rewriteValueMIPS64_OpLeq64F(v *Value) bool {
1426 v_1 := v.Args[1]
1427 v_0 := v.Args[0]
1428 b := v.Block
1429
1430
1431 for {
1432 x := v_0
1433 y := v_1
1434 v.reset(OpMIPS64FPFlagTrue)
1435 v0 := b.NewValue0(v.Pos, OpMIPS64CMPGED, types.TypeFlags)
1436 v0.AddArg2(y, x)
1437 v.AddArg(v0)
1438 return true
1439 }
1440 }
1441 func rewriteValueMIPS64_OpLeq64U(v *Value) bool {
1442 v_1 := v.Args[1]
1443 v_0 := v.Args[0]
1444 b := v.Block
1445 typ := &b.Func.Config.Types
1446
1447
1448 for {
1449 x := v_0
1450 y := v_1
1451 v.reset(OpMIPS64XOR)
1452 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
1453 v0.AuxInt = int64ToAuxInt(1)
1454 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
1455 v1.AddArg2(x, y)
1456 v.AddArg2(v0, v1)
1457 return true
1458 }
1459 }
1460 func rewriteValueMIPS64_OpLeq8(v *Value) bool {
1461 v_1 := v.Args[1]
1462 v_0 := v.Args[0]
1463 b := v.Block
1464 typ := &b.Func.Config.Types
1465
1466
1467 for {
1468 x := v_0
1469 y := v_1
1470 v.reset(OpMIPS64XOR)
1471 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
1472 v0.AuxInt = int64ToAuxInt(1)
1473 v1 := b.NewValue0(v.Pos, OpMIPS64SGT, typ.Bool)
1474 v2 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
1475 v2.AddArg(x)
1476 v3 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
1477 v3.AddArg(y)
1478 v1.AddArg2(v2, v3)
1479 v.AddArg2(v0, v1)
1480 return true
1481 }
1482 }
1483 func rewriteValueMIPS64_OpLeq8U(v *Value) bool {
1484 v_1 := v.Args[1]
1485 v_0 := v.Args[0]
1486 b := v.Block
1487 typ := &b.Func.Config.Types
1488
1489
1490 for {
1491 x := v_0
1492 y := v_1
1493 v.reset(OpMIPS64XOR)
1494 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
1495 v0.AuxInt = int64ToAuxInt(1)
1496 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
1497 v2 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
1498 v2.AddArg(x)
1499 v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
1500 v3.AddArg(y)
1501 v1.AddArg2(v2, v3)
1502 v.AddArg2(v0, v1)
1503 return true
1504 }
1505 }
1506 func rewriteValueMIPS64_OpLess16(v *Value) bool {
1507 v_1 := v.Args[1]
1508 v_0 := v.Args[0]
1509 b := v.Block
1510 typ := &b.Func.Config.Types
1511
1512
1513 for {
1514 x := v_0
1515 y := v_1
1516 v.reset(OpMIPS64SGT)
1517 v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
1518 v0.AddArg(y)
1519 v1 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
1520 v1.AddArg(x)
1521 v.AddArg2(v0, v1)
1522 return true
1523 }
1524 }
1525 func rewriteValueMIPS64_OpLess16U(v *Value) bool {
1526 v_1 := v.Args[1]
1527 v_0 := v.Args[0]
1528 b := v.Block
1529 typ := &b.Func.Config.Types
1530
1531
1532 for {
1533 x := v_0
1534 y := v_1
1535 v.reset(OpMIPS64SGTU)
1536 v0 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
1537 v0.AddArg(y)
1538 v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
1539 v1.AddArg(x)
1540 v.AddArg2(v0, v1)
1541 return true
1542 }
1543 }
1544 func rewriteValueMIPS64_OpLess32(v *Value) bool {
1545 v_1 := v.Args[1]
1546 v_0 := v.Args[0]
1547 b := v.Block
1548 typ := &b.Func.Config.Types
1549
1550
1551 for {
1552 x := v_0
1553 y := v_1
1554 v.reset(OpMIPS64SGT)
1555 v0 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
1556 v0.AddArg(y)
1557 v1 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
1558 v1.AddArg(x)
1559 v.AddArg2(v0, v1)
1560 return true
1561 }
1562 }
1563 func rewriteValueMIPS64_OpLess32F(v *Value) bool {
1564 v_1 := v.Args[1]
1565 v_0 := v.Args[0]
1566 b := v.Block
1567
1568
1569 for {
1570 x := v_0
1571 y := v_1
1572 v.reset(OpMIPS64FPFlagTrue)
1573 v0 := b.NewValue0(v.Pos, OpMIPS64CMPGTF, types.TypeFlags)
1574 v0.AddArg2(y, x)
1575 v.AddArg(v0)
1576 return true
1577 }
1578 }
1579 func rewriteValueMIPS64_OpLess32U(v *Value) bool {
1580 v_1 := v.Args[1]
1581 v_0 := v.Args[0]
1582 b := v.Block
1583 typ := &b.Func.Config.Types
1584
1585
1586 for {
1587 x := v_0
1588 y := v_1
1589 v.reset(OpMIPS64SGTU)
1590 v0 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
1591 v0.AddArg(y)
1592 v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
1593 v1.AddArg(x)
1594 v.AddArg2(v0, v1)
1595 return true
1596 }
1597 }
1598 func rewriteValueMIPS64_OpLess64(v *Value) bool {
1599 v_1 := v.Args[1]
1600 v_0 := v.Args[0]
1601
1602
1603 for {
1604 x := v_0
1605 y := v_1
1606 v.reset(OpMIPS64SGT)
1607 v.AddArg2(y, x)
1608 return true
1609 }
1610 }
1611 func rewriteValueMIPS64_OpLess64F(v *Value) bool {
1612 v_1 := v.Args[1]
1613 v_0 := v.Args[0]
1614 b := v.Block
1615
1616
1617 for {
1618 x := v_0
1619 y := v_1
1620 v.reset(OpMIPS64FPFlagTrue)
1621 v0 := b.NewValue0(v.Pos, OpMIPS64CMPGTD, types.TypeFlags)
1622 v0.AddArg2(y, x)
1623 v.AddArg(v0)
1624 return true
1625 }
1626 }
1627 func rewriteValueMIPS64_OpLess64U(v *Value) bool {
1628 v_1 := v.Args[1]
1629 v_0 := v.Args[0]
1630
1631
1632 for {
1633 x := v_0
1634 y := v_1
1635 v.reset(OpMIPS64SGTU)
1636 v.AddArg2(y, x)
1637 return true
1638 }
1639 }
1640 func rewriteValueMIPS64_OpLess8(v *Value) bool {
1641 v_1 := v.Args[1]
1642 v_0 := v.Args[0]
1643 b := v.Block
1644 typ := &b.Func.Config.Types
1645
1646
1647 for {
1648 x := v_0
1649 y := v_1
1650 v.reset(OpMIPS64SGT)
1651 v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
1652 v0.AddArg(y)
1653 v1 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
1654 v1.AddArg(x)
1655 v.AddArg2(v0, v1)
1656 return true
1657 }
1658 }
1659 func rewriteValueMIPS64_OpLess8U(v *Value) bool {
1660 v_1 := v.Args[1]
1661 v_0 := v.Args[0]
1662 b := v.Block
1663 typ := &b.Func.Config.Types
1664
1665
1666 for {
1667 x := v_0
1668 y := v_1
1669 v.reset(OpMIPS64SGTU)
1670 v0 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
1671 v0.AddArg(y)
1672 v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
1673 v1.AddArg(x)
1674 v.AddArg2(v0, v1)
1675 return true
1676 }
1677 }
1678 func rewriteValueMIPS64_OpLoad(v *Value) bool {
1679 v_1 := v.Args[1]
1680 v_0 := v.Args[0]
1681
1682
1683
1684 for {
1685 t := v.Type
1686 ptr := v_0
1687 mem := v_1
1688 if !(t.IsBoolean()) {
1689 break
1690 }
1691 v.reset(OpMIPS64MOVBUload)
1692 v.AddArg2(ptr, mem)
1693 return true
1694 }
1695
1696
1697
1698 for {
1699 t := v.Type
1700 ptr := v_0
1701 mem := v_1
1702 if !(is8BitInt(t) && isSigned(t)) {
1703 break
1704 }
1705 v.reset(OpMIPS64MOVBload)
1706 v.AddArg2(ptr, mem)
1707 return true
1708 }
1709
1710
1711
1712 for {
1713 t := v.Type
1714 ptr := v_0
1715 mem := v_1
1716 if !(is8BitInt(t) && !isSigned(t)) {
1717 break
1718 }
1719 v.reset(OpMIPS64MOVBUload)
1720 v.AddArg2(ptr, mem)
1721 return true
1722 }
1723
1724
1725
1726 for {
1727 t := v.Type
1728 ptr := v_0
1729 mem := v_1
1730 if !(is16BitInt(t) && isSigned(t)) {
1731 break
1732 }
1733 v.reset(OpMIPS64MOVHload)
1734 v.AddArg2(ptr, mem)
1735 return true
1736 }
1737
1738
1739
1740 for {
1741 t := v.Type
1742 ptr := v_0
1743 mem := v_1
1744 if !(is16BitInt(t) && !isSigned(t)) {
1745 break
1746 }
1747 v.reset(OpMIPS64MOVHUload)
1748 v.AddArg2(ptr, mem)
1749 return true
1750 }
1751
1752
1753
1754 for {
1755 t := v.Type
1756 ptr := v_0
1757 mem := v_1
1758 if !(is32BitInt(t) && isSigned(t)) {
1759 break
1760 }
1761 v.reset(OpMIPS64MOVWload)
1762 v.AddArg2(ptr, mem)
1763 return true
1764 }
1765
1766
1767
1768 for {
1769 t := v.Type
1770 ptr := v_0
1771 mem := v_1
1772 if !(is32BitInt(t) && !isSigned(t)) {
1773 break
1774 }
1775 v.reset(OpMIPS64MOVWUload)
1776 v.AddArg2(ptr, mem)
1777 return true
1778 }
1779
1780
1781
1782 for {
1783 t := v.Type
1784 ptr := v_0
1785 mem := v_1
1786 if !(is64BitInt(t) || isPtr(t)) {
1787 break
1788 }
1789 v.reset(OpMIPS64MOVVload)
1790 v.AddArg2(ptr, mem)
1791 return true
1792 }
1793
1794
1795
1796 for {
1797 t := v.Type
1798 ptr := v_0
1799 mem := v_1
1800 if !(is32BitFloat(t)) {
1801 break
1802 }
1803 v.reset(OpMIPS64MOVFload)
1804 v.AddArg2(ptr, mem)
1805 return true
1806 }
1807
1808
1809
1810 for {
1811 t := v.Type
1812 ptr := v_0
1813 mem := v_1
1814 if !(is64BitFloat(t)) {
1815 break
1816 }
1817 v.reset(OpMIPS64MOVDload)
1818 v.AddArg2(ptr, mem)
1819 return true
1820 }
1821 return false
1822 }
1823 func rewriteValueMIPS64_OpLocalAddr(v *Value) bool {
1824 v_0 := v.Args[0]
1825
1826
1827 for {
1828 sym := auxToSym(v.Aux)
1829 base := v_0
1830 v.reset(OpMIPS64MOVVaddr)
1831 v.Aux = symToAux(sym)
1832 v.AddArg(base)
1833 return true
1834 }
1835 }
1836 func rewriteValueMIPS64_OpLsh16x16(v *Value) bool {
1837 v_1 := v.Args[1]
1838 v_0 := v.Args[0]
1839 b := v.Block
1840 typ := &b.Func.Config.Types
1841
1842
1843 for {
1844 t := v.Type
1845 x := v_0
1846 y := v_1
1847 v.reset(OpMIPS64AND)
1848 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
1849 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
1850 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
1851 v2.AuxInt = int64ToAuxInt(64)
1852 v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
1853 v3.AddArg(y)
1854 v1.AddArg2(v2, v3)
1855 v0.AddArg(v1)
1856 v4 := b.NewValue0(v.Pos, OpMIPS64SLLV, t)
1857 v4.AddArg2(x, v3)
1858 v.AddArg2(v0, v4)
1859 return true
1860 }
1861 }
1862 func rewriteValueMIPS64_OpLsh16x32(v *Value) bool {
1863 v_1 := v.Args[1]
1864 v_0 := v.Args[0]
1865 b := v.Block
1866 typ := &b.Func.Config.Types
1867
1868
1869 for {
1870 t := v.Type
1871 x := v_0
1872 y := v_1
1873 v.reset(OpMIPS64AND)
1874 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
1875 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
1876 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
1877 v2.AuxInt = int64ToAuxInt(64)
1878 v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
1879 v3.AddArg(y)
1880 v1.AddArg2(v2, v3)
1881 v0.AddArg(v1)
1882 v4 := b.NewValue0(v.Pos, OpMIPS64SLLV, t)
1883 v4.AddArg2(x, v3)
1884 v.AddArg2(v0, v4)
1885 return true
1886 }
1887 }
1888 func rewriteValueMIPS64_OpLsh16x64(v *Value) bool {
1889 v_1 := v.Args[1]
1890 v_0 := v.Args[0]
1891 b := v.Block
1892 typ := &b.Func.Config.Types
1893
1894
1895 for {
1896 t := v.Type
1897 x := v_0
1898 y := v_1
1899 v.reset(OpMIPS64AND)
1900 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
1901 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
1902 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
1903 v2.AuxInt = int64ToAuxInt(64)
1904 v1.AddArg2(v2, y)
1905 v0.AddArg(v1)
1906 v3 := b.NewValue0(v.Pos, OpMIPS64SLLV, t)
1907 v3.AddArg2(x, y)
1908 v.AddArg2(v0, v3)
1909 return true
1910 }
1911 }
1912 func rewriteValueMIPS64_OpLsh16x8(v *Value) bool {
1913 v_1 := v.Args[1]
1914 v_0 := v.Args[0]
1915 b := v.Block
1916 typ := &b.Func.Config.Types
1917
1918
1919 for {
1920 t := v.Type
1921 x := v_0
1922 y := v_1
1923 v.reset(OpMIPS64AND)
1924 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
1925 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
1926 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
1927 v2.AuxInt = int64ToAuxInt(64)
1928 v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
1929 v3.AddArg(y)
1930 v1.AddArg2(v2, v3)
1931 v0.AddArg(v1)
1932 v4 := b.NewValue0(v.Pos, OpMIPS64SLLV, t)
1933 v4.AddArg2(x, v3)
1934 v.AddArg2(v0, v4)
1935 return true
1936 }
1937 }
1938 func rewriteValueMIPS64_OpLsh32x16(v *Value) bool {
1939 v_1 := v.Args[1]
1940 v_0 := v.Args[0]
1941 b := v.Block
1942 typ := &b.Func.Config.Types
1943
1944
1945 for {
1946 t := v.Type
1947 x := v_0
1948 y := v_1
1949 v.reset(OpMIPS64AND)
1950 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
1951 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
1952 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
1953 v2.AuxInt = int64ToAuxInt(64)
1954 v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
1955 v3.AddArg(y)
1956 v1.AddArg2(v2, v3)
1957 v0.AddArg(v1)
1958 v4 := b.NewValue0(v.Pos, OpMIPS64SLLV, t)
1959 v4.AddArg2(x, v3)
1960 v.AddArg2(v0, v4)
1961 return true
1962 }
1963 }
1964 func rewriteValueMIPS64_OpLsh32x32(v *Value) bool {
1965 v_1 := v.Args[1]
1966 v_0 := v.Args[0]
1967 b := v.Block
1968 typ := &b.Func.Config.Types
1969
1970
1971 for {
1972 t := v.Type
1973 x := v_0
1974 y := v_1
1975 v.reset(OpMIPS64AND)
1976 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
1977 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
1978 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
1979 v2.AuxInt = int64ToAuxInt(64)
1980 v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
1981 v3.AddArg(y)
1982 v1.AddArg2(v2, v3)
1983 v0.AddArg(v1)
1984 v4 := b.NewValue0(v.Pos, OpMIPS64SLLV, t)
1985 v4.AddArg2(x, v3)
1986 v.AddArg2(v0, v4)
1987 return true
1988 }
1989 }
1990 func rewriteValueMIPS64_OpLsh32x64(v *Value) bool {
1991 v_1 := v.Args[1]
1992 v_0 := v.Args[0]
1993 b := v.Block
1994 typ := &b.Func.Config.Types
1995
1996
1997 for {
1998 t := v.Type
1999 x := v_0
2000 y := v_1
2001 v.reset(OpMIPS64AND)
2002 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
2003 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
2004 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
2005 v2.AuxInt = int64ToAuxInt(64)
2006 v1.AddArg2(v2, y)
2007 v0.AddArg(v1)
2008 v3 := b.NewValue0(v.Pos, OpMIPS64SLLV, t)
2009 v3.AddArg2(x, y)
2010 v.AddArg2(v0, v3)
2011 return true
2012 }
2013 }
2014 func rewriteValueMIPS64_OpLsh32x8(v *Value) bool {
2015 v_1 := v.Args[1]
2016 v_0 := v.Args[0]
2017 b := v.Block
2018 typ := &b.Func.Config.Types
2019
2020
2021 for {
2022 t := v.Type
2023 x := v_0
2024 y := v_1
2025 v.reset(OpMIPS64AND)
2026 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
2027 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
2028 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
2029 v2.AuxInt = int64ToAuxInt(64)
2030 v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
2031 v3.AddArg(y)
2032 v1.AddArg2(v2, v3)
2033 v0.AddArg(v1)
2034 v4 := b.NewValue0(v.Pos, OpMIPS64SLLV, t)
2035 v4.AddArg2(x, v3)
2036 v.AddArg2(v0, v4)
2037 return true
2038 }
2039 }
2040 func rewriteValueMIPS64_OpLsh64x16(v *Value) bool {
2041 v_1 := v.Args[1]
2042 v_0 := v.Args[0]
2043 b := v.Block
2044 typ := &b.Func.Config.Types
2045
2046
2047 for {
2048 t := v.Type
2049 x := v_0
2050 y := v_1
2051 v.reset(OpMIPS64AND)
2052 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
2053 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
2054 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
2055 v2.AuxInt = int64ToAuxInt(64)
2056 v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
2057 v3.AddArg(y)
2058 v1.AddArg2(v2, v3)
2059 v0.AddArg(v1)
2060 v4 := b.NewValue0(v.Pos, OpMIPS64SLLV, t)
2061 v4.AddArg2(x, v3)
2062 v.AddArg2(v0, v4)
2063 return true
2064 }
2065 }
2066 func rewriteValueMIPS64_OpLsh64x32(v *Value) bool {
2067 v_1 := v.Args[1]
2068 v_0 := v.Args[0]
2069 b := v.Block
2070 typ := &b.Func.Config.Types
2071
2072
2073 for {
2074 t := v.Type
2075 x := v_0
2076 y := v_1
2077 v.reset(OpMIPS64AND)
2078 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
2079 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
2080 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
2081 v2.AuxInt = int64ToAuxInt(64)
2082 v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
2083 v3.AddArg(y)
2084 v1.AddArg2(v2, v3)
2085 v0.AddArg(v1)
2086 v4 := b.NewValue0(v.Pos, OpMIPS64SLLV, t)
2087 v4.AddArg2(x, v3)
2088 v.AddArg2(v0, v4)
2089 return true
2090 }
2091 }
2092 func rewriteValueMIPS64_OpLsh64x64(v *Value) bool {
2093 v_1 := v.Args[1]
2094 v_0 := v.Args[0]
2095 b := v.Block
2096 typ := &b.Func.Config.Types
2097
2098
2099 for {
2100 t := v.Type
2101 x := v_0
2102 y := v_1
2103 v.reset(OpMIPS64AND)
2104 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
2105 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
2106 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
2107 v2.AuxInt = int64ToAuxInt(64)
2108 v1.AddArg2(v2, y)
2109 v0.AddArg(v1)
2110 v3 := b.NewValue0(v.Pos, OpMIPS64SLLV, t)
2111 v3.AddArg2(x, y)
2112 v.AddArg2(v0, v3)
2113 return true
2114 }
2115 }
2116 func rewriteValueMIPS64_OpLsh64x8(v *Value) bool {
2117 v_1 := v.Args[1]
2118 v_0 := v.Args[0]
2119 b := v.Block
2120 typ := &b.Func.Config.Types
2121
2122
2123 for {
2124 t := v.Type
2125 x := v_0
2126 y := v_1
2127 v.reset(OpMIPS64AND)
2128 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
2129 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
2130 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
2131 v2.AuxInt = int64ToAuxInt(64)
2132 v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
2133 v3.AddArg(y)
2134 v1.AddArg2(v2, v3)
2135 v0.AddArg(v1)
2136 v4 := b.NewValue0(v.Pos, OpMIPS64SLLV, t)
2137 v4.AddArg2(x, v3)
2138 v.AddArg2(v0, v4)
2139 return true
2140 }
2141 }
2142 func rewriteValueMIPS64_OpLsh8x16(v *Value) bool {
2143 v_1 := v.Args[1]
2144 v_0 := v.Args[0]
2145 b := v.Block
2146 typ := &b.Func.Config.Types
2147
2148
2149 for {
2150 t := v.Type
2151 x := v_0
2152 y := v_1
2153 v.reset(OpMIPS64AND)
2154 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
2155 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
2156 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
2157 v2.AuxInt = int64ToAuxInt(64)
2158 v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
2159 v3.AddArg(y)
2160 v1.AddArg2(v2, v3)
2161 v0.AddArg(v1)
2162 v4 := b.NewValue0(v.Pos, OpMIPS64SLLV, t)
2163 v4.AddArg2(x, v3)
2164 v.AddArg2(v0, v4)
2165 return true
2166 }
2167 }
2168 func rewriteValueMIPS64_OpLsh8x32(v *Value) bool {
2169 v_1 := v.Args[1]
2170 v_0 := v.Args[0]
2171 b := v.Block
2172 typ := &b.Func.Config.Types
2173
2174
2175 for {
2176 t := v.Type
2177 x := v_0
2178 y := v_1
2179 v.reset(OpMIPS64AND)
2180 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
2181 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
2182 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
2183 v2.AuxInt = int64ToAuxInt(64)
2184 v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
2185 v3.AddArg(y)
2186 v1.AddArg2(v2, v3)
2187 v0.AddArg(v1)
2188 v4 := b.NewValue0(v.Pos, OpMIPS64SLLV, t)
2189 v4.AddArg2(x, v3)
2190 v.AddArg2(v0, v4)
2191 return true
2192 }
2193 }
2194 func rewriteValueMIPS64_OpLsh8x64(v *Value) bool {
2195 v_1 := v.Args[1]
2196 v_0 := v.Args[0]
2197 b := v.Block
2198 typ := &b.Func.Config.Types
2199
2200
2201 for {
2202 t := v.Type
2203 x := v_0
2204 y := v_1
2205 v.reset(OpMIPS64AND)
2206 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
2207 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
2208 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
2209 v2.AuxInt = int64ToAuxInt(64)
2210 v1.AddArg2(v2, y)
2211 v0.AddArg(v1)
2212 v3 := b.NewValue0(v.Pos, OpMIPS64SLLV, t)
2213 v3.AddArg2(x, y)
2214 v.AddArg2(v0, v3)
2215 return true
2216 }
2217 }
2218 func rewriteValueMIPS64_OpLsh8x8(v *Value) bool {
2219 v_1 := v.Args[1]
2220 v_0 := v.Args[0]
2221 b := v.Block
2222 typ := &b.Func.Config.Types
2223
2224
2225 for {
2226 t := v.Type
2227 x := v_0
2228 y := v_1
2229 v.reset(OpMIPS64AND)
2230 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
2231 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
2232 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
2233 v2.AuxInt = int64ToAuxInt(64)
2234 v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
2235 v3.AddArg(y)
2236 v1.AddArg2(v2, v3)
2237 v0.AddArg(v1)
2238 v4 := b.NewValue0(v.Pos, OpMIPS64SLLV, t)
2239 v4.AddArg2(x, v3)
2240 v.AddArg2(v0, v4)
2241 return true
2242 }
2243 }
2244 func rewriteValueMIPS64_OpMIPS64ADDV(v *Value) bool {
2245 v_1 := v.Args[1]
2246 v_0 := v.Args[0]
2247
2248
2249
2250 for {
2251 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
2252 x := v_0
2253 if v_1.Op != OpMIPS64MOVVconst {
2254 continue
2255 }
2256 c := auxIntToInt64(v_1.AuxInt)
2257 if !(is32Bit(c)) {
2258 continue
2259 }
2260 v.reset(OpMIPS64ADDVconst)
2261 v.AuxInt = int64ToAuxInt(c)
2262 v.AddArg(x)
2263 return true
2264 }
2265 break
2266 }
2267
2268
2269 for {
2270 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
2271 x := v_0
2272 if v_1.Op != OpMIPS64NEGV {
2273 continue
2274 }
2275 y := v_1.Args[0]
2276 v.reset(OpMIPS64SUBV)
2277 v.AddArg2(x, y)
2278 return true
2279 }
2280 break
2281 }
2282 return false
2283 }
2284 func rewriteValueMIPS64_OpMIPS64ADDVconst(v *Value) bool {
2285 v_0 := v.Args[0]
2286
2287
2288
2289 for {
2290 off1 := auxIntToInt64(v.AuxInt)
2291 if v_0.Op != OpMIPS64MOVVaddr {
2292 break
2293 }
2294 off2 := auxIntToInt32(v_0.AuxInt)
2295 sym := auxToSym(v_0.Aux)
2296 ptr := v_0.Args[0]
2297 if !(is32Bit(off1 + int64(off2))) {
2298 break
2299 }
2300 v.reset(OpMIPS64MOVVaddr)
2301 v.AuxInt = int32ToAuxInt(int32(off1) + int32(off2))
2302 v.Aux = symToAux(sym)
2303 v.AddArg(ptr)
2304 return true
2305 }
2306
2307
2308 for {
2309 if auxIntToInt64(v.AuxInt) != 0 {
2310 break
2311 }
2312 x := v_0
2313 v.copyOf(x)
2314 return true
2315 }
2316
2317
2318 for {
2319 c := auxIntToInt64(v.AuxInt)
2320 if v_0.Op != OpMIPS64MOVVconst {
2321 break
2322 }
2323 d := auxIntToInt64(v_0.AuxInt)
2324 v.reset(OpMIPS64MOVVconst)
2325 v.AuxInt = int64ToAuxInt(c + d)
2326 return true
2327 }
2328
2329
2330
2331 for {
2332 c := auxIntToInt64(v.AuxInt)
2333 if v_0.Op != OpMIPS64ADDVconst {
2334 break
2335 }
2336 d := auxIntToInt64(v_0.AuxInt)
2337 x := v_0.Args[0]
2338 if !(is32Bit(c + d)) {
2339 break
2340 }
2341 v.reset(OpMIPS64ADDVconst)
2342 v.AuxInt = int64ToAuxInt(c + d)
2343 v.AddArg(x)
2344 return true
2345 }
2346
2347
2348
2349 for {
2350 c := auxIntToInt64(v.AuxInt)
2351 if v_0.Op != OpMIPS64SUBVconst {
2352 break
2353 }
2354 d := auxIntToInt64(v_0.AuxInt)
2355 x := v_0.Args[0]
2356 if !(is32Bit(c - d)) {
2357 break
2358 }
2359 v.reset(OpMIPS64ADDVconst)
2360 v.AuxInt = int64ToAuxInt(c - d)
2361 v.AddArg(x)
2362 return true
2363 }
2364 return false
2365 }
2366 func rewriteValueMIPS64_OpMIPS64AND(v *Value) bool {
2367 v_1 := v.Args[1]
2368 v_0 := v.Args[0]
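// (AND x (MOVVconst [c])) && is32Bit(c) => (ANDconst [c] x), also with operands swapped
// (AND x x) => x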
2369
2370
2371
2372 for {
2373 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
2374 x := v_0
2375 if v_1.Op != OpMIPS64MOVVconst {
2376 continue
2377 }
2378 c := auxIntToInt64(v_1.AuxInt)
2379 if !(is32Bit(c)) {
2380 continue
2381 }
2382 v.reset(OpMIPS64ANDconst)
2383 v.AuxInt = int64ToAuxInt(c)
2384 v.AddArg(x)
2385 return true
2386 }
2387 break
2388 }
2389
2390
2391 for {
2392 x := v_0
2393 if x != v_1 {
2394 break
2395 }
2396 v.copyOf(x)
2397 return true
2398 }
2399 return false
2400 }
2401 func rewriteValueMIPS64_OpMIPS64ANDconst(v *Value) bool {
2402 v_0 := v.Args[0]
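// (ANDconst [0] _) => (MOVVconst [0])
// (ANDconst [-1] x) => x
// (ANDconst [c] (MOVVconst [d])) => (MOVVconst [c&d])
// (ANDconst [c] (ANDconst [d] x)) => (ANDconst [c&d] x)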
2403
2404
2405 for {
2406 if auxIntToInt64(v.AuxInt) != 0 {
2407 break
2408 }
2409 v.reset(OpMIPS64MOVVconst)
2410 v.AuxInt = int64ToAuxInt(0)
2411 return true
2412 }
2413
2414
2415 for {
2416 if auxIntToInt64(v.AuxInt) != -1 {
2417 break
2418 }
2419 x := v_0
2420 v.copyOf(x)
2421 return true
2422 }
2423
2424
2425 for {
2426 c := auxIntToInt64(v.AuxInt)
2427 if v_0.Op != OpMIPS64MOVVconst {
2428 break
2429 }
2430 d := auxIntToInt64(v_0.AuxInt)
2431 v.reset(OpMIPS64MOVVconst)
2432 v.AuxInt = int64ToAuxInt(c & d)
2433 return true
2434 }
2435
2436
2437 for {
2438 c := auxIntToInt64(v.AuxInt)
2439 if v_0.Op != OpMIPS64ANDconst {
2440 break
2441 }
2442 d := auxIntToInt64(v_0.AuxInt)
2443 x := v_0.Args[0]
2444 v.reset(OpMIPS64ANDconst)
2445 v.AuxInt = int64ToAuxInt(c & d)
2446 v.AddArg(x)
2447 return true
2448 }
2449 return false
2450 }
2451 func rewriteValueMIPS64_OpMIPS64LoweredAtomicAdd32(v *Value) bool {
2452 v_2 := v.Args[2]
2453 v_1 := v.Args[1]
2454 v_0 := v.Args[0]
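// (LoweredAtomicAdd32 ptr (MOVVconst [c]) mem) && is32Bit(c) => (LoweredAtomicAddconst32 [int32(c)] ptr mem)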
2455
2456
2457
2458 for {
2459 ptr := v_0
2460 if v_1.Op != OpMIPS64MOVVconst {
2461 break
2462 }
2463 c := auxIntToInt64(v_1.AuxInt)
2464 mem := v_2
2465 if !(is32Bit(c)) {
2466 break
2467 }
2468 v.reset(OpMIPS64LoweredAtomicAddconst32)
2469 v.AuxInt = int32ToAuxInt(int32(c))
2470 v.AddArg2(ptr, mem)
2471 return true
2472 }
2473 return false
2474 }
2475 func rewriteValueMIPS64_OpMIPS64LoweredAtomicAdd64(v *Value) bool {
2476 v_2 := v.Args[2]
2477 v_1 := v.Args[1]
2478 v_0 := v.Args[0]
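// (LoweredAtomicAdd64 ptr (MOVVconst [c]) mem) && is32Bit(c) => (LoweredAtomicAddconst64 [c] ptr mem)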
2479
2480
2481
2482 for {
2483 ptr := v_0
2484 if v_1.Op != OpMIPS64MOVVconst {
2485 break
2486 }
2487 c := auxIntToInt64(v_1.AuxInt)
2488 mem := v_2
2489 if !(is32Bit(c)) {
2490 break
2491 }
2492 v.reset(OpMIPS64LoweredAtomicAddconst64)
2493 v.AuxInt = int64ToAuxInt(c)
2494 v.AddArg2(ptr, mem)
2495 return true
2496 }
2497 return false
2498 }
2499 func rewriteValueMIPS64_OpMIPS64LoweredAtomicStore32(v *Value) bool {
2500 v_2 := v.Args[2]
2501 v_1 := v.Args[1]
2502 v_0 := v.Args[0]
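// (LoweredAtomicStore32 ptr (MOVVconst [0]) mem) => (LoweredAtomicStorezero32 ptr mem)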
2503
2504
2505 for {
2506 ptr := v_0
2507 if v_1.Op != OpMIPS64MOVVconst || auxIntToInt64(v_1.AuxInt) != 0 {
2508 break
2509 }
2510 mem := v_2
2511 v.reset(OpMIPS64LoweredAtomicStorezero32)
2512 v.AddArg2(ptr, mem)
2513 return true
2514 }
2515 return false
2516 }
2517 func rewriteValueMIPS64_OpMIPS64LoweredAtomicStore64(v *Value) bool {
2518 v_2 := v.Args[2]
2519 v_1 := v.Args[1]
2520 v_0 := v.Args[0]
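// (LoweredAtomicStore64 ptr (MOVVconst [0]) mem) => (LoweredAtomicStorezero64 ptr mem)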
2521
2522
2523 for {
2524 ptr := v_0
2525 if v_1.Op != OpMIPS64MOVVconst || auxIntToInt64(v_1.AuxInt) != 0 {
2526 break
2527 }
2528 mem := v_2
2529 v.reset(OpMIPS64LoweredAtomicStorezero64)
2530 v.AddArg2(ptr, mem)
2531 return true
2532 }
2533 return false
2534 }
2535 func rewriteValueMIPS64_OpMIPS64MOVBUload(v *Value) bool {
2536 v_1 := v.Args[1]
2537 v_0 := v.Args[0]
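// (MOVBUload [off1] {sym} (ADDVconst [off2] ptr) mem) && is32Bit(int64(off1)+off2) => (MOVBUload [off1+int32(off2)] {sym} ptr mem)
// (MOVBUload [off1] {sym1} (MOVVaddr [off2] {sym2} ptr) mem) && canMergeSym(sym1,sym2) && is32Bit(int64(off1)+int64(off2)) => (MOVBUload [off1+int32(off2)] {mergeSym(sym1,sym2)} ptr mem)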
2538
2539
2540
2541 for {
2542 off1 := auxIntToInt32(v.AuxInt)
2543 sym := auxToSym(v.Aux)
2544 if v_0.Op != OpMIPS64ADDVconst {
2545 break
2546 }
2547 off2 := auxIntToInt64(v_0.AuxInt)
2548 ptr := v_0.Args[0]
2549 mem := v_1
2550 if !(is32Bit(int64(off1) + off2)) {
2551 break
2552 }
2553 v.reset(OpMIPS64MOVBUload)
2554 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
2555 v.Aux = symToAux(sym)
2556 v.AddArg2(ptr, mem)
2557 return true
2558 }
2559
2560
2561
2562 for {
2563 off1 := auxIntToInt32(v.AuxInt)
2564 sym1 := auxToSym(v.Aux)
2565 if v_0.Op != OpMIPS64MOVVaddr {
2566 break
2567 }
2568 off2 := auxIntToInt32(v_0.AuxInt)
2569 sym2 := auxToSym(v_0.Aux)
2570 ptr := v_0.Args[0]
2571 mem := v_1
2572 if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2))) {
2573 break
2574 }
2575 v.reset(OpMIPS64MOVBUload)
2576 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
2577 v.Aux = symToAux(mergeSym(sym1, sym2))
2578 v.AddArg2(ptr, mem)
2579 return true
2580 }
2581 return false
2582 }
2583 func rewriteValueMIPS64_OpMIPS64MOVBUreg(v *Value) bool {
2584 v_0 := v.Args[0]
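// Drop a redundant MOVBUreg around a MOVBUload or an already zero-extended byte value,
// and fold a constant operand to MOVVconst [int64(uint8(c))].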
2585
2586
2587 for {
2588 x := v_0
2589 if x.Op != OpMIPS64MOVBUload {
2590 break
2591 }
2592 v.reset(OpMIPS64MOVVreg)
2593 v.AddArg(x)
2594 return true
2595 }
2596
2597
2598 for {
2599 x := v_0
2600 if x.Op != OpMIPS64MOVBUreg {
2601 break
2602 }
2603 v.reset(OpMIPS64MOVVreg)
2604 v.AddArg(x)
2605 return true
2606 }
2607
2608
2609 for {
2610 if v_0.Op != OpMIPS64MOVVconst {
2611 break
2612 }
2613 c := auxIntToInt64(v_0.AuxInt)
2614 v.reset(OpMIPS64MOVVconst)
2615 v.AuxInt = int64ToAuxInt(int64(uint8(c)))
2616 return true
2617 }
2618 return false
2619 }
2620 func rewriteValueMIPS64_OpMIPS64MOVBload(v *Value) bool {
2621 v_1 := v.Args[1]
2622 v_0 := v.Args[0]
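// Fold an ADDVconst offset, or a MOVVaddr base with a mergeable symbol, into the load's offset
// when the combined offset still fits in 32 bits.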
2623
2624
2625
2626 for {
2627 off1 := auxIntToInt32(v.AuxInt)
2628 sym := auxToSym(v.Aux)
2629 if v_0.Op != OpMIPS64ADDVconst {
2630 break
2631 }
2632 off2 := auxIntToInt64(v_0.AuxInt)
2633 ptr := v_0.Args[0]
2634 mem := v_1
2635 if !(is32Bit(int64(off1) + off2)) {
2636 break
2637 }
2638 v.reset(OpMIPS64MOVBload)
2639 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
2640 v.Aux = symToAux(sym)
2641 v.AddArg2(ptr, mem)
2642 return true
2643 }
2644
2645
2646
2647 for {
2648 off1 := auxIntToInt32(v.AuxInt)
2649 sym1 := auxToSym(v.Aux)
2650 if v_0.Op != OpMIPS64MOVVaddr {
2651 break
2652 }
2653 off2 := auxIntToInt32(v_0.AuxInt)
2654 sym2 := auxToSym(v_0.Aux)
2655 ptr := v_0.Args[0]
2656 mem := v_1
2657 if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2))) {
2658 break
2659 }
2660 v.reset(OpMIPS64MOVBload)
2661 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
2662 v.Aux = symToAux(mergeSym(sym1, sym2))
2663 v.AddArg2(ptr, mem)
2664 return true
2665 }
2666 return false
2667 }
2668 func rewriteValueMIPS64_OpMIPS64MOVBreg(v *Value) bool {
2669 v_0 := v.Args[0]
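// Drop a redundant MOVBreg around a MOVBload or an already sign-extended byte value,
// and fold a constant operand to MOVVconst [int64(int8(c))].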
2670
2671
2672 for {
2673 x := v_0
2674 if x.Op != OpMIPS64MOVBload {
2675 break
2676 }
2677 v.reset(OpMIPS64MOVVreg)
2678 v.AddArg(x)
2679 return true
2680 }
2681
2682
2683 for {
2684 x := v_0
2685 if x.Op != OpMIPS64MOVBreg {
2686 break
2687 }
2688 v.reset(OpMIPS64MOVVreg)
2689 v.AddArg(x)
2690 return true
2691 }
2692
2693
2694 for {
2695 if v_0.Op != OpMIPS64MOVVconst {
2696 break
2697 }
2698 c := auxIntToInt64(v_0.AuxInt)
2699 v.reset(OpMIPS64MOVVconst)
2700 v.AuxInt = int64ToAuxInt(int64(int8(c)))
2701 return true
2702 }
2703 return false
2704 }
2705 func rewriteValueMIPS64_OpMIPS64MOVBstore(v *Value) bool {
2706 v_2 := v.Args[2]
2707 v_1 := v.Args[1]
2708 v_0 := v.Args[0]
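// The rules below fold ADDVconst offsets and mergeable MOVVaddr symbols into the store,
// turn stores of constant zero into MOVBstorezero, and drop MOVBreg/MOVBUreg/MOVHreg/MOVHUreg/
// MOVWreg/MOVWUreg wrappers around the stored value, since only the low byte is written.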
2709
2710
2711
2712 for {
2713 off1 := auxIntToInt32(v.AuxInt)
2714 sym := auxToSym(v.Aux)
2715 if v_0.Op != OpMIPS64ADDVconst {
2716 break
2717 }
2718 off2 := auxIntToInt64(v_0.AuxInt)
2719 ptr := v_0.Args[0]
2720 val := v_1
2721 mem := v_2
2722 if !(is32Bit(int64(off1) + off2)) {
2723 break
2724 }
2725 v.reset(OpMIPS64MOVBstore)
2726 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
2727 v.Aux = symToAux(sym)
2728 v.AddArg3(ptr, val, mem)
2729 return true
2730 }
2731
2732
2733
2734 for {
2735 off1 := auxIntToInt32(v.AuxInt)
2736 sym1 := auxToSym(v.Aux)
2737 if v_0.Op != OpMIPS64MOVVaddr {
2738 break
2739 }
2740 off2 := auxIntToInt32(v_0.AuxInt)
2741 sym2 := auxToSym(v_0.Aux)
2742 ptr := v_0.Args[0]
2743 val := v_1
2744 mem := v_2
2745 if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2))) {
2746 break
2747 }
2748 v.reset(OpMIPS64MOVBstore)
2749 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
2750 v.Aux = symToAux(mergeSym(sym1, sym2))
2751 v.AddArg3(ptr, val, mem)
2752 return true
2753 }
2754
2755
2756 for {
2757 off := auxIntToInt32(v.AuxInt)
2758 sym := auxToSym(v.Aux)
2759 ptr := v_0
2760 if v_1.Op != OpMIPS64MOVVconst || auxIntToInt64(v_1.AuxInt) != 0 {
2761 break
2762 }
2763 mem := v_2
2764 v.reset(OpMIPS64MOVBstorezero)
2765 v.AuxInt = int32ToAuxInt(off)
2766 v.Aux = symToAux(sym)
2767 v.AddArg2(ptr, mem)
2768 return true
2769 }
2770
2771
2772 for {
2773 off := auxIntToInt32(v.AuxInt)
2774 sym := auxToSym(v.Aux)
2775 ptr := v_0
2776 if v_1.Op != OpMIPS64MOVBreg {
2777 break
2778 }
2779 x := v_1.Args[0]
2780 mem := v_2
2781 v.reset(OpMIPS64MOVBstore)
2782 v.AuxInt = int32ToAuxInt(off)
2783 v.Aux = symToAux(sym)
2784 v.AddArg3(ptr, x, mem)
2785 return true
2786 }
2787
2788
2789 for {
2790 off := auxIntToInt32(v.AuxInt)
2791 sym := auxToSym(v.Aux)
2792 ptr := v_0
2793 if v_1.Op != OpMIPS64MOVBUreg {
2794 break
2795 }
2796 x := v_1.Args[0]
2797 mem := v_2
2798 v.reset(OpMIPS64MOVBstore)
2799 v.AuxInt = int32ToAuxInt(off)
2800 v.Aux = symToAux(sym)
2801 v.AddArg3(ptr, x, mem)
2802 return true
2803 }
2804
2805
2806 for {
2807 off := auxIntToInt32(v.AuxInt)
2808 sym := auxToSym(v.Aux)
2809 ptr := v_0
2810 if v_1.Op != OpMIPS64MOVHreg {
2811 break
2812 }
2813 x := v_1.Args[0]
2814 mem := v_2
2815 v.reset(OpMIPS64MOVBstore)
2816 v.AuxInt = int32ToAuxInt(off)
2817 v.Aux = symToAux(sym)
2818 v.AddArg3(ptr, x, mem)
2819 return true
2820 }
2821
2822
2823 for {
2824 off := auxIntToInt32(v.AuxInt)
2825 sym := auxToSym(v.Aux)
2826 ptr := v_0
2827 if v_1.Op != OpMIPS64MOVHUreg {
2828 break
2829 }
2830 x := v_1.Args[0]
2831 mem := v_2
2832 v.reset(OpMIPS64MOVBstore)
2833 v.AuxInt = int32ToAuxInt(off)
2834 v.Aux = symToAux(sym)
2835 v.AddArg3(ptr, x, mem)
2836 return true
2837 }
2838
2839
2840 for {
2841 off := auxIntToInt32(v.AuxInt)
2842 sym := auxToSym(v.Aux)
2843 ptr := v_0
2844 if v_1.Op != OpMIPS64MOVWreg {
2845 break
2846 }
2847 x := v_1.Args[0]
2848 mem := v_2
2849 v.reset(OpMIPS64MOVBstore)
2850 v.AuxInt = int32ToAuxInt(off)
2851 v.Aux = symToAux(sym)
2852 v.AddArg3(ptr, x, mem)
2853 return true
2854 }
2855
2856
2857 for {
2858 off := auxIntToInt32(v.AuxInt)
2859 sym := auxToSym(v.Aux)
2860 ptr := v_0
2861 if v_1.Op != OpMIPS64MOVWUreg {
2862 break
2863 }
2864 x := v_1.Args[0]
2865 mem := v_2
2866 v.reset(OpMIPS64MOVBstore)
2867 v.AuxInt = int32ToAuxInt(off)
2868 v.Aux = symToAux(sym)
2869 v.AddArg3(ptr, x, mem)
2870 return true
2871 }
2872 return false
2873 }
2874 func rewriteValueMIPS64_OpMIPS64MOVBstorezero(v *Value) bool {
2875 v_1 := v.Args[1]
2876 v_0 := v.Args[0]
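// Fold an ADDVconst offset, or a MOVVaddr base with a mergeable symbol, into the store's offset
// when the combined offset still fits in 32 bits.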
2877
2878
2879
2880 for {
2881 off1 := auxIntToInt32(v.AuxInt)
2882 sym := auxToSym(v.Aux)
2883 if v_0.Op != OpMIPS64ADDVconst {
2884 break
2885 }
2886 off2 := auxIntToInt64(v_0.AuxInt)
2887 ptr := v_0.Args[0]
2888 mem := v_1
2889 if !(is32Bit(int64(off1) + off2)) {
2890 break
2891 }
2892 v.reset(OpMIPS64MOVBstorezero)
2893 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
2894 v.Aux = symToAux(sym)
2895 v.AddArg2(ptr, mem)
2896 return true
2897 }
2898
2899
2900
2901 for {
2902 off1 := auxIntToInt32(v.AuxInt)
2903 sym1 := auxToSym(v.Aux)
2904 if v_0.Op != OpMIPS64MOVVaddr {
2905 break
2906 }
2907 off2 := auxIntToInt32(v_0.AuxInt)
2908 sym2 := auxToSym(v_0.Aux)
2909 ptr := v_0.Args[0]
2910 mem := v_1
2911 if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2))) {
2912 break
2913 }
2914 v.reset(OpMIPS64MOVBstorezero)
2915 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
2916 v.Aux = symToAux(mergeSym(sym1, sym2))
2917 v.AddArg2(ptr, mem)
2918 return true
2919 }
2920 return false
2921 }
2922 func rewriteValueMIPS64_OpMIPS64MOVDload(v *Value) bool {
2923 v_1 := v.Args[1]
2924 v_0 := v.Args[0]
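// Fold an ADDVconst offset, or a MOVVaddr base with a mergeable symbol, into the load's offset
// when the combined offset still fits in 32 bits.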
2925
2926
2927
2928 for {
2929 off1 := auxIntToInt32(v.AuxInt)
2930 sym := auxToSym(v.Aux)
2931 if v_0.Op != OpMIPS64ADDVconst {
2932 break
2933 }
2934 off2 := auxIntToInt64(v_0.AuxInt)
2935 ptr := v_0.Args[0]
2936 mem := v_1
2937 if !(is32Bit(int64(off1) + off2)) {
2938 break
2939 }
2940 v.reset(OpMIPS64MOVDload)
2941 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
2942 v.Aux = symToAux(sym)
2943 v.AddArg2(ptr, mem)
2944 return true
2945 }
2946
2947
2948
2949 for {
2950 off1 := auxIntToInt32(v.AuxInt)
2951 sym1 := auxToSym(v.Aux)
2952 if v_0.Op != OpMIPS64MOVVaddr {
2953 break
2954 }
2955 off2 := auxIntToInt32(v_0.AuxInt)
2956 sym2 := auxToSym(v_0.Aux)
2957 ptr := v_0.Args[0]
2958 mem := v_1
2959 if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2))) {
2960 break
2961 }
2962 v.reset(OpMIPS64MOVDload)
2963 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
2964 v.Aux = symToAux(mergeSym(sym1, sym2))
2965 v.AddArg2(ptr, mem)
2966 return true
2967 }
2968 return false
2969 }
2970 func rewriteValueMIPS64_OpMIPS64MOVDstore(v *Value) bool {
2971 v_2 := v.Args[2]
2972 v_1 := v.Args[1]
2973 v_0 := v.Args[0]
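// Fold an ADDVconst offset, or a MOVVaddr base with a mergeable symbol, into the store's offset
// when the combined offset still fits in 32 bits.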
2974
2975
2976
2977 for {
2978 off1 := auxIntToInt32(v.AuxInt)
2979 sym := auxToSym(v.Aux)
2980 if v_0.Op != OpMIPS64ADDVconst {
2981 break
2982 }
2983 off2 := auxIntToInt64(v_0.AuxInt)
2984 ptr := v_0.Args[0]
2985 val := v_1
2986 mem := v_2
2987 if !(is32Bit(int64(off1) + off2)) {
2988 break
2989 }
2990 v.reset(OpMIPS64MOVDstore)
2991 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
2992 v.Aux = symToAux(sym)
2993 v.AddArg3(ptr, val, mem)
2994 return true
2995 }
2996
2997
2998
2999 for {
3000 off1 := auxIntToInt32(v.AuxInt)
3001 sym1 := auxToSym(v.Aux)
3002 if v_0.Op != OpMIPS64MOVVaddr {
3003 break
3004 }
3005 off2 := auxIntToInt32(v_0.AuxInt)
3006 sym2 := auxToSym(v_0.Aux)
3007 ptr := v_0.Args[0]
3008 val := v_1
3009 mem := v_2
3010 if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2))) {
3011 break
3012 }
3013 v.reset(OpMIPS64MOVDstore)
3014 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
3015 v.Aux = symToAux(mergeSym(sym1, sym2))
3016 v.AddArg3(ptr, val, mem)
3017 return true
3018 }
3019 return false
3020 }
3021 func rewriteValueMIPS64_OpMIPS64MOVFload(v *Value) bool {
3022 v_1 := v.Args[1]
3023 v_0 := v.Args[0]
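// Fold an ADDVconst offset, or a MOVVaddr base with a mergeable symbol, into the load's offset
// when the combined offset still fits in 32 bits.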
3024
3025
3026
3027 for {
3028 off1 := auxIntToInt32(v.AuxInt)
3029 sym := auxToSym(v.Aux)
3030 if v_0.Op != OpMIPS64ADDVconst {
3031 break
3032 }
3033 off2 := auxIntToInt64(v_0.AuxInt)
3034 ptr := v_0.Args[0]
3035 mem := v_1
3036 if !(is32Bit(int64(off1) + off2)) {
3037 break
3038 }
3039 v.reset(OpMIPS64MOVFload)
3040 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
3041 v.Aux = symToAux(sym)
3042 v.AddArg2(ptr, mem)
3043 return true
3044 }
3045
3046
3047
3048 for {
3049 off1 := auxIntToInt32(v.AuxInt)
3050 sym1 := auxToSym(v.Aux)
3051 if v_0.Op != OpMIPS64MOVVaddr {
3052 break
3053 }
3054 off2 := auxIntToInt32(v_0.AuxInt)
3055 sym2 := auxToSym(v_0.Aux)
3056 ptr := v_0.Args[0]
3057 mem := v_1
3058 if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2))) {
3059 break
3060 }
3061 v.reset(OpMIPS64MOVFload)
3062 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
3063 v.Aux = symToAux(mergeSym(sym1, sym2))
3064 v.AddArg2(ptr, mem)
3065 return true
3066 }
3067 return false
3068 }
3069 func rewriteValueMIPS64_OpMIPS64MOVFstore(v *Value) bool {
3070 v_2 := v.Args[2]
3071 v_1 := v.Args[1]
3072 v_0 := v.Args[0]
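// Fold an ADDVconst offset, or a MOVVaddr base with a mergeable symbol, into the store's offset
// when the combined offset still fits in 32 bits.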
3073
3074
3075
3076 for {
3077 off1 := auxIntToInt32(v.AuxInt)
3078 sym := auxToSym(v.Aux)
3079 if v_0.Op != OpMIPS64ADDVconst {
3080 break
3081 }
3082 off2 := auxIntToInt64(v_0.AuxInt)
3083 ptr := v_0.Args[0]
3084 val := v_1
3085 mem := v_2
3086 if !(is32Bit(int64(off1) + off2)) {
3087 break
3088 }
3089 v.reset(OpMIPS64MOVFstore)
3090 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
3091 v.Aux = symToAux(sym)
3092 v.AddArg3(ptr, val, mem)
3093 return true
3094 }
3095
3096
3097
3098 for {
3099 off1 := auxIntToInt32(v.AuxInt)
3100 sym1 := auxToSym(v.Aux)
3101 if v_0.Op != OpMIPS64MOVVaddr {
3102 break
3103 }
3104 off2 := auxIntToInt32(v_0.AuxInt)
3105 sym2 := auxToSym(v_0.Aux)
3106 ptr := v_0.Args[0]
3107 val := v_1
3108 mem := v_2
3109 if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2))) {
3110 break
3111 }
3112 v.reset(OpMIPS64MOVFstore)
3113 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
3114 v.Aux = symToAux(mergeSym(sym1, sym2))
3115 v.AddArg3(ptr, val, mem)
3116 return true
3117 }
3118 return false
3119 }
3120 func rewriteValueMIPS64_OpMIPS64MOVHUload(v *Value) bool {
3121 v_1 := v.Args[1]
3122 v_0 := v.Args[0]
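// Fold an ADDVconst offset, or a MOVVaddr base with a mergeable symbol, into the load's offset
// when the combined offset still fits in 32 bits.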
3123
3124
3125
3126 for {
3127 off1 := auxIntToInt32(v.AuxInt)
3128 sym := auxToSym(v.Aux)
3129 if v_0.Op != OpMIPS64ADDVconst {
3130 break
3131 }
3132 off2 := auxIntToInt64(v_0.AuxInt)
3133 ptr := v_0.Args[0]
3134 mem := v_1
3135 if !(is32Bit(int64(off1) + off2)) {
3136 break
3137 }
3138 v.reset(OpMIPS64MOVHUload)
3139 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
3140 v.Aux = symToAux(sym)
3141 v.AddArg2(ptr, mem)
3142 return true
3143 }
3144
3145
3146
3147 for {
3148 off1 := auxIntToInt32(v.AuxInt)
3149 sym1 := auxToSym(v.Aux)
3150 if v_0.Op != OpMIPS64MOVVaddr {
3151 break
3152 }
3153 off2 := auxIntToInt32(v_0.AuxInt)
3154 sym2 := auxToSym(v_0.Aux)
3155 ptr := v_0.Args[0]
3156 mem := v_1
3157 if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2))) {
3158 break
3159 }
3160 v.reset(OpMIPS64MOVHUload)
3161 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
3162 v.Aux = symToAux(mergeSym(sym1, sym2))
3163 v.AddArg2(ptr, mem)
3164 return true
3165 }
3166 return false
3167 }
3168 func rewriteValueMIPS64_OpMIPS64MOVHUreg(v *Value) bool {
3169 v_0 := v.Args[0]
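// Drop a redundant MOVHUreg around unsigned loads of 16 bits or fewer and already zero-extended
// values, and fold a constant operand to MOVVconst [int64(uint16(c))].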
3170
3171
3172 for {
3173 x := v_0
3174 if x.Op != OpMIPS64MOVBUload {
3175 break
3176 }
3177 v.reset(OpMIPS64MOVVreg)
3178 v.AddArg(x)
3179 return true
3180 }
3181
3182
3183 for {
3184 x := v_0
3185 if x.Op != OpMIPS64MOVHUload {
3186 break
3187 }
3188 v.reset(OpMIPS64MOVVreg)
3189 v.AddArg(x)
3190 return true
3191 }
3192
3193
3194 for {
3195 x := v_0
3196 if x.Op != OpMIPS64MOVBUreg {
3197 break
3198 }
3199 v.reset(OpMIPS64MOVVreg)
3200 v.AddArg(x)
3201 return true
3202 }
3203
3204
3205 for {
3206 x := v_0
3207 if x.Op != OpMIPS64MOVHUreg {
3208 break
3209 }
3210 v.reset(OpMIPS64MOVVreg)
3211 v.AddArg(x)
3212 return true
3213 }
3214
3215
3216 for {
3217 if v_0.Op != OpMIPS64MOVVconst {
3218 break
3219 }
3220 c := auxIntToInt64(v_0.AuxInt)
3221 v.reset(OpMIPS64MOVVconst)
3222 v.AuxInt = int64ToAuxInt(int64(uint16(c)))
3223 return true
3224 }
3225 return false
3226 }
3227 func rewriteValueMIPS64_OpMIPS64MOVHload(v *Value) bool {
3228 v_1 := v.Args[1]
3229 v_0 := v.Args[0]
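// Fold an ADDVconst offset, or a MOVVaddr base with a mergeable symbol, into the load's offset
// when the combined offset still fits in 32 bits.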
3230
3231
3232
3233 for {
3234 off1 := auxIntToInt32(v.AuxInt)
3235 sym := auxToSym(v.Aux)
3236 if v_0.Op != OpMIPS64ADDVconst {
3237 break
3238 }
3239 off2 := auxIntToInt64(v_0.AuxInt)
3240 ptr := v_0.Args[0]
3241 mem := v_1
3242 if !(is32Bit(int64(off1) + off2)) {
3243 break
3244 }
3245 v.reset(OpMIPS64MOVHload)
3246 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
3247 v.Aux = symToAux(sym)
3248 v.AddArg2(ptr, mem)
3249 return true
3250 }
3251
3252
3253
3254 for {
3255 off1 := auxIntToInt32(v.AuxInt)
3256 sym1 := auxToSym(v.Aux)
3257 if v_0.Op != OpMIPS64MOVVaddr {
3258 break
3259 }
3260 off2 := auxIntToInt32(v_0.AuxInt)
3261 sym2 := auxToSym(v_0.Aux)
3262 ptr := v_0.Args[0]
3263 mem := v_1
3264 if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2))) {
3265 break
3266 }
3267 v.reset(OpMIPS64MOVHload)
3268 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
3269 v.Aux = symToAux(mergeSym(sym1, sym2))
3270 v.AddArg2(ptr, mem)
3271 return true
3272 }
3273 return false
3274 }
3275 func rewriteValueMIPS64_OpMIPS64MOVHreg(v *Value) bool {
3276 v_0 := v.Args[0]
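// Drop a redundant MOVHreg around loads and extensions of 16 bits or fewer whose result already
// fits a signed half-word, and fold a constant operand to MOVVconst [int64(int16(c))].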
3277
3278
3279 for {
3280 x := v_0
3281 if x.Op != OpMIPS64MOVBload {
3282 break
3283 }
3284 v.reset(OpMIPS64MOVVreg)
3285 v.AddArg(x)
3286 return true
3287 }
3288
3289
3290 for {
3291 x := v_0
3292 if x.Op != OpMIPS64MOVBUload {
3293 break
3294 }
3295 v.reset(OpMIPS64MOVVreg)
3296 v.AddArg(x)
3297 return true
3298 }
3299
3300
3301 for {
3302 x := v_0
3303 if x.Op != OpMIPS64MOVHload {
3304 break
3305 }
3306 v.reset(OpMIPS64MOVVreg)
3307 v.AddArg(x)
3308 return true
3309 }
3310
3311
3312 for {
3313 x := v_0
3314 if x.Op != OpMIPS64MOVBreg {
3315 break
3316 }
3317 v.reset(OpMIPS64MOVVreg)
3318 v.AddArg(x)
3319 return true
3320 }
3321
3322
3323 for {
3324 x := v_0
3325 if x.Op != OpMIPS64MOVBUreg {
3326 break
3327 }
3328 v.reset(OpMIPS64MOVVreg)
3329 v.AddArg(x)
3330 return true
3331 }
3332
3333
3334 for {
3335 x := v_0
3336 if x.Op != OpMIPS64MOVHreg {
3337 break
3338 }
3339 v.reset(OpMIPS64MOVVreg)
3340 v.AddArg(x)
3341 return true
3342 }
3343
3344
3345 for {
3346 if v_0.Op != OpMIPS64MOVVconst {
3347 break
3348 }
3349 c := auxIntToInt64(v_0.AuxInt)
3350 v.reset(OpMIPS64MOVVconst)
3351 v.AuxInt = int64ToAuxInt(int64(int16(c)))
3352 return true
3353 }
3354 return false
3355 }
3356 func rewriteValueMIPS64_OpMIPS64MOVHstore(v *Value) bool {
3357 v_2 := v.Args[2]
3358 v_1 := v.Args[1]
3359 v_0 := v.Args[0]
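// The rules below fold ADDVconst offsets and mergeable MOVVaddr symbols into the store,
// turn stores of constant zero into MOVHstorezero, and drop MOVHreg/MOVHUreg/MOVWreg/MOVWUreg
// wrappers around the stored value, since only the low half-word is written.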
3360
3361
3362
3363 for {
3364 off1 := auxIntToInt32(v.AuxInt)
3365 sym := auxToSym(v.Aux)
3366 if v_0.Op != OpMIPS64ADDVconst {
3367 break
3368 }
3369 off2 := auxIntToInt64(v_0.AuxInt)
3370 ptr := v_0.Args[0]
3371 val := v_1
3372 mem := v_2
3373 if !(is32Bit(int64(off1) + off2)) {
3374 break
3375 }
3376 v.reset(OpMIPS64MOVHstore)
3377 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
3378 v.Aux = symToAux(sym)
3379 v.AddArg3(ptr, val, mem)
3380 return true
3381 }
3382
3383
3384
3385 for {
3386 off1 := auxIntToInt32(v.AuxInt)
3387 sym1 := auxToSym(v.Aux)
3388 if v_0.Op != OpMIPS64MOVVaddr {
3389 break
3390 }
3391 off2 := auxIntToInt32(v_0.AuxInt)
3392 sym2 := auxToSym(v_0.Aux)
3393 ptr := v_0.Args[0]
3394 val := v_1
3395 mem := v_2
3396 if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2))) {
3397 break
3398 }
3399 v.reset(OpMIPS64MOVHstore)
3400 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
3401 v.Aux = symToAux(mergeSym(sym1, sym2))
3402 v.AddArg3(ptr, val, mem)
3403 return true
3404 }
3405
3406
3407 for {
3408 off := auxIntToInt32(v.AuxInt)
3409 sym := auxToSym(v.Aux)
3410 ptr := v_0
3411 if v_1.Op != OpMIPS64MOVVconst || auxIntToInt64(v_1.AuxInt) != 0 {
3412 break
3413 }
3414 mem := v_2
3415 v.reset(OpMIPS64MOVHstorezero)
3416 v.AuxInt = int32ToAuxInt(off)
3417 v.Aux = symToAux(sym)
3418 v.AddArg2(ptr, mem)
3419 return true
3420 }
3421
3422
3423 for {
3424 off := auxIntToInt32(v.AuxInt)
3425 sym := auxToSym(v.Aux)
3426 ptr := v_0
3427 if v_1.Op != OpMIPS64MOVHreg {
3428 break
3429 }
3430 x := v_1.Args[0]
3431 mem := v_2
3432 v.reset(OpMIPS64MOVHstore)
3433 v.AuxInt = int32ToAuxInt(off)
3434 v.Aux = symToAux(sym)
3435 v.AddArg3(ptr, x, mem)
3436 return true
3437 }
3438
3439
3440 for {
3441 off := auxIntToInt32(v.AuxInt)
3442 sym := auxToSym(v.Aux)
3443 ptr := v_0
3444 if v_1.Op != OpMIPS64MOVHUreg {
3445 break
3446 }
3447 x := v_1.Args[0]
3448 mem := v_2
3449 v.reset(OpMIPS64MOVHstore)
3450 v.AuxInt = int32ToAuxInt(off)
3451 v.Aux = symToAux(sym)
3452 v.AddArg3(ptr, x, mem)
3453 return true
3454 }
3455
3456
3457 for {
3458 off := auxIntToInt32(v.AuxInt)
3459 sym := auxToSym(v.Aux)
3460 ptr := v_0
3461 if v_1.Op != OpMIPS64MOVWreg {
3462 break
3463 }
3464 x := v_1.Args[0]
3465 mem := v_2
3466 v.reset(OpMIPS64MOVHstore)
3467 v.AuxInt = int32ToAuxInt(off)
3468 v.Aux = symToAux(sym)
3469 v.AddArg3(ptr, x, mem)
3470 return true
3471 }
3472
3473
3474 for {
3475 off := auxIntToInt32(v.AuxInt)
3476 sym := auxToSym(v.Aux)
3477 ptr := v_0
3478 if v_1.Op != OpMIPS64MOVWUreg {
3479 break
3480 }
3481 x := v_1.Args[0]
3482 mem := v_2
3483 v.reset(OpMIPS64MOVHstore)
3484 v.AuxInt = int32ToAuxInt(off)
3485 v.Aux = symToAux(sym)
3486 v.AddArg3(ptr, x, mem)
3487 return true
3488 }
3489 return false
3490 }
3491 func rewriteValueMIPS64_OpMIPS64MOVHstorezero(v *Value) bool {
3492 v_1 := v.Args[1]
3493 v_0 := v.Args[0]
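// Fold an ADDVconst offset, or a MOVVaddr base with a mergeable symbol, into the store's offset
// when the combined offset still fits in 32 bits.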
3494
3495
3496
3497 for {
3498 off1 := auxIntToInt32(v.AuxInt)
3499 sym := auxToSym(v.Aux)
3500 if v_0.Op != OpMIPS64ADDVconst {
3501 break
3502 }
3503 off2 := auxIntToInt64(v_0.AuxInt)
3504 ptr := v_0.Args[0]
3505 mem := v_1
3506 if !(is32Bit(int64(off1) + off2)) {
3507 break
3508 }
3509 v.reset(OpMIPS64MOVHstorezero)
3510 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
3511 v.Aux = symToAux(sym)
3512 v.AddArg2(ptr, mem)
3513 return true
3514 }
3515
3516
3517
3518 for {
3519 off1 := auxIntToInt32(v.AuxInt)
3520 sym1 := auxToSym(v.Aux)
3521 if v_0.Op != OpMIPS64MOVVaddr {
3522 break
3523 }
3524 off2 := auxIntToInt32(v_0.AuxInt)
3525 sym2 := auxToSym(v_0.Aux)
3526 ptr := v_0.Args[0]
3527 mem := v_1
3528 if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2))) {
3529 break
3530 }
3531 v.reset(OpMIPS64MOVHstorezero)
3532 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
3533 v.Aux = symToAux(mergeSym(sym1, sym2))
3534 v.AddArg2(ptr, mem)
3535 return true
3536 }
3537 return false
3538 }
3539 func rewriteValueMIPS64_OpMIPS64MOVVload(v *Value) bool {
3540 v_1 := v.Args[1]
3541 v_0 := v.Args[0]
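// Fold an ADDVconst offset, or a MOVVaddr base with a mergeable symbol, into the load's offset
// when the combined offset still fits in 32 bits.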
3542
3543
3544
3545 for {
3546 off1 := auxIntToInt32(v.AuxInt)
3547 sym := auxToSym(v.Aux)
3548 if v_0.Op != OpMIPS64ADDVconst {
3549 break
3550 }
3551 off2 := auxIntToInt64(v_0.AuxInt)
3552 ptr := v_0.Args[0]
3553 mem := v_1
3554 if !(is32Bit(int64(off1) + off2)) {
3555 break
3556 }
3557 v.reset(OpMIPS64MOVVload)
3558 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
3559 v.Aux = symToAux(sym)
3560 v.AddArg2(ptr, mem)
3561 return true
3562 }
3563
3564
3565
3566 for {
3567 off1 := auxIntToInt32(v.AuxInt)
3568 sym1 := auxToSym(v.Aux)
3569 if v_0.Op != OpMIPS64MOVVaddr {
3570 break
3571 }
3572 off2 := auxIntToInt32(v_0.AuxInt)
3573 sym2 := auxToSym(v_0.Aux)
3574 ptr := v_0.Args[0]
3575 mem := v_1
3576 if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2))) {
3577 break
3578 }
3579 v.reset(OpMIPS64MOVVload)
3580 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
3581 v.Aux = symToAux(mergeSym(sym1, sym2))
3582 v.AddArg2(ptr, mem)
3583 return true
3584 }
3585 return false
3586 }
3587 func rewriteValueMIPS64_OpMIPS64MOVVreg(v *Value) bool {
3588 v_0 := v.Args[0]
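// (MOVVreg x) && x.Uses == 1 => (MOVVnop x)
// (MOVVreg (MOVVconst [c])) => (MOVVconst [c])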
3589
3590
3591
3592 for {
3593 x := v_0
3594 if !(x.Uses == 1) {
3595 break
3596 }
3597 v.reset(OpMIPS64MOVVnop)
3598 v.AddArg(x)
3599 return true
3600 }
3601
3602
3603 for {
3604 if v_0.Op != OpMIPS64MOVVconst {
3605 break
3606 }
3607 c := auxIntToInt64(v_0.AuxInt)
3608 v.reset(OpMIPS64MOVVconst)
3609 v.AuxInt = int64ToAuxInt(c)
3610 return true
3611 }
3612 return false
3613 }
3614 func rewriteValueMIPS64_OpMIPS64MOVVstore(v *Value) bool {
3615 v_2 := v.Args[2]
3616 v_1 := v.Args[1]
3617 v_0 := v.Args[0]
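// Fold ADDVconst offsets and mergeable MOVVaddr symbols into the store, and turn stores of
// constant zero into MOVVstorezero.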
3618
3619
3620
3621 for {
3622 off1 := auxIntToInt32(v.AuxInt)
3623 sym := auxToSym(v.Aux)
3624 if v_0.Op != OpMIPS64ADDVconst {
3625 break
3626 }
3627 off2 := auxIntToInt64(v_0.AuxInt)
3628 ptr := v_0.Args[0]
3629 val := v_1
3630 mem := v_2
3631 if !(is32Bit(int64(off1) + off2)) {
3632 break
3633 }
3634 v.reset(OpMIPS64MOVVstore)
3635 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
3636 v.Aux = symToAux(sym)
3637 v.AddArg3(ptr, val, mem)
3638 return true
3639 }
3640
3641
3642
3643 for {
3644 off1 := auxIntToInt32(v.AuxInt)
3645 sym1 := auxToSym(v.Aux)
3646 if v_0.Op != OpMIPS64MOVVaddr {
3647 break
3648 }
3649 off2 := auxIntToInt32(v_0.AuxInt)
3650 sym2 := auxToSym(v_0.Aux)
3651 ptr := v_0.Args[0]
3652 val := v_1
3653 mem := v_2
3654 if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2))) {
3655 break
3656 }
3657 v.reset(OpMIPS64MOVVstore)
3658 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
3659 v.Aux = symToAux(mergeSym(sym1, sym2))
3660 v.AddArg3(ptr, val, mem)
3661 return true
3662 }
3663
3664
3665 for {
3666 off := auxIntToInt32(v.AuxInt)
3667 sym := auxToSym(v.Aux)
3668 ptr := v_0
3669 if v_1.Op != OpMIPS64MOVVconst || auxIntToInt64(v_1.AuxInt) != 0 {
3670 break
3671 }
3672 mem := v_2
3673 v.reset(OpMIPS64MOVVstorezero)
3674 v.AuxInt = int32ToAuxInt(off)
3675 v.Aux = symToAux(sym)
3676 v.AddArg2(ptr, mem)
3677 return true
3678 }
3679 return false
3680 }
3681 func rewriteValueMIPS64_OpMIPS64MOVVstorezero(v *Value) bool {
3682 v_1 := v.Args[1]
3683 v_0 := v.Args[0]
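// Fold an ADDVconst offset, or a MOVVaddr base with a mergeable symbol, into the store's offset
// when the combined offset still fits in 32 bits.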
3684
3685
3686
3687 for {
3688 off1 := auxIntToInt32(v.AuxInt)
3689 sym := auxToSym(v.Aux)
3690 if v_0.Op != OpMIPS64ADDVconst {
3691 break
3692 }
3693 off2 := auxIntToInt64(v_0.AuxInt)
3694 ptr := v_0.Args[0]
3695 mem := v_1
3696 if !(is32Bit(int64(off1) + off2)) {
3697 break
3698 }
3699 v.reset(OpMIPS64MOVVstorezero)
3700 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
3701 v.Aux = symToAux(sym)
3702 v.AddArg2(ptr, mem)
3703 return true
3704 }
3705
3706
3707
3708 for {
3709 off1 := auxIntToInt32(v.AuxInt)
3710 sym1 := auxToSym(v.Aux)
3711 if v_0.Op != OpMIPS64MOVVaddr {
3712 break
3713 }
3714 off2 := auxIntToInt32(v_0.AuxInt)
3715 sym2 := auxToSym(v_0.Aux)
3716 ptr := v_0.Args[0]
3717 mem := v_1
3718 if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2))) {
3719 break
3720 }
3721 v.reset(OpMIPS64MOVVstorezero)
3722 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
3723 v.Aux = symToAux(mergeSym(sym1, sym2))
3724 v.AddArg2(ptr, mem)
3725 return true
3726 }
3727 return false
3728 }
3729 func rewriteValueMIPS64_OpMIPS64MOVWUload(v *Value) bool {
3730 v_1 := v.Args[1]
3731 v_0 := v.Args[0]
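// Fold an ADDVconst offset, or a MOVVaddr base with a mergeable symbol, into the load's offset
// when the combined offset still fits in 32 bits.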
3732
3733
3734
3735 for {
3736 off1 := auxIntToInt32(v.AuxInt)
3737 sym := auxToSym(v.Aux)
3738 if v_0.Op != OpMIPS64ADDVconst {
3739 break
3740 }
3741 off2 := auxIntToInt64(v_0.AuxInt)
3742 ptr := v_0.Args[0]
3743 mem := v_1
3744 if !(is32Bit(int64(off1) + off2)) {
3745 break
3746 }
3747 v.reset(OpMIPS64MOVWUload)
3748 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
3749 v.Aux = symToAux(sym)
3750 v.AddArg2(ptr, mem)
3751 return true
3752 }
3753
3754
3755
3756 for {
3757 off1 := auxIntToInt32(v.AuxInt)
3758 sym1 := auxToSym(v.Aux)
3759 if v_0.Op != OpMIPS64MOVVaddr {
3760 break
3761 }
3762 off2 := auxIntToInt32(v_0.AuxInt)
3763 sym2 := auxToSym(v_0.Aux)
3764 ptr := v_0.Args[0]
3765 mem := v_1
3766 if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2))) {
3767 break
3768 }
3769 v.reset(OpMIPS64MOVWUload)
3770 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
3771 v.Aux = symToAux(mergeSym(sym1, sym2))
3772 v.AddArg2(ptr, mem)
3773 return true
3774 }
3775 return false
3776 }
3777 func rewriteValueMIPS64_OpMIPS64MOVWUreg(v *Value) bool {
3778 v_0 := v.Args[0]
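// Drop a redundant MOVWUreg around unsigned loads of 32 bits or fewer and already zero-extended
// values, and fold a constant operand to MOVVconst [int64(uint32(c))].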
3779
3780
3781 for {
3782 x := v_0
3783 if x.Op != OpMIPS64MOVBUload {
3784 break
3785 }
3786 v.reset(OpMIPS64MOVVreg)
3787 v.AddArg(x)
3788 return true
3789 }
3790
3791
3792 for {
3793 x := v_0
3794 if x.Op != OpMIPS64MOVHUload {
3795 break
3796 }
3797 v.reset(OpMIPS64MOVVreg)
3798 v.AddArg(x)
3799 return true
3800 }
3801
3802
3803 for {
3804 x := v_0
3805 if x.Op != OpMIPS64MOVWUload {
3806 break
3807 }
3808 v.reset(OpMIPS64MOVVreg)
3809 v.AddArg(x)
3810 return true
3811 }
3812
3813
3814 for {
3815 x := v_0
3816 if x.Op != OpMIPS64MOVBUreg {
3817 break
3818 }
3819 v.reset(OpMIPS64MOVVreg)
3820 v.AddArg(x)
3821 return true
3822 }
3823
3824
3825 for {
3826 x := v_0
3827 if x.Op != OpMIPS64MOVHUreg {
3828 break
3829 }
3830 v.reset(OpMIPS64MOVVreg)
3831 v.AddArg(x)
3832 return true
3833 }
3834
3835
3836 for {
3837 x := v_0
3838 if x.Op != OpMIPS64MOVWUreg {
3839 break
3840 }
3841 v.reset(OpMIPS64MOVVreg)
3842 v.AddArg(x)
3843 return true
3844 }
3845
3846
3847 for {
3848 if v_0.Op != OpMIPS64MOVVconst {
3849 break
3850 }
3851 c := auxIntToInt64(v_0.AuxInt)
3852 v.reset(OpMIPS64MOVVconst)
3853 v.AuxInt = int64ToAuxInt(int64(uint32(c)))
3854 return true
3855 }
3856 return false
3857 }
3858 func rewriteValueMIPS64_OpMIPS64MOVWload(v *Value) bool {
3859 v_1 := v.Args[1]
3860 v_0 := v.Args[0]
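// Fold an ADDVconst offset, or a MOVVaddr base with a mergeable symbol, into the load's offset
// when the combined offset still fits in 32 bits.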
3861
3862
3863
3864 for {
3865 off1 := auxIntToInt32(v.AuxInt)
3866 sym := auxToSym(v.Aux)
3867 if v_0.Op != OpMIPS64ADDVconst {
3868 break
3869 }
3870 off2 := auxIntToInt64(v_0.AuxInt)
3871 ptr := v_0.Args[0]
3872 mem := v_1
3873 if !(is32Bit(int64(off1) + off2)) {
3874 break
3875 }
3876 v.reset(OpMIPS64MOVWload)
3877 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
3878 v.Aux = symToAux(sym)
3879 v.AddArg2(ptr, mem)
3880 return true
3881 }
3882
3883
3884
3885 for {
3886 off1 := auxIntToInt32(v.AuxInt)
3887 sym1 := auxToSym(v.Aux)
3888 if v_0.Op != OpMIPS64MOVVaddr {
3889 break
3890 }
3891 off2 := auxIntToInt32(v_0.AuxInt)
3892 sym2 := auxToSym(v_0.Aux)
3893 ptr := v_0.Args[0]
3894 mem := v_1
3895 if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2))) {
3896 break
3897 }
3898 v.reset(OpMIPS64MOVWload)
3899 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
3900 v.Aux = symToAux(mergeSym(sym1, sym2))
3901 v.AddArg2(ptr, mem)
3902 return true
3903 }
3904 return false
3905 }
3906 func rewriteValueMIPS64_OpMIPS64MOVWreg(v *Value) bool {
3907 v_0 := v.Args[0]
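// Drop a redundant MOVWreg around loads of 32 bits or fewer and values already extended to fit
// a signed word, and fold a constant operand to MOVVconst [int64(int32(c))].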
3908
3909
3910 for {
3911 x := v_0
3912 if x.Op != OpMIPS64MOVBload {
3913 break
3914 }
3915 v.reset(OpMIPS64MOVVreg)
3916 v.AddArg(x)
3917 return true
3918 }
3919
3920
3921 for {
3922 x := v_0
3923 if x.Op != OpMIPS64MOVBUload {
3924 break
3925 }
3926 v.reset(OpMIPS64MOVVreg)
3927 v.AddArg(x)
3928 return true
3929 }
3930
3931
3932 for {
3933 x := v_0
3934 if x.Op != OpMIPS64MOVHload {
3935 break
3936 }
3937 v.reset(OpMIPS64MOVVreg)
3938 v.AddArg(x)
3939 return true
3940 }
3941
3942
3943 for {
3944 x := v_0
3945 if x.Op != OpMIPS64MOVHUload {
3946 break
3947 }
3948 v.reset(OpMIPS64MOVVreg)
3949 v.AddArg(x)
3950 return true
3951 }
3952
3953
3954 for {
3955 x := v_0
3956 if x.Op != OpMIPS64MOVWload {
3957 break
3958 }
3959 v.reset(OpMIPS64MOVVreg)
3960 v.AddArg(x)
3961 return true
3962 }
3963
3964
3965 for {
3966 x := v_0
3967 if x.Op != OpMIPS64MOVBreg {
3968 break
3969 }
3970 v.reset(OpMIPS64MOVVreg)
3971 v.AddArg(x)
3972 return true
3973 }
3974
3975
3976 for {
3977 x := v_0
3978 if x.Op != OpMIPS64MOVBUreg {
3979 break
3980 }
3981 v.reset(OpMIPS64MOVVreg)
3982 v.AddArg(x)
3983 return true
3984 }
3985
3986
3987 for {
3988 x := v_0
3989 if x.Op != OpMIPS64MOVHreg {
3990 break
3991 }
3992 v.reset(OpMIPS64MOVVreg)
3993 v.AddArg(x)
3994 return true
3995 }
3996
3997
3998 for {
3999 x := v_0
4000 if x.Op != OpMIPS64MOVWreg {
4001 break
4002 }
4003 v.reset(OpMIPS64MOVVreg)
4004 v.AddArg(x)
4005 return true
4006 }
4007
4008
4009 for {
4010 if v_0.Op != OpMIPS64MOVVconst {
4011 break
4012 }
4013 c := auxIntToInt64(v_0.AuxInt)
4014 v.reset(OpMIPS64MOVVconst)
4015 v.AuxInt = int64ToAuxInt(int64(int32(c)))
4016 return true
4017 }
4018 return false
4019 }
4020 func rewriteValueMIPS64_OpMIPS64MOVWstore(v *Value) bool {
4021 v_2 := v.Args[2]
4022 v_1 := v.Args[1]
4023 v_0 := v.Args[0]
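// Fold ADDVconst offsets and mergeable MOVVaddr symbols into the store, turn stores of constant
// zero into MOVWstorezero, and drop MOVWreg/MOVWUreg wrappers around the stored value.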
4024
4025
4026
4027 for {
4028 off1 := auxIntToInt32(v.AuxInt)
4029 sym := auxToSym(v.Aux)
4030 if v_0.Op != OpMIPS64ADDVconst {
4031 break
4032 }
4033 off2 := auxIntToInt64(v_0.AuxInt)
4034 ptr := v_0.Args[0]
4035 val := v_1
4036 mem := v_2
4037 if !(is32Bit(int64(off1) + off2)) {
4038 break
4039 }
4040 v.reset(OpMIPS64MOVWstore)
4041 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
4042 v.Aux = symToAux(sym)
4043 v.AddArg3(ptr, val, mem)
4044 return true
4045 }
4046
4047
4048
4049 for {
4050 off1 := auxIntToInt32(v.AuxInt)
4051 sym1 := auxToSym(v.Aux)
4052 if v_0.Op != OpMIPS64MOVVaddr {
4053 break
4054 }
4055 off2 := auxIntToInt32(v_0.AuxInt)
4056 sym2 := auxToSym(v_0.Aux)
4057 ptr := v_0.Args[0]
4058 val := v_1
4059 mem := v_2
4060 if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2))) {
4061 break
4062 }
4063 v.reset(OpMIPS64MOVWstore)
4064 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
4065 v.Aux = symToAux(mergeSym(sym1, sym2))
4066 v.AddArg3(ptr, val, mem)
4067 return true
4068 }
4069
4070
4071 for {
4072 off := auxIntToInt32(v.AuxInt)
4073 sym := auxToSym(v.Aux)
4074 ptr := v_0
4075 if v_1.Op != OpMIPS64MOVVconst || auxIntToInt64(v_1.AuxInt) != 0 {
4076 break
4077 }
4078 mem := v_2
4079 v.reset(OpMIPS64MOVWstorezero)
4080 v.AuxInt = int32ToAuxInt(off)
4081 v.Aux = symToAux(sym)
4082 v.AddArg2(ptr, mem)
4083 return true
4084 }
4085
4086
4087 for {
4088 off := auxIntToInt32(v.AuxInt)
4089 sym := auxToSym(v.Aux)
4090 ptr := v_0
4091 if v_1.Op != OpMIPS64MOVWreg {
4092 break
4093 }
4094 x := v_1.Args[0]
4095 mem := v_2
4096 v.reset(OpMIPS64MOVWstore)
4097 v.AuxInt = int32ToAuxInt(off)
4098 v.Aux = symToAux(sym)
4099 v.AddArg3(ptr, x, mem)
4100 return true
4101 }
4102
4103
4104 for {
4105 off := auxIntToInt32(v.AuxInt)
4106 sym := auxToSym(v.Aux)
4107 ptr := v_0
4108 if v_1.Op != OpMIPS64MOVWUreg {
4109 break
4110 }
4111 x := v_1.Args[0]
4112 mem := v_2
4113 v.reset(OpMIPS64MOVWstore)
4114 v.AuxInt = int32ToAuxInt(off)
4115 v.Aux = symToAux(sym)
4116 v.AddArg3(ptr, x, mem)
4117 return true
4118 }
4119 return false
4120 }
4121 func rewriteValueMIPS64_OpMIPS64MOVWstorezero(v *Value) bool {
4122 v_1 := v.Args[1]
4123 v_0 := v.Args[0]
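// Fold an ADDVconst offset, or a MOVVaddr base with a mergeable symbol, into the store's offset
// when the combined offset still fits in 32 bits.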
4124
4125
4126
4127 for {
4128 off1 := auxIntToInt32(v.AuxInt)
4129 sym := auxToSym(v.Aux)
4130 if v_0.Op != OpMIPS64ADDVconst {
4131 break
4132 }
4133 off2 := auxIntToInt64(v_0.AuxInt)
4134 ptr := v_0.Args[0]
4135 mem := v_1
4136 if !(is32Bit(int64(off1) + off2)) {
4137 break
4138 }
4139 v.reset(OpMIPS64MOVWstorezero)
4140 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
4141 v.Aux = symToAux(sym)
4142 v.AddArg2(ptr, mem)
4143 return true
4144 }
4145
4146
4147
4148 for {
4149 off1 := auxIntToInt32(v.AuxInt)
4150 sym1 := auxToSym(v.Aux)
4151 if v_0.Op != OpMIPS64MOVVaddr {
4152 break
4153 }
4154 off2 := auxIntToInt32(v_0.AuxInt)
4155 sym2 := auxToSym(v_0.Aux)
4156 ptr := v_0.Args[0]
4157 mem := v_1
4158 if !(canMergeSym(sym1, sym2) && is32Bit(int64(off1)+int64(off2))) {
4159 break
4160 }
4161 v.reset(OpMIPS64MOVWstorezero)
4162 v.AuxInt = int32ToAuxInt(off1 + int32(off2))
4163 v.Aux = symToAux(mergeSym(sym1, sym2))
4164 v.AddArg2(ptr, mem)
4165 return true
4166 }
4167 return false
4168 }
4169 func rewriteValueMIPS64_OpMIPS64NEGV(v *Value) bool {
4170 v_0 := v.Args[0]
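// (NEGV (MOVVconst [c])) => (MOVVconst [-c])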
4171
4172
4173 for {
4174 if v_0.Op != OpMIPS64MOVVconst {
4175 break
4176 }
4177 c := auxIntToInt64(v_0.AuxInt)
4178 v.reset(OpMIPS64MOVVconst)
4179 v.AuxInt = int64ToAuxInt(-c)
4180 return true
4181 }
4182 return false
4183 }
4184 func rewriteValueMIPS64_OpMIPS64NOR(v *Value) bool {
4185 v_1 := v.Args[1]
4186 v_0 := v.Args[0]
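// (NOR x (MOVVconst [c])) && is32Bit(c) => (NORconst [c] x), also with operands swapped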
4187
4188
4189
4190 for {
4191 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
4192 x := v_0
4193 if v_1.Op != OpMIPS64MOVVconst {
4194 continue
4195 }
4196 c := auxIntToInt64(v_1.AuxInt)
4197 if !(is32Bit(c)) {
4198 continue
4199 }
4200 v.reset(OpMIPS64NORconst)
4201 v.AuxInt = int64ToAuxInt(c)
4202 v.AddArg(x)
4203 return true
4204 }
4205 break
4206 }
4207 return false
4208 }
4209 func rewriteValueMIPS64_OpMIPS64NORconst(v *Value) bool {
4210 v_0 := v.Args[0]
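// (NORconst [c] (MOVVconst [d])) => (MOVVconst [^(c|d)])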
4211
4212
4213 for {
4214 c := auxIntToInt64(v.AuxInt)
4215 if v_0.Op != OpMIPS64MOVVconst {
4216 break
4217 }
4218 d := auxIntToInt64(v_0.AuxInt)
4219 v.reset(OpMIPS64MOVVconst)
4220 v.AuxInt = int64ToAuxInt(^(c | d))
4221 return true
4222 }
4223 return false
4224 }
4225 func rewriteValueMIPS64_OpMIPS64OR(v *Value) bool {
4226 v_1 := v.Args[1]
4227 v_0 := v.Args[0]
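// (OR x (MOVVconst [c])) && is32Bit(c) => (ORconst [c] x), also with operands swapped
// (OR x x) => x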
4228
4229
4230
4231 for {
4232 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
4233 x := v_0
4234 if v_1.Op != OpMIPS64MOVVconst {
4235 continue
4236 }
4237 c := auxIntToInt64(v_1.AuxInt)
4238 if !(is32Bit(c)) {
4239 continue
4240 }
4241 v.reset(OpMIPS64ORconst)
4242 v.AuxInt = int64ToAuxInt(c)
4243 v.AddArg(x)
4244 return true
4245 }
4246 break
4247 }
4248
4249
4250 for {
4251 x := v_0
4252 if x != v_1 {
4253 break
4254 }
4255 v.copyOf(x)
4256 return true
4257 }
4258 return false
4259 }
4260 func rewriteValueMIPS64_OpMIPS64ORconst(v *Value) bool {
4261 v_0 := v.Args[0]
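// (ORconst [0] x) => x
// (ORconst [-1] _) => (MOVVconst [-1])
// (ORconst [c] (MOVVconst [d])) => (MOVVconst [c|d])
// (ORconst [c] (ORconst [d] x)) && is32Bit(c|d) => (ORconst [c|d] x)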
4262
4263
4264 for {
4265 if auxIntToInt64(v.AuxInt) != 0 {
4266 break
4267 }
4268 x := v_0
4269 v.copyOf(x)
4270 return true
4271 }
4272
4273
4274 for {
4275 if auxIntToInt64(v.AuxInt) != -1 {
4276 break
4277 }
4278 v.reset(OpMIPS64MOVVconst)
4279 v.AuxInt = int64ToAuxInt(-1)
4280 return true
4281 }
4282
4283
4284 for {
4285 c := auxIntToInt64(v.AuxInt)
4286 if v_0.Op != OpMIPS64MOVVconst {
4287 break
4288 }
4289 d := auxIntToInt64(v_0.AuxInt)
4290 v.reset(OpMIPS64MOVVconst)
4291 v.AuxInt = int64ToAuxInt(c | d)
4292 return true
4293 }
4294
4295
4296
4297 for {
4298 c := auxIntToInt64(v.AuxInt)
4299 if v_0.Op != OpMIPS64ORconst {
4300 break
4301 }
4302 d := auxIntToInt64(v_0.AuxInt)
4303 x := v_0.Args[0]
4304 if !(is32Bit(c | d)) {
4305 break
4306 }
4307 v.reset(OpMIPS64ORconst)
4308 v.AuxInt = int64ToAuxInt(c | d)
4309 v.AddArg(x)
4310 return true
4311 }
4312 return false
4313 }
4314 func rewriteValueMIPS64_OpMIPS64SGT(v *Value) bool {
4315 v_1 := v.Args[1]
4316 v_0 := v.Args[0]
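// (SGT (MOVVconst [c]) x) && is32Bit(c) => (SGTconst [c] x)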
4317
4318
4319
4320 for {
4321 if v_0.Op != OpMIPS64MOVVconst {
4322 break
4323 }
4324 c := auxIntToInt64(v_0.AuxInt)
4325 x := v_1
4326 if !(is32Bit(c)) {
4327 break
4328 }
4329 v.reset(OpMIPS64SGTconst)
4330 v.AuxInt = int64ToAuxInt(c)
4331 v.AddArg(x)
4332 return true
4333 }
4334 return false
4335 }
4336 func rewriteValueMIPS64_OpMIPS64SGTU(v *Value) bool {
4337 v_1 := v.Args[1]
4338 v_0 := v.Args[0]
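// (SGTU (MOVVconst [c]) x) && is32Bit(c) => (SGTUconst [c] x)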
4339
4340
4341
4342 for {
4343 if v_0.Op != OpMIPS64MOVVconst {
4344 break
4345 }
4346 c := auxIntToInt64(v_0.AuxInt)
4347 x := v_1
4348 if !(is32Bit(c)) {
4349 break
4350 }
4351 v.reset(OpMIPS64SGTUconst)
4352 v.AuxInt = int64ToAuxInt(c)
4353 v.AddArg(x)
4354 return true
4355 }
4356 return false
4357 }
4358 func rewriteValueMIPS64_OpMIPS64SGTUconst(v *Value) bool {
4359 v_0 := v.Args[0]
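// The rules below evaluate SGTUconst when the comparison is statically decidable: against another
// constant, against zero-extended sub-word values (MOVBUreg, MOVHUreg) whose maximum is below c,
// against ANDconst masks bounded by c, and against SRLVconst results whose maximum is below c.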
4360
4361
4362
4363 for {
4364 c := auxIntToInt64(v.AuxInt)
4365 if v_0.Op != OpMIPS64MOVVconst {
4366 break
4367 }
4368 d := auxIntToInt64(v_0.AuxInt)
4369 if !(uint64(c) > uint64(d)) {
4370 break
4371 }
4372 v.reset(OpMIPS64MOVVconst)
4373 v.AuxInt = int64ToAuxInt(1)
4374 return true
4375 }
4376
4377
4378
4379 for {
4380 c := auxIntToInt64(v.AuxInt)
4381 if v_0.Op != OpMIPS64MOVVconst {
4382 break
4383 }
4384 d := auxIntToInt64(v_0.AuxInt)
4385 if !(uint64(c) <= uint64(d)) {
4386 break
4387 }
4388 v.reset(OpMIPS64MOVVconst)
4389 v.AuxInt = int64ToAuxInt(0)
4390 return true
4391 }
4392
4393
4394
4395 for {
4396 c := auxIntToInt64(v.AuxInt)
4397 if v_0.Op != OpMIPS64MOVBUreg || !(0xff < uint64(c)) {
4398 break
4399 }
4400 v.reset(OpMIPS64MOVVconst)
4401 v.AuxInt = int64ToAuxInt(1)
4402 return true
4403 }
4404
4405
4406
4407 for {
4408 c := auxIntToInt64(v.AuxInt)
4409 if v_0.Op != OpMIPS64MOVHUreg || !(0xffff < uint64(c)) {
4410 break
4411 }
4412 v.reset(OpMIPS64MOVVconst)
4413 v.AuxInt = int64ToAuxInt(1)
4414 return true
4415 }
4416
4417
4418
4419 for {
4420 c := auxIntToInt64(v.AuxInt)
4421 if v_0.Op != OpMIPS64ANDconst {
4422 break
4423 }
4424 m := auxIntToInt64(v_0.AuxInt)
4425 if !(uint64(m) < uint64(c)) {
4426 break
4427 }
4428 v.reset(OpMIPS64MOVVconst)
4429 v.AuxInt = int64ToAuxInt(1)
4430 return true
4431 }
4432
4433
4434
4435 for {
4436 c := auxIntToInt64(v.AuxInt)
4437 if v_0.Op != OpMIPS64SRLVconst {
4438 break
4439 }
4440 d := auxIntToInt64(v_0.AuxInt)
4441 if !(0 < d && d <= 63 && 0xffffffffffffffff>>uint64(d) < uint64(c)) {
4442 break
4443 }
4444 v.reset(OpMIPS64MOVVconst)
4445 v.AuxInt = int64ToAuxInt(1)
4446 return true
4447 }
4448 return false
4449 }
4450 func rewriteValueMIPS64_OpMIPS64SGTconst(v *Value) bool {
4451 v_0 := v.Args[0]
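// The rules below evaluate SGTconst when the comparison is statically decidable: against another
// constant, and against MOVBreg/MOVBUreg/MOVHreg/MOVHUreg/MOVWUreg, ANDconst, and SRLVconst
// operands whose value range is known to lie entirely below or at/above c.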
4452
4453
4454
4455 for {
4456 c := auxIntToInt64(v.AuxInt)
4457 if v_0.Op != OpMIPS64MOVVconst {
4458 break
4459 }
4460 d := auxIntToInt64(v_0.AuxInt)
4461 if !(c > d) {
4462 break
4463 }
4464 v.reset(OpMIPS64MOVVconst)
4465 v.AuxInt = int64ToAuxInt(1)
4466 return true
4467 }
4468
4469
4470
4471 for {
4472 c := auxIntToInt64(v.AuxInt)
4473 if v_0.Op != OpMIPS64MOVVconst {
4474 break
4475 }
4476 d := auxIntToInt64(v_0.AuxInt)
4477 if !(c <= d) {
4478 break
4479 }
4480 v.reset(OpMIPS64MOVVconst)
4481 v.AuxInt = int64ToAuxInt(0)
4482 return true
4483 }
4484
4485
4486
4487 for {
4488 c := auxIntToInt64(v.AuxInt)
4489 if v_0.Op != OpMIPS64MOVBreg || !(0x7f < c) {
4490 break
4491 }
4492 v.reset(OpMIPS64MOVVconst)
4493 v.AuxInt = int64ToAuxInt(1)
4494 return true
4495 }
4496
4497
4498
4499 for {
4500 c := auxIntToInt64(v.AuxInt)
4501 if v_0.Op != OpMIPS64MOVBreg || !(c <= -0x80) {
4502 break
4503 }
4504 v.reset(OpMIPS64MOVVconst)
4505 v.AuxInt = int64ToAuxInt(0)
4506 return true
4507 }
4508
4509
4510
4511 for {
4512 c := auxIntToInt64(v.AuxInt)
4513 if v_0.Op != OpMIPS64MOVBUreg || !(0xff < c) {
4514 break
4515 }
4516 v.reset(OpMIPS64MOVVconst)
4517 v.AuxInt = int64ToAuxInt(1)
4518 return true
4519 }
4520
4521
4522
4523 for {
4524 c := auxIntToInt64(v.AuxInt)
4525 if v_0.Op != OpMIPS64MOVBUreg || !(c < 0) {
4526 break
4527 }
4528 v.reset(OpMIPS64MOVVconst)
4529 v.AuxInt = int64ToAuxInt(0)
4530 return true
4531 }
4532
4533
4534
4535 for {
4536 c := auxIntToInt64(v.AuxInt)
4537 if v_0.Op != OpMIPS64MOVHreg || !(0x7fff < c) {
4538 break
4539 }
4540 v.reset(OpMIPS64MOVVconst)
4541 v.AuxInt = int64ToAuxInt(1)
4542 return true
4543 }
4544
4545
4546
4547 for {
4548 c := auxIntToInt64(v.AuxInt)
4549 if v_0.Op != OpMIPS64MOVHreg || !(c <= -0x8000) {
4550 break
4551 }
4552 v.reset(OpMIPS64MOVVconst)
4553 v.AuxInt = int64ToAuxInt(0)
4554 return true
4555 }
4556
4557
4558
4559 for {
4560 c := auxIntToInt64(v.AuxInt)
4561 if v_0.Op != OpMIPS64MOVHUreg || !(0xffff < c) {
4562 break
4563 }
4564 v.reset(OpMIPS64MOVVconst)
4565 v.AuxInt = int64ToAuxInt(1)
4566 return true
4567 }
4568
4569
4570
4571 for {
4572 c := auxIntToInt64(v.AuxInt)
4573 if v_0.Op != OpMIPS64MOVHUreg || !(c < 0) {
4574 break
4575 }
4576 v.reset(OpMIPS64MOVVconst)
4577 v.AuxInt = int64ToAuxInt(0)
4578 return true
4579 }
4580
4581
4582
4583 for {
4584 c := auxIntToInt64(v.AuxInt)
4585 if v_0.Op != OpMIPS64MOVWUreg || !(c < 0) {
4586 break
4587 }
4588 v.reset(OpMIPS64MOVVconst)
4589 v.AuxInt = int64ToAuxInt(0)
4590 return true
4591 }
4592
4593
4594
4595 for {
4596 c := auxIntToInt64(v.AuxInt)
4597 if v_0.Op != OpMIPS64ANDconst {
4598 break
4599 }
4600 m := auxIntToInt64(v_0.AuxInt)
4601 if !(0 <= m && m < c) {
4602 break
4603 }
4604 v.reset(OpMIPS64MOVVconst)
4605 v.AuxInt = int64ToAuxInt(1)
4606 return true
4607 }
4608
4609
4610
4611 for {
4612 c := auxIntToInt64(v.AuxInt)
4613 if v_0.Op != OpMIPS64SRLVconst {
4614 break
4615 }
4616 d := auxIntToInt64(v_0.AuxInt)
4617 if !(0 <= c && 0 < d && d <= 63 && 0xffffffffffffffff>>uint64(d) < uint64(c)) {
4618 break
4619 }
4620 v.reset(OpMIPS64MOVVconst)
4621 v.AuxInt = int64ToAuxInt(1)
4622 return true
4623 }
4624 return false
4625 }
4626 func rewriteValueMIPS64_OpMIPS64SLLV(v *Value) bool {
4627 v_1 := v.Args[1]
4628 v_0 := v.Args[0]
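// (SLLV _ (MOVVconst [c])) && uint64(c) >= 64 => (MOVVconst [0])
// (SLLV x (MOVVconst [c])) => (SLLVconst [c] x)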
4629
4630
4631
4632 for {
4633 if v_1.Op != OpMIPS64MOVVconst {
4634 break
4635 }
4636 c := auxIntToInt64(v_1.AuxInt)
4637 if !(uint64(c) >= 64) {
4638 break
4639 }
4640 v.reset(OpMIPS64MOVVconst)
4641 v.AuxInt = int64ToAuxInt(0)
4642 return true
4643 }
4644
4645
4646 for {
4647 x := v_0
4648 if v_1.Op != OpMIPS64MOVVconst {
4649 break
4650 }
4651 c := auxIntToInt64(v_1.AuxInt)
4652 v.reset(OpMIPS64SLLVconst)
4653 v.AuxInt = int64ToAuxInt(c)
4654 v.AddArg(x)
4655 return true
4656 }
4657 return false
4658 }
4659 func rewriteValueMIPS64_OpMIPS64SLLVconst(v *Value) bool {
4660 v_0 := v.Args[0]
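// (SLLVconst [c] (MOVVconst [d])) => (MOVVconst [d<<uint64(c)])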
4661
4662
4663 for {
4664 c := auxIntToInt64(v.AuxInt)
4665 if v_0.Op != OpMIPS64MOVVconst {
4666 break
4667 }
4668 d := auxIntToInt64(v_0.AuxInt)
4669 v.reset(OpMIPS64MOVVconst)
4670 v.AuxInt = int64ToAuxInt(d << uint64(c))
4671 return true
4672 }
4673 return false
4674 }
4675 func rewriteValueMIPS64_OpMIPS64SRAV(v *Value) bool {
4676 v_1 := v.Args[1]
4677 v_0 := v.Args[0]
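// (SRAV x (MOVVconst [c])) && uint64(c) >= 64 => (SRAVconst [63] x)
// (SRAV x (MOVVconst [c])) => (SRAVconst [c] x)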
4678
4679
4680
4681 for {
4682 x := v_0
4683 if v_1.Op != OpMIPS64MOVVconst {
4684 break
4685 }
4686 c := auxIntToInt64(v_1.AuxInt)
4687 if !(uint64(c) >= 64) {
4688 break
4689 }
4690 v.reset(OpMIPS64SRAVconst)
4691 v.AuxInt = int64ToAuxInt(63)
4692 v.AddArg(x)
4693 return true
4694 }
4695
4696
4697 for {
4698 x := v_0
4699 if v_1.Op != OpMIPS64MOVVconst {
4700 break
4701 }
4702 c := auxIntToInt64(v_1.AuxInt)
4703 v.reset(OpMIPS64SRAVconst)
4704 v.AuxInt = int64ToAuxInt(c)
4705 v.AddArg(x)
4706 return true
4707 }
4708 return false
4709 }
4710 func rewriteValueMIPS64_OpMIPS64SRAVconst(v *Value) bool {
4711 v_0 := v.Args[0]
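// (SRAVconst [c] (MOVVconst [d])) => (MOVVconst [d>>uint64(c)])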
4712
4713
4714 for {
4715 c := auxIntToInt64(v.AuxInt)
4716 if v_0.Op != OpMIPS64MOVVconst {
4717 break
4718 }
4719 d := auxIntToInt64(v_0.AuxInt)
4720 v.reset(OpMIPS64MOVVconst)
4721 v.AuxInt = int64ToAuxInt(d >> uint64(c))
4722 return true
4723 }
4724 return false
4725 }
4726 func rewriteValueMIPS64_OpMIPS64SRLV(v *Value) bool {
4727 v_1 := v.Args[1]
4728 v_0 := v.Args[0]
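// (SRLV _ (MOVVconst [c])) && uint64(c) >= 64 => (MOVVconst [0])
// (SRLV x (MOVVconst [c])) => (SRLVconst [c] x)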
4729
4730
4731
4732 for {
4733 if v_1.Op != OpMIPS64MOVVconst {
4734 break
4735 }
4736 c := auxIntToInt64(v_1.AuxInt)
4737 if !(uint64(c) >= 64) {
4738 break
4739 }
4740 v.reset(OpMIPS64MOVVconst)
4741 v.AuxInt = int64ToAuxInt(0)
4742 return true
4743 }
4744
4745
4746 for {
4747 x := v_0
4748 if v_1.Op != OpMIPS64MOVVconst {
4749 break
4750 }
4751 c := auxIntToInt64(v_1.AuxInt)
4752 v.reset(OpMIPS64SRLVconst)
4753 v.AuxInt = int64ToAuxInt(c)
4754 v.AddArg(x)
4755 return true
4756 }
4757 return false
4758 }
4759 func rewriteValueMIPS64_OpMIPS64SRLVconst(v *Value) bool {
4760 v_0 := v.Args[0]
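// (SRLVconst [c] (MOVVconst [d])) => (MOVVconst [int64(uint64(d)>>uint64(c))])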
4761
4762
4763 for {
4764 c := auxIntToInt64(v.AuxInt)
4765 if v_0.Op != OpMIPS64MOVVconst {
4766 break
4767 }
4768 d := auxIntToInt64(v_0.AuxInt)
4769 v.reset(OpMIPS64MOVVconst)
4770 v.AuxInt = int64ToAuxInt(int64(uint64(d) >> uint64(c)))
4771 return true
4772 }
4773 return false
4774 }
4775 func rewriteValueMIPS64_OpMIPS64SUBV(v *Value) bool {
4776 v_1 := v.Args[1]
4777 v_0 := v.Args[0]
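// (SUBV x (MOVVconst [c])) && is32Bit(c) => (SUBVconst [c] x)
// (SUBV x x) => (MOVVconst [0])
// (SUBV (MOVVconst [0]) x) => (NEGV x)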
4778
4779
4780
4781 for {
4782 x := v_0
4783 if v_1.Op != OpMIPS64MOVVconst {
4784 break
4785 }
4786 c := auxIntToInt64(v_1.AuxInt)
4787 if !(is32Bit(c)) {
4788 break
4789 }
4790 v.reset(OpMIPS64SUBVconst)
4791 v.AuxInt = int64ToAuxInt(c)
4792 v.AddArg(x)
4793 return true
4794 }
4795
4796
4797 for {
4798 x := v_0
4799 if x != v_1 {
4800 break
4801 }
4802 v.reset(OpMIPS64MOVVconst)
4803 v.AuxInt = int64ToAuxInt(0)
4804 return true
4805 }
4806
4807
4808 for {
4809 if v_0.Op != OpMIPS64MOVVconst || auxIntToInt64(v_0.AuxInt) != 0 {
4810 break
4811 }
4812 x := v_1
4813 v.reset(OpMIPS64NEGV)
4814 v.AddArg(x)
4815 return true
4816 }
4817 return false
4818 }
4819 func rewriteValueMIPS64_OpMIPS64SUBVconst(v *Value) bool {
4820 v_0 := v.Args[0]
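// (SUBVconst [0] x) => x
// (SUBVconst [c] (MOVVconst [d])) => (MOVVconst [d-c])
// (SUBVconst [c] (SUBVconst [d] x)) && is32Bit(-c-d) => (ADDVconst [-c-d] x)
// (SUBVconst [c] (ADDVconst [d] x)) && is32Bit(-c+d) => (ADDVconst [-c+d] x)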
4821
4822
4823 for {
4824 if auxIntToInt64(v.AuxInt) != 0 {
4825 break
4826 }
4827 x := v_0
4828 v.copyOf(x)
4829 return true
4830 }
4831
4832
4833 for {
4834 c := auxIntToInt64(v.AuxInt)
4835 if v_0.Op != OpMIPS64MOVVconst {
4836 break
4837 }
4838 d := auxIntToInt64(v_0.AuxInt)
4839 v.reset(OpMIPS64MOVVconst)
4840 v.AuxInt = int64ToAuxInt(d - c)
4841 return true
4842 }
4843
4844
4845
4846 for {
4847 c := auxIntToInt64(v.AuxInt)
4848 if v_0.Op != OpMIPS64SUBVconst {
4849 break
4850 }
4851 d := auxIntToInt64(v_0.AuxInt)
4852 x := v_0.Args[0]
4853 if !(is32Bit(-c - d)) {
4854 break
4855 }
4856 v.reset(OpMIPS64ADDVconst)
4857 v.AuxInt = int64ToAuxInt(-c - d)
4858 v.AddArg(x)
4859 return true
4860 }
4861
4862
4863
4864 for {
4865 c := auxIntToInt64(v.AuxInt)
4866 if v_0.Op != OpMIPS64ADDVconst {
4867 break
4868 }
4869 d := auxIntToInt64(v_0.AuxInt)
4870 x := v_0.Args[0]
4871 if !(is32Bit(-c + d)) {
4872 break
4873 }
4874 v.reset(OpMIPS64ADDVconst)
4875 v.AuxInt = int64ToAuxInt(-c + d)
4876 v.AddArg(x)
4877 return true
4878 }
4879 return false
4880 }
4881 func rewriteValueMIPS64_OpMIPS64XOR(v *Value) bool {
4882 v_1 := v.Args[1]
4883 v_0 := v.Args[0]
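// (XOR x (MOVVconst [c])) && is32Bit(c) => (XORconst [c] x), also with operands swapped
// (XOR x x) => (MOVVconst [0])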
4884
4885
4886
4887 for {
4888 for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
4889 x := v_0
4890 if v_1.Op != OpMIPS64MOVVconst {
4891 continue
4892 }
4893 c := auxIntToInt64(v_1.AuxInt)
4894 if !(is32Bit(c)) {
4895 continue
4896 }
4897 v.reset(OpMIPS64XORconst)
4898 v.AuxInt = int64ToAuxInt(c)
4899 v.AddArg(x)
4900 return true
4901 }
4902 break
4903 }
4904
4905
4906 for {
4907 x := v_0
4908 if x != v_1 {
4909 break
4910 }
4911 v.reset(OpMIPS64MOVVconst)
4912 v.AuxInt = int64ToAuxInt(0)
4913 return true
4914 }
4915 return false
4916 }
4917 func rewriteValueMIPS64_OpMIPS64XORconst(v *Value) bool {
4918 v_0 := v.Args[0]
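// (XORconst [0] x) => x
// (XORconst [-1] x) => (NORconst [0] x)
// (XORconst [c] (MOVVconst [d])) => (MOVVconst [c^d])
// (XORconst [c] (XORconst [d] x)) && is32Bit(c^d) => (XORconst [c^d] x)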
4919
4920
4921 for {
4922 if auxIntToInt64(v.AuxInt) != 0 {
4923 break
4924 }
4925 x := v_0
4926 v.copyOf(x)
4927 return true
4928 }
4929
4930
4931 for {
4932 if auxIntToInt64(v.AuxInt) != -1 {
4933 break
4934 }
4935 x := v_0
4936 v.reset(OpMIPS64NORconst)
4937 v.AuxInt = int64ToAuxInt(0)
4938 v.AddArg(x)
4939 return true
4940 }
4941
4942
4943 for {
4944 c := auxIntToInt64(v.AuxInt)
4945 if v_0.Op != OpMIPS64MOVVconst {
4946 break
4947 }
4948 d := auxIntToInt64(v_0.AuxInt)
4949 v.reset(OpMIPS64MOVVconst)
4950 v.AuxInt = int64ToAuxInt(c ^ d)
4951 return true
4952 }
4953
4954
4955
4956 for {
4957 c := auxIntToInt64(v.AuxInt)
4958 if v_0.Op != OpMIPS64XORconst {
4959 break
4960 }
4961 d := auxIntToInt64(v_0.AuxInt)
4962 x := v_0.Args[0]
4963 if !(is32Bit(c ^ d)) {
4964 break
4965 }
4966 v.reset(OpMIPS64XORconst)
4967 v.AuxInt = int64ToAuxInt(c ^ d)
4968 v.AddArg(x)
4969 return true
4970 }
4971 return false
4972 }
4973 func rewriteValueMIPS64_OpMod16(v *Value) bool {
4974 v_1 := v.Args[1]
4975 v_0 := v.Args[0]
4976 b := v.Block
4977 typ := &b.Func.Config.Types
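// Mod16 x y => (Select0 (DIVV (SignExt16to64 x) (SignExt16to64 y))); Select0 of the DIVV tuple is the remainder.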
4978
4979
4980 for {
4981 x := v_0
4982 y := v_1
4983 v.reset(OpSelect0)
4984 v0 := b.NewValue0(v.Pos, OpMIPS64DIVV, types.NewTuple(typ.Int64, typ.Int64))
4985 v1 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
4986 v1.AddArg(x)
4987 v2 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
4988 v2.AddArg(y)
4989 v0.AddArg2(v1, v2)
4990 v.AddArg(v0)
4991 return true
4992 }
4993 }
4994 func rewriteValueMIPS64_OpMod16u(v *Value) bool {
4995 v_1 := v.Args[1]
4996 v_0 := v.Args[0]
4997 b := v.Block
4998 typ := &b.Func.Config.Types
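// Mod16u x y => (Select0 (DIVVU (ZeroExt16to64 x) (ZeroExt16to64 y)))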
4999
5000
5001 for {
5002 x := v_0
5003 y := v_1
5004 v.reset(OpSelect0)
5005 v0 := b.NewValue0(v.Pos, OpMIPS64DIVVU, types.NewTuple(typ.UInt64, typ.UInt64))
5006 v1 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
5007 v1.AddArg(x)
5008 v2 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
5009 v2.AddArg(y)
5010 v0.AddArg2(v1, v2)
5011 v.AddArg(v0)
5012 return true
5013 }
5014 }
5015 func rewriteValueMIPS64_OpMod32(v *Value) bool {
5016 v_1 := v.Args[1]
5017 v_0 := v.Args[0]
5018 b := v.Block
5019 typ := &b.Func.Config.Types
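// Mod32 x y => (Select0 (DIVV (SignExt32to64 x) (SignExt32to64 y)))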
5020
5021
5022 for {
5023 x := v_0
5024 y := v_1
5025 v.reset(OpSelect0)
5026 v0 := b.NewValue0(v.Pos, OpMIPS64DIVV, types.NewTuple(typ.Int64, typ.Int64))
5027 v1 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
5028 v1.AddArg(x)
5029 v2 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
5030 v2.AddArg(y)
5031 v0.AddArg2(v1, v2)
5032 v.AddArg(v0)
5033 return true
5034 }
5035 }
5036 func rewriteValueMIPS64_OpMod32u(v *Value) bool {
5037 v_1 := v.Args[1]
5038 v_0 := v.Args[0]
5039 b := v.Block
5040 typ := &b.Func.Config.Types
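// Mod32u x y => (Select0 (DIVVU (ZeroExt32to64 x) (ZeroExt32to64 y)))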
5041
5042
5043 for {
5044 x := v_0
5045 y := v_1
5046 v.reset(OpSelect0)
5047 v0 := b.NewValue0(v.Pos, OpMIPS64DIVVU, types.NewTuple(typ.UInt64, typ.UInt64))
5048 v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
5049 v1.AddArg(x)
5050 v2 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
5051 v2.AddArg(y)
5052 v0.AddArg2(v1, v2)
5053 v.AddArg(v0)
5054 return true
5055 }
5056 }
5057 func rewriteValueMIPS64_OpMod64(v *Value) bool {
5058 v_1 := v.Args[1]
5059 v_0 := v.Args[0]
5060 b := v.Block
5061 typ := &b.Func.Config.Types
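// Mod64 x y => (Select0 (DIVV x y))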
5062
5063
5064 for {
5065 x := v_0
5066 y := v_1
5067 v.reset(OpSelect0)
5068 v0 := b.NewValue0(v.Pos, OpMIPS64DIVV, types.NewTuple(typ.Int64, typ.Int64))
5069 v0.AddArg2(x, y)
5070 v.AddArg(v0)
5071 return true
5072 }
5073 }
5074 func rewriteValueMIPS64_OpMod64u(v *Value) bool {
5075 v_1 := v.Args[1]
5076 v_0 := v.Args[0]
5077 b := v.Block
5078 typ := &b.Func.Config.Types
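// Mod64u x y => (Select0 (DIVVU x y))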
5079
5080
5081 for {
5082 x := v_0
5083 y := v_1
5084 v.reset(OpSelect0)
5085 v0 := b.NewValue0(v.Pos, OpMIPS64DIVVU, types.NewTuple(typ.UInt64, typ.UInt64))
5086 v0.AddArg2(x, y)
5087 v.AddArg(v0)
5088 return true
5089 }
5090 }
5091 func rewriteValueMIPS64_OpMod8(v *Value) bool {
5092 v_1 := v.Args[1]
5093 v_0 := v.Args[0]
5094 b := v.Block
5095 typ := &b.Func.Config.Types
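// Mod8 x y => (Select0 (DIVV (SignExt8to64 x) (SignExt8to64 y)))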
5096
5097
5098 for {
5099 x := v_0
5100 y := v_1
5101 v.reset(OpSelect0)
5102 v0 := b.NewValue0(v.Pos, OpMIPS64DIVV, types.NewTuple(typ.Int64, typ.Int64))
5103 v1 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
5104 v1.AddArg(x)
5105 v2 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
5106 v2.AddArg(y)
5107 v0.AddArg2(v1, v2)
5108 v.AddArg(v0)
5109 return true
5110 }
5111 }
5112 func rewriteValueMIPS64_OpMod8u(v *Value) bool {
5113 v_1 := v.Args[1]
5114 v_0 := v.Args[0]
5115 b := v.Block
5116 typ := &b.Func.Config.Types
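// Mod8u x y => (Select0 (DIVVU (ZeroExt8to64 x) (ZeroExt8to64 y)))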
5117
5118
5119 for {
5120 x := v_0
5121 y := v_1
5122 v.reset(OpSelect0)
5123 v0 := b.NewValue0(v.Pos, OpMIPS64DIVVU, types.NewTuple(typ.UInt64, typ.UInt64))
5124 v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
5125 v1.AddArg(x)
5126 v2 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
5127 v2.AddArg(y)
5128 v0.AddArg2(v1, v2)
5129 v.AddArg(v0)
5130 return true
5131 }
5132 }
5133 func rewriteValueMIPS64_OpMove(v *Value) bool {
5134 v_2 := v.Args[2]
5135 v_1 := v.Args[1]
5136 v_0 := v.Args[0]
5137 b := v.Block
5138 config := b.Func.Config
5139 typ := &b.Func.Config.Types
5140
5141
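// match: (Move [0] _ _ mem)
// result: mem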
5142 for {
5143 if auxIntToInt64(v.AuxInt) != 0 {
5144 break
5145 }
5146 mem := v_2
5147 v.copyOf(mem)
5148 return true
5149 }
5150
5151
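// match: (Move [1] dst src mem)
// result: (MOVBstore dst (MOVBload src mem) mem)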
5152 for {
5153 if auxIntToInt64(v.AuxInt) != 1 {
5154 break
5155 }
5156 dst := v_0
5157 src := v_1
5158 mem := v_2
5159 v.reset(OpMIPS64MOVBstore)
5160 v0 := b.NewValue0(v.Pos, OpMIPS64MOVBload, typ.Int8)
5161 v0.AddArg2(src, mem)
5162 v.AddArg3(dst, v0, mem)
5163 return true
5164 }
5165
5166
5167
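// match: (Move [2] {t} dst src mem)
// cond: t.Alignment()%2 == 0
// result: (MOVHstore dst (MOVHload src mem) mem)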
5168 for {
5169 if auxIntToInt64(v.AuxInt) != 2 {
5170 break
5171 }
5172 t := auxToType(v.Aux)
5173 dst := v_0
5174 src := v_1
5175 mem := v_2
5176 if !(t.Alignment()%2 == 0) {
5177 break
5178 }
5179 v.reset(OpMIPS64MOVHstore)
5180 v0 := b.NewValue0(v.Pos, OpMIPS64MOVHload, typ.Int16)
5181 v0.AddArg2(src, mem)
5182 v.AddArg3(dst, v0, mem)
5183 return true
5184 }
5185
5186
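// match: (Move [2] dst src mem)
// result: (MOVBstore [1] dst (MOVBload [1] src mem) (MOVBstore dst (MOVBload src mem) mem))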
5187 for {
5188 if auxIntToInt64(v.AuxInt) != 2 {
5189 break
5190 }
5191 dst := v_0
5192 src := v_1
5193 mem := v_2
5194 v.reset(OpMIPS64MOVBstore)
5195 v.AuxInt = int32ToAuxInt(1)
5196 v0 := b.NewValue0(v.Pos, OpMIPS64MOVBload, typ.Int8)
5197 v0.AuxInt = int32ToAuxInt(1)
5198 v0.AddArg2(src, mem)
5199 v1 := b.NewValue0(v.Pos, OpMIPS64MOVBstore, types.TypeMem)
5200 v2 := b.NewValue0(v.Pos, OpMIPS64MOVBload, typ.Int8)
5201 v2.AddArg2(src, mem)
5202 v1.AddArg3(dst, v2, mem)
5203 v.AddArg3(dst, v0, v1)
5204 return true
5205 }
5206
5207
5208
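// match: (Move [4] {t} dst src mem)
// cond: t.Alignment()%4 == 0
// result: (MOVWstore dst (MOVWload src mem) mem)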
5209 for {
5210 if auxIntToInt64(v.AuxInt) != 4 {
5211 break
5212 }
5213 t := auxToType(v.Aux)
5214 dst := v_0
5215 src := v_1
5216 mem := v_2
5217 if !(t.Alignment()%4 == 0) {
5218 break
5219 }
5220 v.reset(OpMIPS64MOVWstore)
5221 v0 := b.NewValue0(v.Pos, OpMIPS64MOVWload, typ.Int32)
5222 v0.AddArg2(src, mem)
5223 v.AddArg3(dst, v0, mem)
5224 return true
5225 }
5226
5227
5228
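// match: (Move [4] {t} dst src mem)
// cond: t.Alignment()%2 == 0
// result: (MOVHstore [2] dst (MOVHload [2] src mem) (MOVHstore dst (MOVHload src mem) mem))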
5229 for {
5230 if auxIntToInt64(v.AuxInt) != 4 {
5231 break
5232 }
5233 t := auxToType(v.Aux)
5234 dst := v_0
5235 src := v_1
5236 mem := v_2
5237 if !(t.Alignment()%2 == 0) {
5238 break
5239 }
5240 v.reset(OpMIPS64MOVHstore)
5241 v.AuxInt = int32ToAuxInt(2)
5242 v0 := b.NewValue0(v.Pos, OpMIPS64MOVHload, typ.Int16)
5243 v0.AuxInt = int32ToAuxInt(2)
5244 v0.AddArg2(src, mem)
5245 v1 := b.NewValue0(v.Pos, OpMIPS64MOVHstore, types.TypeMem)
5246 v2 := b.NewValue0(v.Pos, OpMIPS64MOVHload, typ.Int16)
5247 v2.AddArg2(src, mem)
5248 v1.AddArg3(dst, v2, mem)
5249 v.AddArg3(dst, v0, v1)
5250 return true
5251 }
5252
5253
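// match: (Move [4] dst src mem)
// result: (MOVBstore [3] dst (MOVBload [3] src mem) (MOVBstore [2] dst (MOVBload [2] src mem) (MOVBstore [1] dst (MOVBload [1] src mem) (MOVBstore dst (MOVBload src mem) mem))))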
5254 for {
5255 if auxIntToInt64(v.AuxInt) != 4 {
5256 break
5257 }
5258 dst := v_0
5259 src := v_1
5260 mem := v_2
5261 v.reset(OpMIPS64MOVBstore)
5262 v.AuxInt = int32ToAuxInt(3)
5263 v0 := b.NewValue0(v.Pos, OpMIPS64MOVBload, typ.Int8)
5264 v0.AuxInt = int32ToAuxInt(3)
5265 v0.AddArg2(src, mem)
5266 v1 := b.NewValue0(v.Pos, OpMIPS64MOVBstore, types.TypeMem)
5267 v1.AuxInt = int32ToAuxInt(2)
5268 v2 := b.NewValue0(v.Pos, OpMIPS64MOVBload, typ.Int8)
5269 v2.AuxInt = int32ToAuxInt(2)
5270 v2.AddArg2(src, mem)
5271 v3 := b.NewValue0(v.Pos, OpMIPS64MOVBstore, types.TypeMem)
5272 v3.AuxInt = int32ToAuxInt(1)
5273 v4 := b.NewValue0(v.Pos, OpMIPS64MOVBload, typ.Int8)
5274 v4.AuxInt = int32ToAuxInt(1)
5275 v4.AddArg2(src, mem)
5276 v5 := b.NewValue0(v.Pos, OpMIPS64MOVBstore, types.TypeMem)
5277 v6 := b.NewValue0(v.Pos, OpMIPS64MOVBload, typ.Int8)
5278 v6.AddArg2(src, mem)
5279 v5.AddArg3(dst, v6, mem)
5280 v3.AddArg3(dst, v4, v5)
5281 v1.AddArg3(dst, v2, v3)
5282 v.AddArg3(dst, v0, v1)
5283 return true
5284 }
5285
5286
5287
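// match: (Move [8] {t} dst src mem)
// cond: t.Alignment()%8 == 0
// result: (MOVVstore dst (MOVVload src mem) mem)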
5288 for {
5289 if auxIntToInt64(v.AuxInt) != 8 {
5290 break
5291 }
5292 t := auxToType(v.Aux)
5293 dst := v_0
5294 src := v_1
5295 mem := v_2
5296 if !(t.Alignment()%8 == 0) {
5297 break
5298 }
5299 v.reset(OpMIPS64MOVVstore)
5300 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVload, typ.UInt64)
5301 v0.AddArg2(src, mem)
5302 v.AddArg3(dst, v0, mem)
5303 return true
5304 }
5305
5306
5307
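// match: (Move [8] {t} dst src mem)
// cond: t.Alignment()%4 == 0
// result: (MOVWstore [4] dst (MOVWload [4] src mem) (MOVWstore dst (MOVWload src mem) mem))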
5308 for {
5309 if auxIntToInt64(v.AuxInt) != 8 {
5310 break
5311 }
5312 t := auxToType(v.Aux)
5313 dst := v_0
5314 src := v_1
5315 mem := v_2
5316 if !(t.Alignment()%4 == 0) {
5317 break
5318 }
5319 v.reset(OpMIPS64MOVWstore)
5320 v.AuxInt = int32ToAuxInt(4)
5321 v0 := b.NewValue0(v.Pos, OpMIPS64MOVWload, typ.Int32)
5322 v0.AuxInt = int32ToAuxInt(4)
5323 v0.AddArg2(src, mem)
5324 v1 := b.NewValue0(v.Pos, OpMIPS64MOVWstore, types.TypeMem)
5325 v2 := b.NewValue0(v.Pos, OpMIPS64MOVWload, typ.Int32)
5326 v2.AddArg2(src, mem)
5327 v1.AddArg3(dst, v2, mem)
5328 v.AddArg3(dst, v0, v1)
5329 return true
5330 }
5331
5332
5333
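// match: (Move [8] {t} dst src mem)
// cond: t.Alignment()%2 == 0
// result: (MOVHstore [6] dst (MOVHload [6] src mem) (MOVHstore [4] dst (MOVHload [4] src mem) (MOVHstore [2] dst (MOVHload [2] src mem) (MOVHstore dst (MOVHload src mem) mem))))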
5334 for {
5335 if auxIntToInt64(v.AuxInt) != 8 {
5336 break
5337 }
5338 t := auxToType(v.Aux)
5339 dst := v_0
5340 src := v_1
5341 mem := v_2
5342 if !(t.Alignment()%2 == 0) {
5343 break
5344 }
5345 v.reset(OpMIPS64MOVHstore)
5346 v.AuxInt = int32ToAuxInt(6)
5347 v0 := b.NewValue0(v.Pos, OpMIPS64MOVHload, typ.Int16)
5348 v0.AuxInt = int32ToAuxInt(6)
5349 v0.AddArg2(src, mem)
5350 v1 := b.NewValue0(v.Pos, OpMIPS64MOVHstore, types.TypeMem)
5351 v1.AuxInt = int32ToAuxInt(4)
5352 v2 := b.NewValue0(v.Pos, OpMIPS64MOVHload, typ.Int16)
5353 v2.AuxInt = int32ToAuxInt(4)
5354 v2.AddArg2(src, mem)
5355 v3 := b.NewValue0(v.Pos, OpMIPS64MOVHstore, types.TypeMem)
5356 v3.AuxInt = int32ToAuxInt(2)
5357 v4 := b.NewValue0(v.Pos, OpMIPS64MOVHload, typ.Int16)
5358 v4.AuxInt = int32ToAuxInt(2)
5359 v4.AddArg2(src, mem)
5360 v5 := b.NewValue0(v.Pos, OpMIPS64MOVHstore, types.TypeMem)
5361 v6 := b.NewValue0(v.Pos, OpMIPS64MOVHload, typ.Int16)
5362 v6.AddArg2(src, mem)
5363 v5.AddArg3(dst, v6, mem)
5364 v3.AddArg3(dst, v4, v5)
5365 v1.AddArg3(dst, v2, v3)
5366 v.AddArg3(dst, v0, v1)
5367 return true
5368 }
5369
5370
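// match: (Move [3] dst src mem)
// result: (MOVBstore [2] dst (MOVBload [2] src mem) (MOVBstore [1] dst (MOVBload [1] src mem) (MOVBstore dst (MOVBload src mem) mem)))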
5371 for {
5372 if auxIntToInt64(v.AuxInt) != 3 {
5373 break
5374 }
5375 dst := v_0
5376 src := v_1
5377 mem := v_2
5378 v.reset(OpMIPS64MOVBstore)
5379 v.AuxInt = int32ToAuxInt(2)
5380 v0 := b.NewValue0(v.Pos, OpMIPS64MOVBload, typ.Int8)
5381 v0.AuxInt = int32ToAuxInt(2)
5382 v0.AddArg2(src, mem)
5383 v1 := b.NewValue0(v.Pos, OpMIPS64MOVBstore, types.TypeMem)
5384 v1.AuxInt = int32ToAuxInt(1)
5385 v2 := b.NewValue0(v.Pos, OpMIPS64MOVBload, typ.Int8)
5386 v2.AuxInt = int32ToAuxInt(1)
5387 v2.AddArg2(src, mem)
5388 v3 := b.NewValue0(v.Pos, OpMIPS64MOVBstore, types.TypeMem)
5389 v4 := b.NewValue0(v.Pos, OpMIPS64MOVBload, typ.Int8)
5390 v4.AddArg2(src, mem)
5391 v3.AddArg3(dst, v4, mem)
5392 v1.AddArg3(dst, v2, v3)
5393 v.AddArg3(dst, v0, v1)
5394 return true
5395 }
5396
5397
5398
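// match: (Move [6] {t} dst src mem)
// cond: t.Alignment()%2 == 0
// result: (MOVHstore [4] dst (MOVHload [4] src mem) (MOVHstore [2] dst (MOVHload [2] src mem) (MOVHstore dst (MOVHload src mem) mem)))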
5399 for {
5400 if auxIntToInt64(v.AuxInt) != 6 {
5401 break
5402 }
5403 t := auxToType(v.Aux)
5404 dst := v_0
5405 src := v_1
5406 mem := v_2
5407 if !(t.Alignment()%2 == 0) {
5408 break
5409 }
5410 v.reset(OpMIPS64MOVHstore)
5411 v.AuxInt = int32ToAuxInt(4)
5412 v0 := b.NewValue0(v.Pos, OpMIPS64MOVHload, typ.Int16)
5413 v0.AuxInt = int32ToAuxInt(4)
5414 v0.AddArg2(src, mem)
5415 v1 := b.NewValue0(v.Pos, OpMIPS64MOVHstore, types.TypeMem)
5416 v1.AuxInt = int32ToAuxInt(2)
5417 v2 := b.NewValue0(v.Pos, OpMIPS64MOVHload, typ.Int16)
5418 v2.AuxInt = int32ToAuxInt(2)
5419 v2.AddArg2(src, mem)
5420 v3 := b.NewValue0(v.Pos, OpMIPS64MOVHstore, types.TypeMem)
5421 v4 := b.NewValue0(v.Pos, OpMIPS64MOVHload, typ.Int16)
5422 v4.AddArg2(src, mem)
5423 v3.AddArg3(dst, v4, mem)
5424 v1.AddArg3(dst, v2, v3)
5425 v.AddArg3(dst, v0, v1)
5426 return true
5427 }
5428
5429
5430
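// match: (Move [12] {t} dst src mem)
// cond: t.Alignment()%4 == 0
// result: (MOVWstore [8] dst (MOVWload [8] src mem) (MOVWstore [4] dst (MOVWload [4] src mem) (MOVWstore dst (MOVWload src mem) mem)))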
5431 for {
5432 if auxIntToInt64(v.AuxInt) != 12 {
5433 break
5434 }
5435 t := auxToType(v.Aux)
5436 dst := v_0
5437 src := v_1
5438 mem := v_2
5439 if !(t.Alignment()%4 == 0) {
5440 break
5441 }
5442 v.reset(OpMIPS64MOVWstore)
5443 v.AuxInt = int32ToAuxInt(8)
5444 v0 := b.NewValue0(v.Pos, OpMIPS64MOVWload, typ.Int32)
5445 v0.AuxInt = int32ToAuxInt(8)
5446 v0.AddArg2(src, mem)
5447 v1 := b.NewValue0(v.Pos, OpMIPS64MOVWstore, types.TypeMem)
5448 v1.AuxInt = int32ToAuxInt(4)
5449 v2 := b.NewValue0(v.Pos, OpMIPS64MOVWload, typ.Int32)
5450 v2.AuxInt = int32ToAuxInt(4)
5451 v2.AddArg2(src, mem)
5452 v3 := b.NewValue0(v.Pos, OpMIPS64MOVWstore, types.TypeMem)
5453 v4 := b.NewValue0(v.Pos, OpMIPS64MOVWload, typ.Int32)
5454 v4.AddArg2(src, mem)
5455 v3.AddArg3(dst, v4, mem)
5456 v1.AddArg3(dst, v2, v3)
5457 v.AddArg3(dst, v0, v1)
5458 return true
5459 }
5460
5461
5462
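// match: (Move [16] {t} dst src mem)
// cond: t.Alignment()%8 == 0
// result: (MOVVstore [8] dst (MOVVload [8] src mem) (MOVVstore dst (MOVVload src mem) mem))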
5463 for {
5464 if auxIntToInt64(v.AuxInt) != 16 {
5465 break
5466 }
5467 t := auxToType(v.Aux)
5468 dst := v_0
5469 src := v_1
5470 mem := v_2
5471 if !(t.Alignment()%8 == 0) {
5472 break
5473 }
5474 v.reset(OpMIPS64MOVVstore)
5475 v.AuxInt = int32ToAuxInt(8)
5476 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVload, typ.UInt64)
5477 v0.AuxInt = int32ToAuxInt(8)
5478 v0.AddArg2(src, mem)
5479 v1 := b.NewValue0(v.Pos, OpMIPS64MOVVstore, types.TypeMem)
5480 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVload, typ.UInt64)
5481 v2.AddArg2(src, mem)
5482 v1.AddArg3(dst, v2, mem)
5483 v.AddArg3(dst, v0, v1)
5484 return true
5485 }
5486
5487
5488
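// match: (Move [24] {t} dst src mem)
// cond: t.Alignment()%8 == 0
// result: (MOVVstore [16] dst (MOVVload [16] src mem) (MOVVstore [8] dst (MOVVload [8] src mem) (MOVVstore dst (MOVVload src mem) mem)))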
5489 for {
5490 if auxIntToInt64(v.AuxInt) != 24 {
5491 break
5492 }
5493 t := auxToType(v.Aux)
5494 dst := v_0
5495 src := v_1
5496 mem := v_2
5497 if !(t.Alignment()%8 == 0) {
5498 break
5499 }
5500 v.reset(OpMIPS64MOVVstore)
5501 v.AuxInt = int32ToAuxInt(16)
5502 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVload, typ.UInt64)
5503 v0.AuxInt = int32ToAuxInt(16)
5504 v0.AddArg2(src, mem)
5505 v1 := b.NewValue0(v.Pos, OpMIPS64MOVVstore, types.TypeMem)
5506 v1.AuxInt = int32ToAuxInt(8)
5507 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVload, typ.UInt64)
5508 v2.AuxInt = int32ToAuxInt(8)
5509 v2.AddArg2(src, mem)
5510 v3 := b.NewValue0(v.Pos, OpMIPS64MOVVstore, types.TypeMem)
5511 v4 := b.NewValue0(v.Pos, OpMIPS64MOVVload, typ.UInt64)
5512 v4.AddArg2(src, mem)
5513 v3.AddArg3(dst, v4, mem)
5514 v1.AddArg3(dst, v2, v3)
5515 v.AddArg3(dst, v0, v1)
5516 return true
5517 }
5518
5519
5520
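// match: (Move [s] {t} dst src mem)
// cond: s%8 == 0 && s >= 24 && s <= 8*128 && t.Alignment()%8 == 0 && !config.noDuffDevice && logLargeCopy(v, s)
// result: (DUFFCOPY [16 * (128 - s/8)] dst src mem)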
5521 for {
5522 s := auxIntToInt64(v.AuxInt)
5523 t := auxToType(v.Aux)
5524 dst := v_0
5525 src := v_1
5526 mem := v_2
5527 if !(s%8 == 0 && s >= 24 && s <= 8*128 && t.Alignment()%8 == 0 && !config.noDuffDevice && logLargeCopy(v, s)) {
5528 break
5529 }
5530 v.reset(OpMIPS64DUFFCOPY)
5531 v.AuxInt = int64ToAuxInt(16 * (128 - s/8))
5532 v.AddArg3(dst, src, mem)
5533 return true
5534 }
5535
5536
5537
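// match: (Move [s] {t} dst src mem)
// cond: s > 24 && logLargeCopy(v, s) || t.Alignment()%8 != 0
// result: (LoweredMove [t.Alignment()] dst src (ADDVconst <src.Type> src [s-moveSize(t.Alignment(), config)]) mem)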
5538 for {
5539 s := auxIntToInt64(v.AuxInt)
5540 t := auxToType(v.Aux)
5541 dst := v_0
5542 src := v_1
5543 mem := v_2
5544 if !(s > 24 && logLargeCopy(v, s) || t.Alignment()%8 != 0) {
5545 break
5546 }
5547 v.reset(OpMIPS64LoweredMove)
5548 v.AuxInt = int64ToAuxInt(t.Alignment())
5549 v0 := b.NewValue0(v.Pos, OpMIPS64ADDVconst, src.Type)
5550 v0.AuxInt = int64ToAuxInt(s - moveSize(t.Alignment(), config))
5551 v0.AddArg(src)
5552 v.AddArg4(dst, src, v0, mem)
5553 return true
5554 }
5555 return false
5556 }
5557 func rewriteValueMIPS64_OpMul16(v *Value) bool {
5558 v_1 := v.Args[1]
5559 v_0 := v.Args[0]
5560 b := v.Block
5561 typ := &b.Func.Config.Types
5562
5563
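// match: (Mul16 x y)
// result: (Select1 (MULVU x y))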
5564 for {
5565 x := v_0
5566 y := v_1
5567 v.reset(OpSelect1)
5568 v0 := b.NewValue0(v.Pos, OpMIPS64MULVU, types.NewTuple(typ.UInt64, typ.UInt64))
5569 v0.AddArg2(x, y)
5570 v.AddArg(v0)
5571 return true
5572 }
5573 }
5574 func rewriteValueMIPS64_OpMul32(v *Value) bool {
5575 v_1 := v.Args[1]
5576 v_0 := v.Args[0]
5577 b := v.Block
5578 typ := &b.Func.Config.Types
5579
5580
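// match: (Mul32 x y)
// result: (Select1 (MULVU x y))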
5581 for {
5582 x := v_0
5583 y := v_1
5584 v.reset(OpSelect1)
5585 v0 := b.NewValue0(v.Pos, OpMIPS64MULVU, types.NewTuple(typ.UInt64, typ.UInt64))
5586 v0.AddArg2(x, y)
5587 v.AddArg(v0)
5588 return true
5589 }
5590 }
5591 func rewriteValueMIPS64_OpMul64(v *Value) bool {
5592 v_1 := v.Args[1]
5593 v_0 := v.Args[0]
5594 b := v.Block
5595 typ := &b.Func.Config.Types
5596
5597
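// match: (Mul64 x y)
// result: (Select1 (MULVU x y))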
5598 for {
5599 x := v_0
5600 y := v_1
5601 v.reset(OpSelect1)
5602 v0 := b.NewValue0(v.Pos, OpMIPS64MULVU, types.NewTuple(typ.UInt64, typ.UInt64))
5603 v0.AddArg2(x, y)
5604 v.AddArg(v0)
5605 return true
5606 }
5607 }
5608 func rewriteValueMIPS64_OpMul8(v *Value) bool {
5609 v_1 := v.Args[1]
5610 v_0 := v.Args[0]
5611 b := v.Block
5612 typ := &b.Func.Config.Types
5613
5614
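// match: (Mul8 x y)
// result: (Select1 (MULVU x y))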
5615 for {
5616 x := v_0
5617 y := v_1
5618 v.reset(OpSelect1)
5619 v0 := b.NewValue0(v.Pos, OpMIPS64MULVU, types.NewTuple(typ.UInt64, typ.UInt64))
5620 v0.AddArg2(x, y)
5621 v.AddArg(v0)
5622 return true
5623 }
5624 }
5625 func rewriteValueMIPS64_OpNeq16(v *Value) bool {
5626 v_1 := v.Args[1]
5627 v_0 := v.Args[0]
5628 b := v.Block
5629 typ := &b.Func.Config.Types
5630
5631
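// match: (Neq16 x y)
// result: (SGTU (XOR (ZeroExt16to32 x) (ZeroExt16to64 y)) (MOVVconst [0]))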
5632 for {
5633 x := v_0
5634 y := v_1
5635 v.reset(OpMIPS64SGTU)
5636 v0 := b.NewValue0(v.Pos, OpMIPS64XOR, typ.UInt64)
5637 v1 := b.NewValue0(v.Pos, OpZeroExt16to32, typ.UInt32)
5638 v1.AddArg(x)
5639 v2 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
5640 v2.AddArg(y)
5641 v0.AddArg2(v1, v2)
5642 v3 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
5643 v3.AuxInt = int64ToAuxInt(0)
5644 v.AddArg2(v0, v3)
5645 return true
5646 }
5647 }
5648 func rewriteValueMIPS64_OpNeq32(v *Value) bool {
5649 v_1 := v.Args[1]
5650 v_0 := v.Args[0]
5651 b := v.Block
5652 typ := &b.Func.Config.Types
5653
5654
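// match: (Neq32 x y)
// result: (SGTU (XOR (ZeroExt32to64 x) (ZeroExt32to64 y)) (MOVVconst [0]))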
5655 for {
5656 x := v_0
5657 y := v_1
5658 v.reset(OpMIPS64SGTU)
5659 v0 := b.NewValue0(v.Pos, OpMIPS64XOR, typ.UInt64)
5660 v1 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
5661 v1.AddArg(x)
5662 v2 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
5663 v2.AddArg(y)
5664 v0.AddArg2(v1, v2)
5665 v3 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
5666 v3.AuxInt = int64ToAuxInt(0)
5667 v.AddArg2(v0, v3)
5668 return true
5669 }
5670 }
5671 func rewriteValueMIPS64_OpNeq32F(v *Value) bool {
5672 v_1 := v.Args[1]
5673 v_0 := v.Args[0]
5674 b := v.Block
5675
5676
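// match: (Neq32F x y)
// result: (FPFlagFalse (CMPEQF x y))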
5677 for {
5678 x := v_0
5679 y := v_1
5680 v.reset(OpMIPS64FPFlagFalse)
5681 v0 := b.NewValue0(v.Pos, OpMIPS64CMPEQF, types.TypeFlags)
5682 v0.AddArg2(x, y)
5683 v.AddArg(v0)
5684 return true
5685 }
5686 }
5687 func rewriteValueMIPS64_OpNeq64(v *Value) bool {
5688 v_1 := v.Args[1]
5689 v_0 := v.Args[0]
5690 b := v.Block
5691 typ := &b.Func.Config.Types
5692
5693
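// match: (Neq64 x y)
// result: (SGTU (XOR x y) (MOVVconst [0]))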
5694 for {
5695 x := v_0
5696 y := v_1
5697 v.reset(OpMIPS64SGTU)
5698 v0 := b.NewValue0(v.Pos, OpMIPS64XOR, typ.UInt64)
5699 v0.AddArg2(x, y)
5700 v1 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
5701 v1.AuxInt = int64ToAuxInt(0)
5702 v.AddArg2(v0, v1)
5703 return true
5704 }
5705 }
5706 func rewriteValueMIPS64_OpNeq64F(v *Value) bool {
5707 v_1 := v.Args[1]
5708 v_0 := v.Args[0]
5709 b := v.Block
5710
5711
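// match: (Neq64F x y)
// result: (FPFlagFalse (CMPEQD x y))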
5712 for {
5713 x := v_0
5714 y := v_1
5715 v.reset(OpMIPS64FPFlagFalse)
5716 v0 := b.NewValue0(v.Pos, OpMIPS64CMPEQD, types.TypeFlags)
5717 v0.AddArg2(x, y)
5718 v.AddArg(v0)
5719 return true
5720 }
5721 }
5722 func rewriteValueMIPS64_OpNeq8(v *Value) bool {
5723 v_1 := v.Args[1]
5724 v_0 := v.Args[0]
5725 b := v.Block
5726 typ := &b.Func.Config.Types
5727
5728
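// match: (Neq8 x y)
// result: (SGTU (XOR (ZeroExt8to64 x) (ZeroExt8to64 y)) (MOVVconst [0]))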
5729 for {
5730 x := v_0
5731 y := v_1
5732 v.reset(OpMIPS64SGTU)
5733 v0 := b.NewValue0(v.Pos, OpMIPS64XOR, typ.UInt64)
5734 v1 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
5735 v1.AddArg(x)
5736 v2 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
5737 v2.AddArg(y)
5738 v0.AddArg2(v1, v2)
5739 v3 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
5740 v3.AuxInt = int64ToAuxInt(0)
5741 v.AddArg2(v0, v3)
5742 return true
5743 }
5744 }
5745 func rewriteValueMIPS64_OpNeqPtr(v *Value) bool {
5746 v_1 := v.Args[1]
5747 v_0 := v.Args[0]
5748 b := v.Block
5749 typ := &b.Func.Config.Types
5750
5751
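// match: (NeqPtr x y)
// result: (SGTU (XOR x y) (MOVVconst [0]))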
5752 for {
5753 x := v_0
5754 y := v_1
5755 v.reset(OpMIPS64SGTU)
5756 v0 := b.NewValue0(v.Pos, OpMIPS64XOR, typ.UInt64)
5757 v0.AddArg2(x, y)
5758 v1 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
5759 v1.AuxInt = int64ToAuxInt(0)
5760 v.AddArg2(v0, v1)
5761 return true
5762 }
5763 }
5764 func rewriteValueMIPS64_OpNot(v *Value) bool {
5765 v_0 := v.Args[0]
5766
5767
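// match: (Not x)
// result: (XORconst [1] x)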
5768 for {
5769 x := v_0
5770 v.reset(OpMIPS64XORconst)
5771 v.AuxInt = int64ToAuxInt(1)
5772 v.AddArg(x)
5773 return true
5774 }
5775 }
5776 func rewriteValueMIPS64_OpOffPtr(v *Value) bool {
5777 v_0 := v.Args[0]
5778
5779
5780
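// match: (OffPtr [off] ptr:(SP))
// cond: is32Bit(off)
// result: (MOVVaddr [int32(off)] ptr)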
5781 for {
5782 off := auxIntToInt64(v.AuxInt)
5783 ptr := v_0
5784 if ptr.Op != OpSP || !(is32Bit(off)) {
5785 break
5786 }
5787 v.reset(OpMIPS64MOVVaddr)
5788 v.AuxInt = int32ToAuxInt(int32(off))
5789 v.AddArg(ptr)
5790 return true
5791 }
5792
5793
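// match: (OffPtr [off] ptr)
// result: (ADDVconst [off] ptr)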
5794 for {
5795 off := auxIntToInt64(v.AuxInt)
5796 ptr := v_0
5797 v.reset(OpMIPS64ADDVconst)
5798 v.AuxInt = int64ToAuxInt(off)
5799 v.AddArg(ptr)
5800 return true
5801 }
5802 }
5803 func rewriteValueMIPS64_OpPanicBounds(v *Value) bool {
5804 v_2 := v.Args[2]
5805 v_1 := v.Args[1]
5806 v_0 := v.Args[0]
5807
5808
5809
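// match: (PanicBounds [kind] x y mem)
// cond: boundsABI(kind) == 0
// result: (LoweredPanicBoundsA [kind] x y mem)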
5810 for {
5811 kind := auxIntToInt64(v.AuxInt)
5812 x := v_0
5813 y := v_1
5814 mem := v_2
5815 if !(boundsABI(kind) == 0) {
5816 break
5817 }
5818 v.reset(OpMIPS64LoweredPanicBoundsA)
5819 v.AuxInt = int64ToAuxInt(kind)
5820 v.AddArg3(x, y, mem)
5821 return true
5822 }
5823
5824
5825
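// match: (PanicBounds [kind] x y mem)
// cond: boundsABI(kind) == 1
// result: (LoweredPanicBoundsB [kind] x y mem)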
5826 for {
5827 kind := auxIntToInt64(v.AuxInt)
5828 x := v_0
5829 y := v_1
5830 mem := v_2
5831 if !(boundsABI(kind) == 1) {
5832 break
5833 }
5834 v.reset(OpMIPS64LoweredPanicBoundsB)
5835 v.AuxInt = int64ToAuxInt(kind)
5836 v.AddArg3(x, y, mem)
5837 return true
5838 }
5839
5840
5841
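// match: (PanicBounds [kind] x y mem)
// cond: boundsABI(kind) == 2
// result: (LoweredPanicBoundsC [kind] x y mem)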
5842 for {
5843 kind := auxIntToInt64(v.AuxInt)
5844 x := v_0
5845 y := v_1
5846 mem := v_2
5847 if !(boundsABI(kind) == 2) {
5848 break
5849 }
5850 v.reset(OpMIPS64LoweredPanicBoundsC)
5851 v.AuxInt = int64ToAuxInt(kind)
5852 v.AddArg3(x, y, mem)
5853 return true
5854 }
5855 return false
5856 }
5857 func rewriteValueMIPS64_OpRotateLeft16(v *Value) bool {
5858 v_1 := v.Args[1]
5859 v_0 := v.Args[0]
5860 b := v.Block
5861 typ := &b.Func.Config.Types
5862
5863
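// match: (RotateLeft16 <t> x (MOVVconst [c]))
// result: (Or16 (Lsh16x64 <t> x (MOVVconst [c&15])) (Rsh16Ux64 <t> x (MOVVconst [-c&15])))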
5864 for {
5865 t := v.Type
5866 x := v_0
5867 if v_1.Op != OpMIPS64MOVVconst {
5868 break
5869 }
5870 c := auxIntToInt64(v_1.AuxInt)
5871 v.reset(OpOr16)
5872 v0 := b.NewValue0(v.Pos, OpLsh16x64, t)
5873 v1 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
5874 v1.AuxInt = int64ToAuxInt(c & 15)
5875 v0.AddArg2(x, v1)
5876 v2 := b.NewValue0(v.Pos, OpRsh16Ux64, t)
5877 v3 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
5878 v3.AuxInt = int64ToAuxInt(-c & 15)
5879 v2.AddArg2(x, v3)
5880 v.AddArg2(v0, v2)
5881 return true
5882 }
5883 return false
5884 }
5885 func rewriteValueMIPS64_OpRotateLeft32(v *Value) bool {
5886 v_1 := v.Args[1]
5887 v_0 := v.Args[0]
5888 b := v.Block
5889 typ := &b.Func.Config.Types
5890
5891
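// match: (RotateLeft32 <t> x (MOVVconst [c]))
// result: (Or32 (Lsh32x64 <t> x (MOVVconst [c&31])) (Rsh32Ux64 <t> x (MOVVconst [-c&31])))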
5892 for {
5893 t := v.Type
5894 x := v_0
5895 if v_1.Op != OpMIPS64MOVVconst {
5896 break
5897 }
5898 c := auxIntToInt64(v_1.AuxInt)
5899 v.reset(OpOr32)
5900 v0 := b.NewValue0(v.Pos, OpLsh32x64, t)
5901 v1 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
5902 v1.AuxInt = int64ToAuxInt(c & 31)
5903 v0.AddArg2(x, v1)
5904 v2 := b.NewValue0(v.Pos, OpRsh32Ux64, t)
5905 v3 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
5906 v3.AuxInt = int64ToAuxInt(-c & 31)
5907 v2.AddArg2(x, v3)
5908 v.AddArg2(v0, v2)
5909 return true
5910 }
5911 return false
5912 }
5913 func rewriteValueMIPS64_OpRotateLeft64(v *Value) bool {
5914 v_1 := v.Args[1]
5915 v_0 := v.Args[0]
5916 b := v.Block
5917 typ := &b.Func.Config.Types
5918
5919
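// match: (RotateLeft64 <t> x (MOVVconst [c]))
// result: (Or64 (Lsh64x64 <t> x (MOVVconst [c&63])) (Rsh64Ux64 <t> x (MOVVconst [-c&63])))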
5920 for {
5921 t := v.Type
5922 x := v_0
5923 if v_1.Op != OpMIPS64MOVVconst {
5924 break
5925 }
5926 c := auxIntToInt64(v_1.AuxInt)
5927 v.reset(OpOr64)
5928 v0 := b.NewValue0(v.Pos, OpLsh64x64, t)
5929 v1 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
5930 v1.AuxInt = int64ToAuxInt(c & 63)
5931 v0.AddArg2(x, v1)
5932 v2 := b.NewValue0(v.Pos, OpRsh64Ux64, t)
5933 v3 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
5934 v3.AuxInt = int64ToAuxInt(-c & 63)
5935 v2.AddArg2(x, v3)
5936 v.AddArg2(v0, v2)
5937 return true
5938 }
5939 return false
5940 }
5941 func rewriteValueMIPS64_OpRotateLeft8(v *Value) bool {
5942 v_1 := v.Args[1]
5943 v_0 := v.Args[0]
5944 b := v.Block
5945 typ := &b.Func.Config.Types
5946
5947
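// match: (RotateLeft8 <t> x (MOVVconst [c]))
// result: (Or8 (Lsh8x64 <t> x (MOVVconst [c&7])) (Rsh8Ux64 <t> x (MOVVconst [-c&7])))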
5948 for {
5949 t := v.Type
5950 x := v_0
5951 if v_1.Op != OpMIPS64MOVVconst {
5952 break
5953 }
5954 c := auxIntToInt64(v_1.AuxInt)
5955 v.reset(OpOr8)
5956 v0 := b.NewValue0(v.Pos, OpLsh8x64, t)
5957 v1 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
5958 v1.AuxInt = int64ToAuxInt(c & 7)
5959 v0.AddArg2(x, v1)
5960 v2 := b.NewValue0(v.Pos, OpRsh8Ux64, t)
5961 v3 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
5962 v3.AuxInt = int64ToAuxInt(-c & 7)
5963 v2.AddArg2(x, v3)
5964 v.AddArg2(v0, v2)
5965 return true
5966 }
5967 return false
5968 }
5969 func rewriteValueMIPS64_OpRsh16Ux16(v *Value) bool {
5970 v_1 := v.Args[1]
5971 v_0 := v.Args[0]
5972 b := v.Block
5973 typ := &b.Func.Config.Types
5974
5975
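// match: (Rsh16Ux16 <t> x y)
// result: (AND (NEGV <t> (SGTU (MOVVconst [64]) (ZeroExt16to64 y))) (SRLV <t> (ZeroExt16to64 x) (ZeroExt16to64 y)))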
5976 for {
5977 t := v.Type
5978 x := v_0
5979 y := v_1
5980 v.reset(OpMIPS64AND)
5981 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
5982 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
5983 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
5984 v2.AuxInt = int64ToAuxInt(64)
5985 v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
5986 v3.AddArg(y)
5987 v1.AddArg2(v2, v3)
5988 v0.AddArg(v1)
5989 v4 := b.NewValue0(v.Pos, OpMIPS64SRLV, t)
5990 v5 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
5991 v5.AddArg(x)
5992 v4.AddArg2(v5, v3)
5993 v.AddArg2(v0, v4)
5994 return true
5995 }
5996 }
5997 func rewriteValueMIPS64_OpRsh16Ux32(v *Value) bool {
5998 v_1 := v.Args[1]
5999 v_0 := v.Args[0]
6000 b := v.Block
6001 typ := &b.Func.Config.Types
6002
6003
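// match: (Rsh16Ux32 <t> x y)
// result: (AND (NEGV <t> (SGTU (MOVVconst [64]) (ZeroExt32to64 y))) (SRLV <t> (ZeroExt16to64 x) (ZeroExt32to64 y)))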
6004 for {
6005 t := v.Type
6006 x := v_0
6007 y := v_1
6008 v.reset(OpMIPS64AND)
6009 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
6010 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
6011 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
6012 v2.AuxInt = int64ToAuxInt(64)
6013 v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
6014 v3.AddArg(y)
6015 v1.AddArg2(v2, v3)
6016 v0.AddArg(v1)
6017 v4 := b.NewValue0(v.Pos, OpMIPS64SRLV, t)
6018 v5 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
6019 v5.AddArg(x)
6020 v4.AddArg2(v5, v3)
6021 v.AddArg2(v0, v4)
6022 return true
6023 }
6024 }
6025 func rewriteValueMIPS64_OpRsh16Ux64(v *Value) bool {
6026 v_1 := v.Args[1]
6027 v_0 := v.Args[0]
6028 b := v.Block
6029 typ := &b.Func.Config.Types
6030
6031
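// match: (Rsh16Ux64 <t> x y)
// result: (AND (NEGV <t> (SGTU (MOVVconst [64]) y)) (SRLV <t> (ZeroExt16to64 x) y))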
6032 for {
6033 t := v.Type
6034 x := v_0
6035 y := v_1
6036 v.reset(OpMIPS64AND)
6037 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
6038 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
6039 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
6040 v2.AuxInt = int64ToAuxInt(64)
6041 v1.AddArg2(v2, y)
6042 v0.AddArg(v1)
6043 v3 := b.NewValue0(v.Pos, OpMIPS64SRLV, t)
6044 v4 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
6045 v4.AddArg(x)
6046 v3.AddArg2(v4, y)
6047 v.AddArg2(v0, v3)
6048 return true
6049 }
6050 }
6051 func rewriteValueMIPS64_OpRsh16Ux8(v *Value) bool {
6052 v_1 := v.Args[1]
6053 v_0 := v.Args[0]
6054 b := v.Block
6055 typ := &b.Func.Config.Types
6056
6057
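// match: (Rsh16Ux8 <t> x y)
// result: (AND (NEGV <t> (SGTU (MOVVconst [64]) (ZeroExt8to64 y))) (SRLV <t> (ZeroExt16to64 x) (ZeroExt8to64 y)))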
6058 for {
6059 t := v.Type
6060 x := v_0
6061 y := v_1
6062 v.reset(OpMIPS64AND)
6063 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
6064 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
6065 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
6066 v2.AuxInt = int64ToAuxInt(64)
6067 v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
6068 v3.AddArg(y)
6069 v1.AddArg2(v2, v3)
6070 v0.AddArg(v1)
6071 v4 := b.NewValue0(v.Pos, OpMIPS64SRLV, t)
6072 v5 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
6073 v5.AddArg(x)
6074 v4.AddArg2(v5, v3)
6075 v.AddArg2(v0, v4)
6076 return true
6077 }
6078 }
6079 func rewriteValueMIPS64_OpRsh16x16(v *Value) bool {
6080 v_1 := v.Args[1]
6081 v_0 := v.Args[0]
6082 b := v.Block
6083 typ := &b.Func.Config.Types
6084
6085
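// match: (Rsh16x16 <t> x y)
// result: (SRAV (SignExt16to64 x) (OR <t> (NEGV <t> (SGTU (ZeroExt16to64 y) (MOVVconst [63]))) (ZeroExt16to64 y)))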
6086 for {
6087 t := v.Type
6088 x := v_0
6089 y := v_1
6090 v.reset(OpMIPS64SRAV)
6091 v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
6092 v0.AddArg(x)
6093 v1 := b.NewValue0(v.Pos, OpMIPS64OR, t)
6094 v2 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
6095 v3 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
6096 v4 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
6097 v4.AddArg(y)
6098 v5 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
6099 v5.AuxInt = int64ToAuxInt(63)
6100 v3.AddArg2(v4, v5)
6101 v2.AddArg(v3)
6102 v1.AddArg2(v2, v4)
6103 v.AddArg2(v0, v1)
6104 return true
6105 }
6106 }
6107 func rewriteValueMIPS64_OpRsh16x32(v *Value) bool {
6108 v_1 := v.Args[1]
6109 v_0 := v.Args[0]
6110 b := v.Block
6111 typ := &b.Func.Config.Types
6112
6113
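// match: (Rsh16x32 <t> x y)
// result: (SRAV (SignExt16to64 x) (OR <t> (NEGV <t> (SGTU (ZeroExt32to64 y) (MOVVconst [63]))) (ZeroExt32to64 y)))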
6114 for {
6115 t := v.Type
6116 x := v_0
6117 y := v_1
6118 v.reset(OpMIPS64SRAV)
6119 v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
6120 v0.AddArg(x)
6121 v1 := b.NewValue0(v.Pos, OpMIPS64OR, t)
6122 v2 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
6123 v3 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
6124 v4 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
6125 v4.AddArg(y)
6126 v5 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
6127 v5.AuxInt = int64ToAuxInt(63)
6128 v3.AddArg2(v4, v5)
6129 v2.AddArg(v3)
6130 v1.AddArg2(v2, v4)
6131 v.AddArg2(v0, v1)
6132 return true
6133 }
6134 }
6135 func rewriteValueMIPS64_OpRsh16x64(v *Value) bool {
6136 v_1 := v.Args[1]
6137 v_0 := v.Args[0]
6138 b := v.Block
6139 typ := &b.Func.Config.Types
6140
6141
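// match: (Rsh16x64 <t> x y)
// result: (SRAV (SignExt16to64 x) (OR <t> (NEGV <t> (SGTU y (MOVVconst [63]))) y))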
6142 for {
6143 t := v.Type
6144 x := v_0
6145 y := v_1
6146 v.reset(OpMIPS64SRAV)
6147 v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
6148 v0.AddArg(x)
6149 v1 := b.NewValue0(v.Pos, OpMIPS64OR, t)
6150 v2 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
6151 v3 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
6152 v4 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
6153 v4.AuxInt = int64ToAuxInt(63)
6154 v3.AddArg2(y, v4)
6155 v2.AddArg(v3)
6156 v1.AddArg2(v2, y)
6157 v.AddArg2(v0, v1)
6158 return true
6159 }
6160 }
6161 func rewriteValueMIPS64_OpRsh16x8(v *Value) bool {
6162 v_1 := v.Args[1]
6163 v_0 := v.Args[0]
6164 b := v.Block
6165 typ := &b.Func.Config.Types
6166
6167
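// match: (Rsh16x8 <t> x y)
// result: (SRAV (SignExt16to64 x) (OR <t> (NEGV <t> (SGTU (ZeroExt8to64 y) (MOVVconst [63]))) (ZeroExt8to64 y)))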
6168 for {
6169 t := v.Type
6170 x := v_0
6171 y := v_1
6172 v.reset(OpMIPS64SRAV)
6173 v0 := b.NewValue0(v.Pos, OpSignExt16to64, typ.Int64)
6174 v0.AddArg(x)
6175 v1 := b.NewValue0(v.Pos, OpMIPS64OR, t)
6176 v2 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
6177 v3 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
6178 v4 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
6179 v4.AddArg(y)
6180 v5 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
6181 v5.AuxInt = int64ToAuxInt(63)
6182 v3.AddArg2(v4, v5)
6183 v2.AddArg(v3)
6184 v1.AddArg2(v2, v4)
6185 v.AddArg2(v0, v1)
6186 return true
6187 }
6188 }
6189 func rewriteValueMIPS64_OpRsh32Ux16(v *Value) bool {
6190 v_1 := v.Args[1]
6191 v_0 := v.Args[0]
6192 b := v.Block
6193 typ := &b.Func.Config.Types
6194
6195
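// match: (Rsh32Ux16 <t> x y)
// result: (AND (NEGV <t> (SGTU (MOVVconst [64]) (ZeroExt16to64 y))) (SRLV <t> (ZeroExt32to64 x) (ZeroExt16to64 y)))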
6196 for {
6197 t := v.Type
6198 x := v_0
6199 y := v_1
6200 v.reset(OpMIPS64AND)
6201 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
6202 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
6203 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
6204 v2.AuxInt = int64ToAuxInt(64)
6205 v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
6206 v3.AddArg(y)
6207 v1.AddArg2(v2, v3)
6208 v0.AddArg(v1)
6209 v4 := b.NewValue0(v.Pos, OpMIPS64SRLV, t)
6210 v5 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
6211 v5.AddArg(x)
6212 v4.AddArg2(v5, v3)
6213 v.AddArg2(v0, v4)
6214 return true
6215 }
6216 }
6217 func rewriteValueMIPS64_OpRsh32Ux32(v *Value) bool {
6218 v_1 := v.Args[1]
6219 v_0 := v.Args[0]
6220 b := v.Block
6221 typ := &b.Func.Config.Types
6222
6223
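// match: (Rsh32Ux32 <t> x y)
// result: (AND (NEGV <t> (SGTU (MOVVconst [64]) (ZeroExt32to64 y))) (SRLV <t> (ZeroExt32to64 x) (ZeroExt32to64 y)))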
6224 for {
6225 t := v.Type
6226 x := v_0
6227 y := v_1
6228 v.reset(OpMIPS64AND)
6229 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
6230 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
6231 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
6232 v2.AuxInt = int64ToAuxInt(64)
6233 v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
6234 v3.AddArg(y)
6235 v1.AddArg2(v2, v3)
6236 v0.AddArg(v1)
6237 v4 := b.NewValue0(v.Pos, OpMIPS64SRLV, t)
6238 v5 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
6239 v5.AddArg(x)
6240 v4.AddArg2(v5, v3)
6241 v.AddArg2(v0, v4)
6242 return true
6243 }
6244 }
6245 func rewriteValueMIPS64_OpRsh32Ux64(v *Value) bool {
6246 v_1 := v.Args[1]
6247 v_0 := v.Args[0]
6248 b := v.Block
6249 typ := &b.Func.Config.Types
6250
6251
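// match: (Rsh32Ux64 <t> x y)
// result: (AND (NEGV <t> (SGTU (MOVVconst [64]) y)) (SRLV <t> (ZeroExt32to64 x) y))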
6252 for {
6253 t := v.Type
6254 x := v_0
6255 y := v_1
6256 v.reset(OpMIPS64AND)
6257 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
6258 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
6259 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
6260 v2.AuxInt = int64ToAuxInt(64)
6261 v1.AddArg2(v2, y)
6262 v0.AddArg(v1)
6263 v3 := b.NewValue0(v.Pos, OpMIPS64SRLV, t)
6264 v4 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
6265 v4.AddArg(x)
6266 v3.AddArg2(v4, y)
6267 v.AddArg2(v0, v3)
6268 return true
6269 }
6270 }
6271 func rewriteValueMIPS64_OpRsh32Ux8(v *Value) bool {
6272 v_1 := v.Args[1]
6273 v_0 := v.Args[0]
6274 b := v.Block
6275 typ := &b.Func.Config.Types
6276
6277
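// match: (Rsh32Ux8 <t> x y)
// result: (AND (NEGV <t> (SGTU (MOVVconst [64]) (ZeroExt8to64 y))) (SRLV <t> (ZeroExt32to64 x) (ZeroExt8to64 y)))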
6278 for {
6279 t := v.Type
6280 x := v_0
6281 y := v_1
6282 v.reset(OpMIPS64AND)
6283 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
6284 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
6285 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
6286 v2.AuxInt = int64ToAuxInt(64)
6287 v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
6288 v3.AddArg(y)
6289 v1.AddArg2(v2, v3)
6290 v0.AddArg(v1)
6291 v4 := b.NewValue0(v.Pos, OpMIPS64SRLV, t)
6292 v5 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
6293 v5.AddArg(x)
6294 v4.AddArg2(v5, v3)
6295 v.AddArg2(v0, v4)
6296 return true
6297 }
6298 }
6299 func rewriteValueMIPS64_OpRsh32x16(v *Value) bool {
6300 v_1 := v.Args[1]
6301 v_0 := v.Args[0]
6302 b := v.Block
6303 typ := &b.Func.Config.Types
6304
6305
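// match: (Rsh32x16 <t> x y)
// result: (SRAV (SignExt32to64 x) (OR <t> (NEGV <t> (SGTU (ZeroExt16to64 y) (MOVVconst [63]))) (ZeroExt16to64 y)))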
6306 for {
6307 t := v.Type
6308 x := v_0
6309 y := v_1
6310 v.reset(OpMIPS64SRAV)
6311 v0 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
6312 v0.AddArg(x)
6313 v1 := b.NewValue0(v.Pos, OpMIPS64OR, t)
6314 v2 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
6315 v3 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
6316 v4 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
6317 v4.AddArg(y)
6318 v5 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
6319 v5.AuxInt = int64ToAuxInt(63)
6320 v3.AddArg2(v4, v5)
6321 v2.AddArg(v3)
6322 v1.AddArg2(v2, v4)
6323 v.AddArg2(v0, v1)
6324 return true
6325 }
6326 }
6327 func rewriteValueMIPS64_OpRsh32x32(v *Value) bool {
6328 v_1 := v.Args[1]
6329 v_0 := v.Args[0]
6330 b := v.Block
6331 typ := &b.Func.Config.Types
6332
6333
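// match: (Rsh32x32 <t> x y)
// result: (SRAV (SignExt32to64 x) (OR <t> (NEGV <t> (SGTU (ZeroExt32to64 y) (MOVVconst [63]))) (ZeroExt32to64 y)))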
6334 for {
6335 t := v.Type
6336 x := v_0
6337 y := v_1
6338 v.reset(OpMIPS64SRAV)
6339 v0 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
6340 v0.AddArg(x)
6341 v1 := b.NewValue0(v.Pos, OpMIPS64OR, t)
6342 v2 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
6343 v3 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
6344 v4 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
6345 v4.AddArg(y)
6346 v5 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
6347 v5.AuxInt = int64ToAuxInt(63)
6348 v3.AddArg2(v4, v5)
6349 v2.AddArg(v3)
6350 v1.AddArg2(v2, v4)
6351 v.AddArg2(v0, v1)
6352 return true
6353 }
6354 }
6355 func rewriteValueMIPS64_OpRsh32x64(v *Value) bool {
6356 v_1 := v.Args[1]
6357 v_0 := v.Args[0]
6358 b := v.Block
6359 typ := &b.Func.Config.Types
6360
6361
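// match: (Rsh32x64 <t> x y)
// result: (SRAV (SignExt32to64 x) (OR <t> (NEGV <t> (SGTU y (MOVVconst [63]))) y))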
6362 for {
6363 t := v.Type
6364 x := v_0
6365 y := v_1
6366 v.reset(OpMIPS64SRAV)
6367 v0 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
6368 v0.AddArg(x)
6369 v1 := b.NewValue0(v.Pos, OpMIPS64OR, t)
6370 v2 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
6371 v3 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
6372 v4 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
6373 v4.AuxInt = int64ToAuxInt(63)
6374 v3.AddArg2(y, v4)
6375 v2.AddArg(v3)
6376 v1.AddArg2(v2, y)
6377 v.AddArg2(v0, v1)
6378 return true
6379 }
6380 }
6381 func rewriteValueMIPS64_OpRsh32x8(v *Value) bool {
6382 v_1 := v.Args[1]
6383 v_0 := v.Args[0]
6384 b := v.Block
6385 typ := &b.Func.Config.Types
6386
6387
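// match: (Rsh32x8 <t> x y)
// result: (SRAV (SignExt32to64 x) (OR <t> (NEGV <t> (SGTU (ZeroExt8to64 y) (MOVVconst [63]))) (ZeroExt8to64 y)))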
6388 for {
6389 t := v.Type
6390 x := v_0
6391 y := v_1
6392 v.reset(OpMIPS64SRAV)
6393 v0 := b.NewValue0(v.Pos, OpSignExt32to64, typ.Int64)
6394 v0.AddArg(x)
6395 v1 := b.NewValue0(v.Pos, OpMIPS64OR, t)
6396 v2 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
6397 v3 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
6398 v4 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
6399 v4.AddArg(y)
6400 v5 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
6401 v5.AuxInt = int64ToAuxInt(63)
6402 v3.AddArg2(v4, v5)
6403 v2.AddArg(v3)
6404 v1.AddArg2(v2, v4)
6405 v.AddArg2(v0, v1)
6406 return true
6407 }
6408 }
6409 func rewriteValueMIPS64_OpRsh64Ux16(v *Value) bool {
6410 v_1 := v.Args[1]
6411 v_0 := v.Args[0]
6412 b := v.Block
6413 typ := &b.Func.Config.Types
6414
6415
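// match: (Rsh64Ux16 <t> x y)
// result: (AND (NEGV <t> (SGTU (MOVVconst [64]) (ZeroExt16to64 y))) (SRLV <t> x (ZeroExt16to64 y)))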
6416 for {
6417 t := v.Type
6418 x := v_0
6419 y := v_1
6420 v.reset(OpMIPS64AND)
6421 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
6422 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
6423 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
6424 v2.AuxInt = int64ToAuxInt(64)
6425 v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
6426 v3.AddArg(y)
6427 v1.AddArg2(v2, v3)
6428 v0.AddArg(v1)
6429 v4 := b.NewValue0(v.Pos, OpMIPS64SRLV, t)
6430 v4.AddArg2(x, v3)
6431 v.AddArg2(v0, v4)
6432 return true
6433 }
6434 }
6435 func rewriteValueMIPS64_OpRsh64Ux32(v *Value) bool {
6436 v_1 := v.Args[1]
6437 v_0 := v.Args[0]
6438 b := v.Block
6439 typ := &b.Func.Config.Types
6440
6441
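// match: (Rsh64Ux32 <t> x y)
// result: (AND (NEGV <t> (SGTU (MOVVconst [64]) (ZeroExt32to64 y))) (SRLV <t> x (ZeroExt32to64 y)))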
6442 for {
6443 t := v.Type
6444 x := v_0
6445 y := v_1
6446 v.reset(OpMIPS64AND)
6447 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
6448 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
6449 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
6450 v2.AuxInt = int64ToAuxInt(64)
6451 v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
6452 v3.AddArg(y)
6453 v1.AddArg2(v2, v3)
6454 v0.AddArg(v1)
6455 v4 := b.NewValue0(v.Pos, OpMIPS64SRLV, t)
6456 v4.AddArg2(x, v3)
6457 v.AddArg2(v0, v4)
6458 return true
6459 }
6460 }
6461 func rewriteValueMIPS64_OpRsh64Ux64(v *Value) bool {
6462 v_1 := v.Args[1]
6463 v_0 := v.Args[0]
6464 b := v.Block
6465 typ := &b.Func.Config.Types
6466
6467
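// match: (Rsh64Ux64 <t> x y)
// result: (AND (NEGV <t> (SGTU (MOVVconst [64]) y)) (SRLV <t> x y))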
6468 for {
6469 t := v.Type
6470 x := v_0
6471 y := v_1
6472 v.reset(OpMIPS64AND)
6473 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
6474 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
6475 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
6476 v2.AuxInt = int64ToAuxInt(64)
6477 v1.AddArg2(v2, y)
6478 v0.AddArg(v1)
6479 v3 := b.NewValue0(v.Pos, OpMIPS64SRLV, t)
6480 v3.AddArg2(x, y)
6481 v.AddArg2(v0, v3)
6482 return true
6483 }
6484 }
6485 func rewriteValueMIPS64_OpRsh64Ux8(v *Value) bool {
6486 v_1 := v.Args[1]
6487 v_0 := v.Args[0]
6488 b := v.Block
6489 typ := &b.Func.Config.Types
6490
6491
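// match: (Rsh64Ux8 <t> x y)
// result: (AND (NEGV <t> (SGTU (MOVVconst [64]) (ZeroExt8to64 y))) (SRLV <t> x (ZeroExt8to64 y)))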
6492 for {
6493 t := v.Type
6494 x := v_0
6495 y := v_1
6496 v.reset(OpMIPS64AND)
6497 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
6498 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
6499 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
6500 v2.AuxInt = int64ToAuxInt(64)
6501 v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
6502 v3.AddArg(y)
6503 v1.AddArg2(v2, v3)
6504 v0.AddArg(v1)
6505 v4 := b.NewValue0(v.Pos, OpMIPS64SRLV, t)
6506 v4.AddArg2(x, v3)
6507 v.AddArg2(v0, v4)
6508 return true
6509 }
6510 }
6511 func rewriteValueMIPS64_OpRsh64x16(v *Value) bool {
6512 v_1 := v.Args[1]
6513 v_0 := v.Args[0]
6514 b := v.Block
6515 typ := &b.Func.Config.Types
6516
6517
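// match: (Rsh64x16 <t> x y)
// result: (SRAV x (OR <t> (NEGV <t> (SGTU (ZeroExt16to64 y) (MOVVconst [63]))) (ZeroExt16to64 y)))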
6518 for {
6519 t := v.Type
6520 x := v_0
6521 y := v_1
6522 v.reset(OpMIPS64SRAV)
6523 v0 := b.NewValue0(v.Pos, OpMIPS64OR, t)
6524 v1 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
6525 v2 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
6526 v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
6527 v3.AddArg(y)
6528 v4 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
6529 v4.AuxInt = int64ToAuxInt(63)
6530 v2.AddArg2(v3, v4)
6531 v1.AddArg(v2)
6532 v0.AddArg2(v1, v3)
6533 v.AddArg2(x, v0)
6534 return true
6535 }
6536 }
6537 func rewriteValueMIPS64_OpRsh64x32(v *Value) bool {
6538 v_1 := v.Args[1]
6539 v_0 := v.Args[0]
6540 b := v.Block
6541 typ := &b.Func.Config.Types
6542
6543
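// match: (Rsh64x32 <t> x y)
// result: (SRAV x (OR <t> (NEGV <t> (SGTU (ZeroExt32to64 y) (MOVVconst [63]))) (ZeroExt32to64 y)))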
6544 for {
6545 t := v.Type
6546 x := v_0
6547 y := v_1
6548 v.reset(OpMIPS64SRAV)
6549 v0 := b.NewValue0(v.Pos, OpMIPS64OR, t)
6550 v1 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
6551 v2 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
6552 v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
6553 v3.AddArg(y)
6554 v4 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
6555 v4.AuxInt = int64ToAuxInt(63)
6556 v2.AddArg2(v3, v4)
6557 v1.AddArg(v2)
6558 v0.AddArg2(v1, v3)
6559 v.AddArg2(x, v0)
6560 return true
6561 }
6562 }
6563 func rewriteValueMIPS64_OpRsh64x64(v *Value) bool {
6564 v_1 := v.Args[1]
6565 v_0 := v.Args[0]
6566 b := v.Block
6567 typ := &b.Func.Config.Types
6568
6569
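// match: (Rsh64x64 <t> x y)
// result: (SRAV x (OR <t> (NEGV <t> (SGTU y (MOVVconst [63]))) y))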
6570 for {
6571 t := v.Type
6572 x := v_0
6573 y := v_1
6574 v.reset(OpMIPS64SRAV)
6575 v0 := b.NewValue0(v.Pos, OpMIPS64OR, t)
6576 v1 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
6577 v2 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
6578 v3 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
6579 v3.AuxInt = int64ToAuxInt(63)
6580 v2.AddArg2(y, v3)
6581 v1.AddArg(v2)
6582 v0.AddArg2(v1, y)
6583 v.AddArg2(x, v0)
6584 return true
6585 }
6586 }
6587 func rewriteValueMIPS64_OpRsh64x8(v *Value) bool {
6588 v_1 := v.Args[1]
6589 v_0 := v.Args[0]
6590 b := v.Block
6591 typ := &b.Func.Config.Types
6592
6593
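// match: (Rsh64x8 <t> x y)
// result: (SRAV x (OR <t> (NEGV <t> (SGTU (ZeroExt8to64 y) (MOVVconst [63]))) (ZeroExt8to64 y)))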
6594 for {
6595 t := v.Type
6596 x := v_0
6597 y := v_1
6598 v.reset(OpMIPS64SRAV)
6599 v0 := b.NewValue0(v.Pos, OpMIPS64OR, t)
6600 v1 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
6601 v2 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
6602 v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
6603 v3.AddArg(y)
6604 v4 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
6605 v4.AuxInt = int64ToAuxInt(63)
6606 v2.AddArg2(v3, v4)
6607 v1.AddArg(v2)
6608 v0.AddArg2(v1, v3)
6609 v.AddArg2(x, v0)
6610 return true
6611 }
6612 }
6613 func rewriteValueMIPS64_OpRsh8Ux16(v *Value) bool {
6614 v_1 := v.Args[1]
6615 v_0 := v.Args[0]
6616 b := v.Block
6617 typ := &b.Func.Config.Types
6618
6619
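// match: (Rsh8Ux16 <t> x y)
// result: (AND (NEGV <t> (SGTU (MOVVconst [64]) (ZeroExt16to64 y))) (SRLV <t> (ZeroExt8to64 x) (ZeroExt16to64 y)))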
6620 for {
6621 t := v.Type
6622 x := v_0
6623 y := v_1
6624 v.reset(OpMIPS64AND)
6625 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
6626 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
6627 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
6628 v2.AuxInt = int64ToAuxInt(64)
6629 v3 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
6630 v3.AddArg(y)
6631 v1.AddArg2(v2, v3)
6632 v0.AddArg(v1)
6633 v4 := b.NewValue0(v.Pos, OpMIPS64SRLV, t)
6634 v5 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
6635 v5.AddArg(x)
6636 v4.AddArg2(v5, v3)
6637 v.AddArg2(v0, v4)
6638 return true
6639 }
6640 }
6641 func rewriteValueMIPS64_OpRsh8Ux32(v *Value) bool {
6642 v_1 := v.Args[1]
6643 v_0 := v.Args[0]
6644 b := v.Block
6645 typ := &b.Func.Config.Types
6646
6647
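// match: (Rsh8Ux32 <t> x y)
// result: (AND (NEGV <t> (SGTU (MOVVconst [64]) (ZeroExt32to64 y))) (SRLV <t> (ZeroExt8to64 x) (ZeroExt32to64 y)))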
6648 for {
6649 t := v.Type
6650 x := v_0
6651 y := v_1
6652 v.reset(OpMIPS64AND)
6653 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
6654 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
6655 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
6656 v2.AuxInt = int64ToAuxInt(64)
6657 v3 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
6658 v3.AddArg(y)
6659 v1.AddArg2(v2, v3)
6660 v0.AddArg(v1)
6661 v4 := b.NewValue0(v.Pos, OpMIPS64SRLV, t)
6662 v5 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
6663 v5.AddArg(x)
6664 v4.AddArg2(v5, v3)
6665 v.AddArg2(v0, v4)
6666 return true
6667 }
6668 }
6669 func rewriteValueMIPS64_OpRsh8Ux64(v *Value) bool {
6670 v_1 := v.Args[1]
6671 v_0 := v.Args[0]
6672 b := v.Block
6673 typ := &b.Func.Config.Types
6674
6675
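// match: (Rsh8Ux64 <t> x y)
// result: (AND (NEGV <t> (SGTU (MOVVconst [64]) y)) (SRLV <t> (ZeroExt8to64 x) y))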
6676 for {
6677 t := v.Type
6678 x := v_0
6679 y := v_1
6680 v.reset(OpMIPS64AND)
6681 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
6682 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
6683 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
6684 v2.AuxInt = int64ToAuxInt(64)
6685 v1.AddArg2(v2, y)
6686 v0.AddArg(v1)
6687 v3 := b.NewValue0(v.Pos, OpMIPS64SRLV, t)
6688 v4 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
6689 v4.AddArg(x)
6690 v3.AddArg2(v4, y)
6691 v.AddArg2(v0, v3)
6692 return true
6693 }
6694 }
6695 func rewriteValueMIPS64_OpRsh8Ux8(v *Value) bool {
6696 v_1 := v.Args[1]
6697 v_0 := v.Args[0]
6698 b := v.Block
6699 typ := &b.Func.Config.Types
6700
6701
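// match: (Rsh8Ux8 <t> x y)
// result: (AND (NEGV <t> (SGTU (MOVVconst [64]) (ZeroExt8to64 y))) (SRLV <t> (ZeroExt8to64 x) (ZeroExt8to64 y)))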
6702 for {
6703 t := v.Type
6704 x := v_0
6705 y := v_1
6706 v.reset(OpMIPS64AND)
6707 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
6708 v1 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
6709 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
6710 v2.AuxInt = int64ToAuxInt(64)
6711 v3 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
6712 v3.AddArg(y)
6713 v1.AddArg2(v2, v3)
6714 v0.AddArg(v1)
6715 v4 := b.NewValue0(v.Pos, OpMIPS64SRLV, t)
6716 v5 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
6717 v5.AddArg(x)
6718 v4.AddArg2(v5, v3)
6719 v.AddArg2(v0, v4)
6720 return true
6721 }
6722 }
6723 func rewriteValueMIPS64_OpRsh8x16(v *Value) bool {
6724 v_1 := v.Args[1]
6725 v_0 := v.Args[0]
6726 b := v.Block
6727 typ := &b.Func.Config.Types
6728
6729
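// match: (Rsh8x16 <t> x y)
// result: (SRAV (SignExt8to64 x) (OR <t> (NEGV <t> (SGTU (ZeroExt16to64 y) (MOVVconst [63]))) (ZeroExt16to64 y)))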
6730 for {
6731 t := v.Type
6732 x := v_0
6733 y := v_1
6734 v.reset(OpMIPS64SRAV)
6735 v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
6736 v0.AddArg(x)
6737 v1 := b.NewValue0(v.Pos, OpMIPS64OR, t)
6738 v2 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
6739 v3 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
6740 v4 := b.NewValue0(v.Pos, OpZeroExt16to64, typ.UInt64)
6741 v4.AddArg(y)
6742 v5 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
6743 v5.AuxInt = int64ToAuxInt(63)
6744 v3.AddArg2(v4, v5)
6745 v2.AddArg(v3)
6746 v1.AddArg2(v2, v4)
6747 v.AddArg2(v0, v1)
6748 return true
6749 }
6750 }
6751 func rewriteValueMIPS64_OpRsh8x32(v *Value) bool {
6752 v_1 := v.Args[1]
6753 v_0 := v.Args[0]
6754 b := v.Block
6755 typ := &b.Func.Config.Types
6756
6757
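// match: (Rsh8x32 <t> x y)
// result: (SRAV (SignExt8to64 x) (OR <t> (NEGV <t> (SGTU (ZeroExt32to64 y) (MOVVconst [63]))) (ZeroExt32to64 y)))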
6758 for {
6759 t := v.Type
6760 x := v_0
6761 y := v_1
6762 v.reset(OpMIPS64SRAV)
6763 v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
6764 v0.AddArg(x)
6765 v1 := b.NewValue0(v.Pos, OpMIPS64OR, t)
6766 v2 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
6767 v3 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
6768 v4 := b.NewValue0(v.Pos, OpZeroExt32to64, typ.UInt64)
6769 v4.AddArg(y)
6770 v5 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
6771 v5.AuxInt = int64ToAuxInt(63)
6772 v3.AddArg2(v4, v5)
6773 v2.AddArg(v3)
6774 v1.AddArg2(v2, v4)
6775 v.AddArg2(v0, v1)
6776 return true
6777 }
6778 }
6779 func rewriteValueMIPS64_OpRsh8x64(v *Value) bool {
6780 v_1 := v.Args[1]
6781 v_0 := v.Args[0]
6782 b := v.Block
6783 typ := &b.Func.Config.Types
6784
6785
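// match: (Rsh8x64 <t> x y)
// result: (SRAV (SignExt8to64 x) (OR <t> (NEGV <t> (SGTU y (MOVVconst [63]))) y))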
6786 for {
6787 t := v.Type
6788 x := v_0
6789 y := v_1
6790 v.reset(OpMIPS64SRAV)
6791 v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
6792 v0.AddArg(x)
6793 v1 := b.NewValue0(v.Pos, OpMIPS64OR, t)
6794 v2 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
6795 v3 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
6796 v4 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
6797 v4.AuxInt = int64ToAuxInt(63)
6798 v3.AddArg2(y, v4)
6799 v2.AddArg(v3)
6800 v1.AddArg2(v2, y)
6801 v.AddArg2(v0, v1)
6802 return true
6803 }
6804 }
6805 func rewriteValueMIPS64_OpRsh8x8(v *Value) bool {
6806 v_1 := v.Args[1]
6807 v_0 := v.Args[0]
6808 b := v.Block
6809 typ := &b.Func.Config.Types
6810
6811
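// match: (Rsh8x8 <t> x y)
// result: (SRAV (SignExt8to64 x) (OR <t> (NEGV <t> (SGTU (ZeroExt8to64 y) (MOVVconst [63]))) (ZeroExt8to64 y)))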
6812 for {
6813 t := v.Type
6814 x := v_0
6815 y := v_1
6816 v.reset(OpMIPS64SRAV)
6817 v0 := b.NewValue0(v.Pos, OpSignExt8to64, typ.Int64)
6818 v0.AddArg(x)
6819 v1 := b.NewValue0(v.Pos, OpMIPS64OR, t)
6820 v2 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
6821 v3 := b.NewValue0(v.Pos, OpMIPS64SGTU, typ.Bool)
6822 v4 := b.NewValue0(v.Pos, OpZeroExt8to64, typ.UInt64)
6823 v4.AddArg(y)
6824 v5 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
6825 v5.AuxInt = int64ToAuxInt(63)
6826 v3.AddArg2(v4, v5)
6827 v2.AddArg(v3)
6828 v1.AddArg2(v2, v4)
6829 v.AddArg2(v0, v1)
6830 return true
6831 }
6832 }
6833 func rewriteValueMIPS64_OpSelect0(v *Value) bool {
6834 v_0 := v.Args[0]
6835 b := v.Block
6836 typ := &b.Func.Config.Types
6837
6838
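// match: (Select0 (Mul64uover x y))
// result: (Select1 <typ.UInt64> (MULVU x y))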
6839 for {
6840 if v_0.Op != OpMul64uover {
6841 break
6842 }
6843 y := v_0.Args[1]
6844 x := v_0.Args[0]
6845 v.reset(OpSelect1)
6846 v.Type = typ.UInt64
6847 v0 := b.NewValue0(v.Pos, OpMIPS64MULVU, types.NewTuple(typ.UInt64, typ.UInt64))
6848 v0.AddArg2(x, y)
6849 v.AddArg(v0)
6850 return true
6851 }
6852
6853
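// match: (Select0 (DIVVU _ (MOVVconst [1])))
// result: (MOVVconst [0])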
6854 for {
6855 if v_0.Op != OpMIPS64DIVVU {
6856 break
6857 }
6858 _ = v_0.Args[1]
6859 v_0_1 := v_0.Args[1]
6860 if v_0_1.Op != OpMIPS64MOVVconst || auxIntToInt64(v_0_1.AuxInt) != 1 {
6861 break
6862 }
6863 v.reset(OpMIPS64MOVVconst)
6864 v.AuxInt = int64ToAuxInt(0)
6865 return true
6866 }
6867
6868
6869
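// match: (Select0 (DIVVU x (MOVVconst [c])))
// cond: isPowerOfTwo64(c)
// result: (ANDconst [c-1] x)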
6870 for {
6871 if v_0.Op != OpMIPS64DIVVU {
6872 break
6873 }
6874 _ = v_0.Args[1]
6875 x := v_0.Args[0]
6876 v_0_1 := v_0.Args[1]
6877 if v_0_1.Op != OpMIPS64MOVVconst {
6878 break
6879 }
6880 c := auxIntToInt64(v_0_1.AuxInt)
6881 if !(isPowerOfTwo64(c)) {
6882 break
6883 }
6884 v.reset(OpMIPS64ANDconst)
6885 v.AuxInt = int64ToAuxInt(c - 1)
6886 v.AddArg(x)
6887 return true
6888 }
6889
6890
6891
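// match: (Select0 (DIVV (MOVVconst [c]) (MOVVconst [d])))
// cond: d != 0
// result: (MOVVconst [c%d])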
6892 for {
6893 if v_0.Op != OpMIPS64DIVV {
6894 break
6895 }
6896 _ = v_0.Args[1]
6897 v_0_0 := v_0.Args[0]
6898 if v_0_0.Op != OpMIPS64MOVVconst {
6899 break
6900 }
6901 c := auxIntToInt64(v_0_0.AuxInt)
6902 v_0_1 := v_0.Args[1]
6903 if v_0_1.Op != OpMIPS64MOVVconst {
6904 break
6905 }
6906 d := auxIntToInt64(v_0_1.AuxInt)
6907 if !(d != 0) {
6908 break
6909 }
6910 v.reset(OpMIPS64MOVVconst)
6911 v.AuxInt = int64ToAuxInt(c % d)
6912 return true
6913 }
6914
6915
6916
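// match: (Select0 (DIVVU (MOVVconst [c]) (MOVVconst [d])))
// cond: d != 0
// result: (MOVVconst [int64(uint64(c)%uint64(d))])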
6917 for {
6918 if v_0.Op != OpMIPS64DIVVU {
6919 break
6920 }
6921 _ = v_0.Args[1]
6922 v_0_0 := v_0.Args[0]
6923 if v_0_0.Op != OpMIPS64MOVVconst {
6924 break
6925 }
6926 c := auxIntToInt64(v_0_0.AuxInt)
6927 v_0_1 := v_0.Args[1]
6928 if v_0_1.Op != OpMIPS64MOVVconst {
6929 break
6930 }
6931 d := auxIntToInt64(v_0_1.AuxInt)
6932 if !(d != 0) {
6933 break
6934 }
6935 v.reset(OpMIPS64MOVVconst)
6936 v.AuxInt = int64ToAuxInt(int64(uint64(c) % uint64(d)))
6937 return true
6938 }
6939 return false
6940 }
6941 func rewriteValueMIPS64_OpSelect1(v *Value) bool {
6942 v_0 := v.Args[0]
6943 b := v.Block
6944 typ := &b.Func.Config.Types
6945
6946
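// match: (Select1 (Mul64uover x y))
// result: (SGTU <typ.Bool> (Select0 <typ.UInt64> (MULVU x y)) (MOVVconst [0]))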
6947 for {
6948 if v_0.Op != OpMul64uover {
6949 break
6950 }
6951 y := v_0.Args[1]
6952 x := v_0.Args[0]
6953 v.reset(OpMIPS64SGTU)
6954 v.Type = typ.Bool
6955 v0 := b.NewValue0(v.Pos, OpSelect0, typ.UInt64)
6956 v1 := b.NewValue0(v.Pos, OpMIPS64MULVU, types.NewTuple(typ.UInt64, typ.UInt64))
6957 v1.AddArg2(x, y)
6958 v0.AddArg(v1)
6959 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
6960 v2.AuxInt = int64ToAuxInt(0)
6961 v.AddArg2(v0, v2)
6962 return true
6963 }
6964
6965
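// match: (Select1 (MULVU x (MOVVconst [-1])))
// result: (NEGV x)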
6966 for {
6967 if v_0.Op != OpMIPS64MULVU {
6968 break
6969 }
6970 _ = v_0.Args[1]
6971 v_0_0 := v_0.Args[0]
6972 v_0_1 := v_0.Args[1]
6973 for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
6974 x := v_0_0
6975 if v_0_1.Op != OpMIPS64MOVVconst || auxIntToInt64(v_0_1.AuxInt) != -1 {
6976 continue
6977 }
6978 v.reset(OpMIPS64NEGV)
6979 v.AddArg(x)
6980 return true
6981 }
6982 break
6983 }
6984
6985
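// match: (Select1 (MULVU _ (MOVVconst [0])))
// result: (MOVVconst [0])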
6986 for {
6987 if v_0.Op != OpMIPS64MULVU {
6988 break
6989 }
6990 _ = v_0.Args[1]
6991 v_0_0 := v_0.Args[0]
6992 v_0_1 := v_0.Args[1]
6993 for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
6994 if v_0_1.Op != OpMIPS64MOVVconst || auxIntToInt64(v_0_1.AuxInt) != 0 {
6995 continue
6996 }
6997 v.reset(OpMIPS64MOVVconst)
6998 v.AuxInt = int64ToAuxInt(0)
6999 return true
7000 }
7001 break
7002 }
7003
7004
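// match: (Select1 (MULVU x (MOVVconst [1])))
// result: x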
7005 for {
7006 if v_0.Op != OpMIPS64MULVU {
7007 break
7008 }
7009 _ = v_0.Args[1]
7010 v_0_0 := v_0.Args[0]
7011 v_0_1 := v_0.Args[1]
7012 for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
7013 x := v_0_0
7014 if v_0_1.Op != OpMIPS64MOVVconst || auxIntToInt64(v_0_1.AuxInt) != 1 {
7015 continue
7016 }
7017 v.copyOf(x)
7018 return true
7019 }
7020 break
7021 }
7022
7023
7024
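// match: (Select1 (MULVU x (MOVVconst [c])))
// cond: isPowerOfTwo64(c)
// result: (SLLVconst [log64(c)] x)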
7025 for {
7026 if v_0.Op != OpMIPS64MULVU {
7027 break
7028 }
7029 _ = v_0.Args[1]
7030 v_0_0 := v_0.Args[0]
7031 v_0_1 := v_0.Args[1]
7032 for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
7033 x := v_0_0
7034 if v_0_1.Op != OpMIPS64MOVVconst {
7035 continue
7036 }
7037 c := auxIntToInt64(v_0_1.AuxInt)
7038 if !(isPowerOfTwo64(c)) {
7039 continue
7040 }
7041 v.reset(OpMIPS64SLLVconst)
7042 v.AuxInt = int64ToAuxInt(log64(c))
7043 v.AddArg(x)
7044 return true
7045 }
7046 break
7047 }
7048
7049
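// match: (Select1 (DIVVU x (MOVVconst [1])))
// result: x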
7050 for {
7051 if v_0.Op != OpMIPS64DIVVU {
7052 break
7053 }
7054 _ = v_0.Args[1]
7055 x := v_0.Args[0]
7056 v_0_1 := v_0.Args[1]
7057 if v_0_1.Op != OpMIPS64MOVVconst || auxIntToInt64(v_0_1.AuxInt) != 1 {
7058 break
7059 }
7060 v.copyOf(x)
7061 return true
7062 }
7063
7064
7065
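// match: (Select1 (DIVVU x (MOVVconst [c])))
// cond: isPowerOfTwo64(c)
// result: (SRLVconst [log64(c)] x)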
7066 for {
7067 if v_0.Op != OpMIPS64DIVVU {
7068 break
7069 }
7070 _ = v_0.Args[1]
7071 x := v_0.Args[0]
7072 v_0_1 := v_0.Args[1]
7073 if v_0_1.Op != OpMIPS64MOVVconst {
7074 break
7075 }
7076 c := auxIntToInt64(v_0_1.AuxInt)
7077 if !(isPowerOfTwo64(c)) {
7078 break
7079 }
7080 v.reset(OpMIPS64SRLVconst)
7081 v.AuxInt = int64ToAuxInt(log64(c))
7082 v.AddArg(x)
7083 return true
7084 }
7085
7086
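// match: (Select1 (MULVU (MOVVconst [c]) (MOVVconst [d])))
// result: (MOVVconst [c*d])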
7087 for {
7088 if v_0.Op != OpMIPS64MULVU {
7089 break
7090 }
7091 _ = v_0.Args[1]
7092 v_0_0 := v_0.Args[0]
7093 v_0_1 := v_0.Args[1]
7094 for _i0 := 0; _i0 <= 1; _i0, v_0_0, v_0_1 = _i0+1, v_0_1, v_0_0 {
7095 if v_0_0.Op != OpMIPS64MOVVconst {
7096 continue
7097 }
7098 c := auxIntToInt64(v_0_0.AuxInt)
7099 if v_0_1.Op != OpMIPS64MOVVconst {
7100 continue
7101 }
7102 d := auxIntToInt64(v_0_1.AuxInt)
7103 v.reset(OpMIPS64MOVVconst)
7104 v.AuxInt = int64ToAuxInt(c * d)
7105 return true
7106 }
7107 break
7108 }
7109
7110
7111
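// match: (Select1 (DIVV (MOVVconst [c]) (MOVVconst [d])))
// cond: d != 0
// result: (MOVVconst [c/d])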
7112 for {
7113 if v_0.Op != OpMIPS64DIVV {
7114 break
7115 }
7116 _ = v_0.Args[1]
7117 v_0_0 := v_0.Args[0]
7118 if v_0_0.Op != OpMIPS64MOVVconst {
7119 break
7120 }
7121 c := auxIntToInt64(v_0_0.AuxInt)
7122 v_0_1 := v_0.Args[1]
7123 if v_0_1.Op != OpMIPS64MOVVconst {
7124 break
7125 }
7126 d := auxIntToInt64(v_0_1.AuxInt)
7127 if !(d != 0) {
7128 break
7129 }
7130 v.reset(OpMIPS64MOVVconst)
7131 v.AuxInt = int64ToAuxInt(c / d)
7132 return true
7133 }
7134
7135
7136
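// match: (Select1 (DIVVU (MOVVconst [c]) (MOVVconst [d])))
// cond: d != 0
// result: (MOVVconst [int64(uint64(c)/uint64(d))])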
7137 for {
7138 if v_0.Op != OpMIPS64DIVVU {
7139 break
7140 }
7141 _ = v_0.Args[1]
7142 v_0_0 := v_0.Args[0]
7143 if v_0_0.Op != OpMIPS64MOVVconst {
7144 break
7145 }
7146 c := auxIntToInt64(v_0_0.AuxInt)
7147 v_0_1 := v_0.Args[1]
7148 if v_0_1.Op != OpMIPS64MOVVconst {
7149 break
7150 }
7151 d := auxIntToInt64(v_0_1.AuxInt)
7152 if !(d != 0) {
7153 break
7154 }
7155 v.reset(OpMIPS64MOVVconst)
7156 v.AuxInt = int64ToAuxInt(int64(uint64(c) / uint64(d)))
7157 return true
7158 }
7159 return false
7160 }
7161 func rewriteValueMIPS64_OpSlicemask(v *Value) bool {
7162 v_0 := v.Args[0]
7163 b := v.Block
7164
7165
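// match: (Slicemask <t> x)
// result: (SRAVconst (NEGV <t> x) [63])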
7166 for {
7167 t := v.Type
7168 x := v_0
7169 v.reset(OpMIPS64SRAVconst)
7170 v.AuxInt = int64ToAuxInt(63)
7171 v0 := b.NewValue0(v.Pos, OpMIPS64NEGV, t)
7172 v0.AddArg(x)
7173 v.AddArg(v0)
7174 return true
7175 }
7176 }
7177 func rewriteValueMIPS64_OpStore(v *Value) bool {
7178 v_2 := v.Args[2]
7179 v_1 := v.Args[1]
7180 v_0 := v.Args[0]
7181
7182
7183
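// match: (Store {t} ptr val mem)
// cond: t.Size() == 1
// result: (MOVBstore ptr val mem)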
7184 for {
7185 t := auxToType(v.Aux)
7186 ptr := v_0
7187 val := v_1
7188 mem := v_2
7189 if !(t.Size() == 1) {
7190 break
7191 }
7192 v.reset(OpMIPS64MOVBstore)
7193 v.AddArg3(ptr, val, mem)
7194 return true
7195 }
7196
7197
7198
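// match: (Store {t} ptr val mem)
// cond: t.Size() == 2
// result: (MOVHstore ptr val mem)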
7199 for {
7200 t := auxToType(v.Aux)
7201 ptr := v_0
7202 val := v_1
7203 mem := v_2
7204 if !(t.Size() == 2) {
7205 break
7206 }
7207 v.reset(OpMIPS64MOVHstore)
7208 v.AddArg3(ptr, val, mem)
7209 return true
7210 }
7211
7212
7213
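// match: (Store {t} ptr val mem)
// cond: t.Size() == 4 && !is32BitFloat(val.Type)
// result: (MOVWstore ptr val mem)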
7214 for {
7215 t := auxToType(v.Aux)
7216 ptr := v_0
7217 val := v_1
7218 mem := v_2
7219 if !(t.Size() == 4 && !is32BitFloat(val.Type)) {
7220 break
7221 }
7222 v.reset(OpMIPS64MOVWstore)
7223 v.AddArg3(ptr, val, mem)
7224 return true
7225 }
7226
7227
7228
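// match: (Store {t} ptr val mem)
// cond: t.Size() == 8 && !is64BitFloat(val.Type)
// result: (MOVVstore ptr val mem)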
7229 for {
7230 t := auxToType(v.Aux)
7231 ptr := v_0
7232 val := v_1
7233 mem := v_2
7234 if !(t.Size() == 8 && !is64BitFloat(val.Type)) {
7235 break
7236 }
7237 v.reset(OpMIPS64MOVVstore)
7238 v.AddArg3(ptr, val, mem)
7239 return true
7240 }
7241
7242
7243
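// match: (Store {t} ptr val mem)
// cond: t.Size() == 4 && is32BitFloat(val.Type)
// result: (MOVFstore ptr val mem)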
7244 for {
7245 t := auxToType(v.Aux)
7246 ptr := v_0
7247 val := v_1
7248 mem := v_2
7249 if !(t.Size() == 4 && is32BitFloat(val.Type)) {
7250 break
7251 }
7252 v.reset(OpMIPS64MOVFstore)
7253 v.AddArg3(ptr, val, mem)
7254 return true
7255 }
7256
7257
7258
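// match: (Store {t} ptr val mem)
// cond: t.Size() == 8 && is64BitFloat(val.Type)
// result: (MOVDstore ptr val mem)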
7259 for {
7260 t := auxToType(v.Aux)
7261 ptr := v_0
7262 val := v_1
7263 mem := v_2
7264 if !(t.Size() == 8 && is64BitFloat(val.Type)) {
7265 break
7266 }
7267 v.reset(OpMIPS64MOVDstore)
7268 v.AddArg3(ptr, val, mem)
7269 return true
7270 }
7271 return false
7272 }
7273 func rewriteValueMIPS64_OpZero(v *Value) bool {
7274 v_1 := v.Args[1]
7275 v_0 := v.Args[0]
7276 b := v.Block
7277 config := b.Func.Config
7278 typ := &b.Func.Config.Types
7279
7280
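// match: (Zero [0] _ mem)
// result: mem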
7281 for {
7282 if auxIntToInt64(v.AuxInt) != 0 {
7283 break
7284 }
7285 mem := v_1
7286 v.copyOf(mem)
7287 return true
7288 }
7289
7290
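// match: (Zero [1] ptr mem)
// result: (MOVBstore ptr (MOVVconst [0]) mem)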
7291 for {
7292 if auxIntToInt64(v.AuxInt) != 1 {
7293 break
7294 }
7295 ptr := v_0
7296 mem := v_1
7297 v.reset(OpMIPS64MOVBstore)
7298 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
7299 v0.AuxInt = int64ToAuxInt(0)
7300 v.AddArg3(ptr, v0, mem)
7301 return true
7302 }
7303
7304
7305
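// match: (Zero [2] {t} ptr mem)
// cond: t.Alignment()%2 == 0
// result: (MOVHstore ptr (MOVVconst [0]) mem)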
7306 for {
7307 if auxIntToInt64(v.AuxInt) != 2 {
7308 break
7309 }
7310 t := auxToType(v.Aux)
7311 ptr := v_0
7312 mem := v_1
7313 if !(t.Alignment()%2 == 0) {
7314 break
7315 }
7316 v.reset(OpMIPS64MOVHstore)
7317 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
7318 v0.AuxInt = int64ToAuxInt(0)
7319 v.AddArg3(ptr, v0, mem)
7320 return true
7321 }
7322
7323
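// match: (Zero [2] ptr mem)
// result: (MOVBstore [1] ptr (MOVVconst [0]) (MOVBstore [0] ptr (MOVVconst [0]) mem))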
7324 for {
7325 if auxIntToInt64(v.AuxInt) != 2 {
7326 break
7327 }
7328 ptr := v_0
7329 mem := v_1
7330 v.reset(OpMIPS64MOVBstore)
7331 v.AuxInt = int32ToAuxInt(1)
7332 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
7333 v0.AuxInt = int64ToAuxInt(0)
7334 v1 := b.NewValue0(v.Pos, OpMIPS64MOVBstore, types.TypeMem)
7335 v1.AuxInt = int32ToAuxInt(0)
7336 v1.AddArg3(ptr, v0, mem)
7337 v.AddArg3(ptr, v0, v1)
7338 return true
7339 }
7340
7341
7342
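// match: (Zero [4] {t} ptr mem)
// cond: t.Alignment()%4 == 0
// result: (MOVWstore ptr (MOVVconst [0]) mem)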
7343 for {
7344 if auxIntToInt64(v.AuxInt) != 4 {
7345 break
7346 }
7347 t := auxToType(v.Aux)
7348 ptr := v_0
7349 mem := v_1
7350 if !(t.Alignment()%4 == 0) {
7351 break
7352 }
7353 v.reset(OpMIPS64MOVWstore)
7354 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
7355 v0.AuxInt = int64ToAuxInt(0)
7356 v.AddArg3(ptr, v0, mem)
7357 return true
7358 }
7359
7360
7361
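// match: (Zero [4] {t} ptr mem)
// cond: t.Alignment()%2 == 0
// result: (MOVHstore [2] ptr (MOVVconst [0]) (MOVHstore [0] ptr (MOVVconst [0]) mem))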
7362 for {
7363 if auxIntToInt64(v.AuxInt) != 4 {
7364 break
7365 }
7366 t := auxToType(v.Aux)
7367 ptr := v_0
7368 mem := v_1
7369 if !(t.Alignment()%2 == 0) {
7370 break
7371 }
7372 v.reset(OpMIPS64MOVHstore)
7373 v.AuxInt = int32ToAuxInt(2)
7374 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
7375 v0.AuxInt = int64ToAuxInt(0)
7376 v1 := b.NewValue0(v.Pos, OpMIPS64MOVHstore, types.TypeMem)
7377 v1.AuxInt = int32ToAuxInt(0)
7378 v1.AddArg3(ptr, v0, mem)
7379 v.AddArg3(ptr, v0, v1)
7380 return true
7381 }
7382
7383
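// match: (Zero [4] ptr mem)
// result: (MOVBstore [3] ptr (MOVVconst [0]) (MOVBstore [2] ptr (MOVVconst [0]) (MOVBstore [1] ptr (MOVVconst [0]) (MOVBstore [0] ptr (MOVVconst [0]) mem))))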
7384 for {
7385 if auxIntToInt64(v.AuxInt) != 4 {
7386 break
7387 }
7388 ptr := v_0
7389 mem := v_1
7390 v.reset(OpMIPS64MOVBstore)
7391 v.AuxInt = int32ToAuxInt(3)
7392 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
7393 v0.AuxInt = int64ToAuxInt(0)
7394 v1 := b.NewValue0(v.Pos, OpMIPS64MOVBstore, types.TypeMem)
7395 v1.AuxInt = int32ToAuxInt(2)
7396 v2 := b.NewValue0(v.Pos, OpMIPS64MOVBstore, types.TypeMem)
7397 v2.AuxInt = int32ToAuxInt(1)
7398 v3 := b.NewValue0(v.Pos, OpMIPS64MOVBstore, types.TypeMem)
7399 v3.AuxInt = int32ToAuxInt(0)
7400 v3.AddArg3(ptr, v0, mem)
7401 v2.AddArg3(ptr, v0, v3)
7402 v1.AddArg3(ptr, v0, v2)
7403 v.AddArg3(ptr, v0, v1)
7404 return true
7405 }
7406
7407
7408
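// match: (Zero [8] {t} ptr mem)
// cond: t.Alignment()%8 == 0
// result: (MOVVstore ptr (MOVVconst [0]) mem)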
7409 for {
7410 if auxIntToInt64(v.AuxInt) != 8 {
7411 break
7412 }
7413 t := auxToType(v.Aux)
7414 ptr := v_0
7415 mem := v_1
7416 if !(t.Alignment()%8 == 0) {
7417 break
7418 }
7419 v.reset(OpMIPS64MOVVstore)
7420 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
7421 v0.AuxInt = int64ToAuxInt(0)
7422 v.AddArg3(ptr, v0, mem)
7423 return true
7424 }
7425
7426
7427
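// match: (Zero [8] {t} ptr mem)
// cond: t.Alignment()%4 == 0
// result: (MOVWstore [4] ptr (MOVVconst [0]) (MOVWstore [0] ptr (MOVVconst [0]) mem))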
7428 for {
7429 if auxIntToInt64(v.AuxInt) != 8 {
7430 break
7431 }
7432 t := auxToType(v.Aux)
7433 ptr := v_0
7434 mem := v_1
7435 if !(t.Alignment()%4 == 0) {
7436 break
7437 }
7438 v.reset(OpMIPS64MOVWstore)
7439 v.AuxInt = int32ToAuxInt(4)
7440 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
7441 v0.AuxInt = int64ToAuxInt(0)
7442 v1 := b.NewValue0(v.Pos, OpMIPS64MOVWstore, types.TypeMem)
7443 v1.AuxInt = int32ToAuxInt(0)
7444 v1.AddArg3(ptr, v0, mem)
7445 v.AddArg3(ptr, v0, v1)
7446 return true
7447 }
7448
7449
7450
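// match: (Zero [8] {t} ptr mem)
// cond: t.Alignment()%2 == 0
// result: (MOVHstore [6] ptr (MOVVconst [0]) (MOVHstore [4] ptr (MOVVconst [0]) (MOVHstore [2] ptr (MOVVconst [0]) (MOVHstore [0] ptr (MOVVconst [0]) mem))))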
7451 for {
7452 if auxIntToInt64(v.AuxInt) != 8 {
7453 break
7454 }
7455 t := auxToType(v.Aux)
7456 ptr := v_0
7457 mem := v_1
7458 if !(t.Alignment()%2 == 0) {
7459 break
7460 }
7461 v.reset(OpMIPS64MOVHstore)
7462 v.AuxInt = int32ToAuxInt(6)
7463 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
7464 v0.AuxInt = int64ToAuxInt(0)
7465 v1 := b.NewValue0(v.Pos, OpMIPS64MOVHstore, types.TypeMem)
7466 v1.AuxInt = int32ToAuxInt(4)
7467 v2 := b.NewValue0(v.Pos, OpMIPS64MOVHstore, types.TypeMem)
7468 v2.AuxInt = int32ToAuxInt(2)
7469 v3 := b.NewValue0(v.Pos, OpMIPS64MOVHstore, types.TypeMem)
7470 v3.AuxInt = int32ToAuxInt(0)
7471 v3.AddArg3(ptr, v0, mem)
7472 v2.AddArg3(ptr, v0, v3)
7473 v1.AddArg3(ptr, v0, v2)
7474 v.AddArg3(ptr, v0, v1)
7475 return true
7476 }
7477
7478
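// match: (Zero [3] ptr mem)
// result: (MOVBstore [2] ptr (MOVVconst [0]) (MOVBstore [1] ptr (MOVVconst [0]) (MOVBstore [0] ptr (MOVVconst [0]) mem)))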
7479 for {
7480 if auxIntToInt64(v.AuxInt) != 3 {
7481 break
7482 }
7483 ptr := v_0
7484 mem := v_1
7485 v.reset(OpMIPS64MOVBstore)
7486 v.AuxInt = int32ToAuxInt(2)
7487 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
7488 v0.AuxInt = int64ToAuxInt(0)
7489 v1 := b.NewValue0(v.Pos, OpMIPS64MOVBstore, types.TypeMem)
7490 v1.AuxInt = int32ToAuxInt(1)
7491 v2 := b.NewValue0(v.Pos, OpMIPS64MOVBstore, types.TypeMem)
7492 v2.AuxInt = int32ToAuxInt(0)
7493 v2.AddArg3(ptr, v0, mem)
7494 v1.AddArg3(ptr, v0, v2)
7495 v.AddArg3(ptr, v0, v1)
7496 return true
7497 }
7498
7499
7500
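// match: (Zero [6] {t} ptr mem)
// cond: t.Alignment()%2 == 0
// result: (MOVHstore [4] ptr (MOVVconst [0]) (MOVHstore [2] ptr (MOVVconst [0]) (MOVHstore [0] ptr (MOVVconst [0]) mem)))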
7501 for {
7502 if auxIntToInt64(v.AuxInt) != 6 {
7503 break
7504 }
7505 t := auxToType(v.Aux)
7506 ptr := v_0
7507 mem := v_1
7508 if !(t.Alignment()%2 == 0) {
7509 break
7510 }
7511 v.reset(OpMIPS64MOVHstore)
7512 v.AuxInt = int32ToAuxInt(4)
7513 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
7514 v0.AuxInt = int64ToAuxInt(0)
7515 v1 := b.NewValue0(v.Pos, OpMIPS64MOVHstore, types.TypeMem)
7516 v1.AuxInt = int32ToAuxInt(2)
7517 v2 := b.NewValue0(v.Pos, OpMIPS64MOVHstore, types.TypeMem)
7518 v2.AuxInt = int32ToAuxInt(0)
7519 v2.AddArg3(ptr, v0, mem)
7520 v1.AddArg3(ptr, v0, v2)
7521 v.AddArg3(ptr, v0, v1)
7522 return true
7523 }
7524
7525
7526
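// match: (Zero [12] {t} ptr mem)
// cond: t.Alignment()%4 == 0
// result: (MOVWstore [8] ptr (MOVVconst [0]) (MOVWstore [4] ptr (MOVVconst [0]) (MOVWstore [0] ptr (MOVVconst [0]) mem)))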
7527 for {
7528 if auxIntToInt64(v.AuxInt) != 12 {
7529 break
7530 }
7531 t := auxToType(v.Aux)
7532 ptr := v_0
7533 mem := v_1
7534 if !(t.Alignment()%4 == 0) {
7535 break
7536 }
7537 v.reset(OpMIPS64MOVWstore)
7538 v.AuxInt = int32ToAuxInt(8)
7539 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
7540 v0.AuxInt = int64ToAuxInt(0)
7541 v1 := b.NewValue0(v.Pos, OpMIPS64MOVWstore, types.TypeMem)
7542 v1.AuxInt = int32ToAuxInt(4)
7543 v2 := b.NewValue0(v.Pos, OpMIPS64MOVWstore, types.TypeMem)
7544 v2.AuxInt = int32ToAuxInt(0)
7545 v2.AddArg3(ptr, v0, mem)
7546 v1.AddArg3(ptr, v0, v2)
7547 v.AddArg3(ptr, v0, v1)
7548 return true
7549 }
7550
7551
7552
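// match: (Zero [16] {t} ptr mem)
// cond: t.Alignment()%8 == 0
// result: (MOVVstore [8] ptr (MOVVconst [0]) (MOVVstore [0] ptr (MOVVconst [0]) mem))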
7553 for {
7554 if auxIntToInt64(v.AuxInt) != 16 {
7555 break
7556 }
7557 t := auxToType(v.Aux)
7558 ptr := v_0
7559 mem := v_1
7560 if !(t.Alignment()%8 == 0) {
7561 break
7562 }
7563 v.reset(OpMIPS64MOVVstore)
7564 v.AuxInt = int32ToAuxInt(8)
7565 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
7566 v0.AuxInt = int64ToAuxInt(0)
7567 v1 := b.NewValue0(v.Pos, OpMIPS64MOVVstore, types.TypeMem)
7568 v1.AuxInt = int32ToAuxInt(0)
7569 v1.AddArg3(ptr, v0, mem)
7570 v.AddArg3(ptr, v0, v1)
7571 return true
7572 }
7573
7574
7575
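// match: (Zero [24] {t} ptr mem)
// cond: t.Alignment()%8 == 0
// result: (MOVVstore [16] ptr (MOVVconst [0]) (MOVVstore [8] ptr (MOVVconst [0]) (MOVVstore [0] ptr (MOVVconst [0]) mem)))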
7576 for {
7577 if auxIntToInt64(v.AuxInt) != 24 {
7578 break
7579 }
7580 t := auxToType(v.Aux)
7581 ptr := v_0
7582 mem := v_1
7583 if !(t.Alignment()%8 == 0) {
7584 break
7585 }
7586 v.reset(OpMIPS64MOVVstore)
7587 v.AuxInt = int32ToAuxInt(16)
7588 v0 := b.NewValue0(v.Pos, OpMIPS64MOVVconst, typ.UInt64)
7589 v0.AuxInt = int64ToAuxInt(0)
7590 v1 := b.NewValue0(v.Pos, OpMIPS64MOVVstore, types.TypeMem)
7591 v1.AuxInt = int32ToAuxInt(8)
7592 v2 := b.NewValue0(v.Pos, OpMIPS64MOVVstore, types.TypeMem)
7593 v2.AuxInt = int32ToAuxInt(0)
7594 v2.AddArg3(ptr, v0, mem)
7595 v1.AddArg3(ptr, v0, v2)
7596 v.AddArg3(ptr, v0, v1)
7597 return true
7598 }
7599
7600
7601
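// match: (Zero [s] {t} ptr mem)
// cond: s%8 == 0 && s > 24 && s <= 8*128 && t.Alignment()%8 == 0 && !config.noDuffDevice
// result: (DUFFZERO [8 * (128 - s/8)] ptr mem)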
7602 for {
7603 s := auxIntToInt64(v.AuxInt)
7604 t := auxToType(v.Aux)
7605 ptr := v_0
7606 mem := v_1
7607 if !(s%8 == 0 && s > 24 && s <= 8*128 && t.Alignment()%8 == 0 && !config.noDuffDevice) {
7608 break
7609 }
7610 v.reset(OpMIPS64DUFFZERO)
7611 v.AuxInt = int64ToAuxInt(8 * (128 - s/8))
7612 v.AddArg2(ptr, mem)
7613 return true
7614 }
7615
7616
7617
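// match: (Zero [s] {t} ptr mem)
// cond: (s > 8*128 || config.noDuffDevice) || t.Alignment()%8 != 0
// result: (LoweredZero [t.Alignment()] ptr (ADDVconst <ptr.Type> ptr [s-moveSize(t.Alignment(), config)]) mem)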
7618 for {
7619 s := auxIntToInt64(v.AuxInt)
7620 t := auxToType(v.Aux)
7621 ptr := v_0
7622 mem := v_1
7623 if !((s > 8*128 || config.noDuffDevice) || t.Alignment()%8 != 0) {
7624 break
7625 }
7626 v.reset(OpMIPS64LoweredZero)
7627 v.AuxInt = int64ToAuxInt(t.Alignment())
7628 v0 := b.NewValue0(v.Pos, OpMIPS64ADDVconst, ptr.Type)
7629 v0.AuxInt = int64ToAuxInt(s - moveSize(t.Alignment(), config))
7630 v0.AddArg(ptr)
7631 v.AddArg3(ptr, v0, mem)
7632 return true
7633 }
7634 return false
7635 }
7636 func rewriteBlockMIPS64(b *Block) bool {
7637 switch b.Kind {
7638 case BlockMIPS64EQ:
7639
7640
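// match: (EQ (FPFlagTrue cmp) yes no)
// result: (FPF cmp yes no)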
7641 for b.Controls[0].Op == OpMIPS64FPFlagTrue {
7642 v_0 := b.Controls[0]
7643 cmp := v_0.Args[0]
7644 b.resetWithControl(BlockMIPS64FPF, cmp)
7645 return true
7646 }
7647
7648
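// match: (EQ (FPFlagFalse cmp) yes no)
// result: (FPT cmp yes no)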
7649 for b.Controls[0].Op == OpMIPS64FPFlagFalse {
7650 v_0 := b.Controls[0]
7651 cmp := v_0.Args[0]
7652 b.resetWithControl(BlockMIPS64FPT, cmp)
7653 return true
7654 }
7655
7656
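// match: (EQ (XORconst [1] cmp:(SGT _ _)) yes no)
// result: (NE cmp yes no)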
7657 for b.Controls[0].Op == OpMIPS64XORconst {
7658 v_0 := b.Controls[0]
7659 if auxIntToInt64(v_0.AuxInt) != 1 {
7660 break
7661 }
7662 cmp := v_0.Args[0]
7663 if cmp.Op != OpMIPS64SGT {
7664 break
7665 }
7666 b.resetWithControl(BlockMIPS64NE, cmp)
7667 return true
7668 }
7669
7670
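// match: (EQ (XORconst [1] cmp:(SGTU _ _)) yes no)
// result: (NE cmp yes no)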
7671 for b.Controls[0].Op == OpMIPS64XORconst {
7672 v_0 := b.Controls[0]
7673 if auxIntToInt64(v_0.AuxInt) != 1 {
7674 break
7675 }
7676 cmp := v_0.Args[0]
7677 if cmp.Op != OpMIPS64SGTU {
7678 break
7679 }
7680 b.resetWithControl(BlockMIPS64NE, cmp)
7681 return true
7682 }
7683
7684
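// match: (EQ (XORconst [1] cmp:(SGTconst _)) yes no)
// result: (NE cmp yes no)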
7685 for b.Controls[0].Op == OpMIPS64XORconst {
7686 v_0 := b.Controls[0]
7687 if auxIntToInt64(v_0.AuxInt) != 1 {
7688 break
7689 }
7690 cmp := v_0.Args[0]
7691 if cmp.Op != OpMIPS64SGTconst {
7692 break
7693 }
7694 b.resetWithControl(BlockMIPS64NE, cmp)
7695 return true
7696 }
7697
7698
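// match: (EQ (XORconst [1] cmp:(SGTUconst _)) yes no)
// result: (NE cmp yes no)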
7699 for b.Controls[0].Op == OpMIPS64XORconst {
7700 v_0 := b.Controls[0]
7701 if auxIntToInt64(v_0.AuxInt) != 1 {
7702 break
7703 }
7704 cmp := v_0.Args[0]
7705 if cmp.Op != OpMIPS64SGTUconst {
7706 break
7707 }
7708 b.resetWithControl(BlockMIPS64NE, cmp)
7709 return true
7710 }
7711
7712
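// match: (EQ (SGTUconst [1] x) yes no)
// result: (NE x yes no)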
7713 for b.Controls[0].Op == OpMIPS64SGTUconst {
7714 v_0 := b.Controls[0]
7715 if auxIntToInt64(v_0.AuxInt) != 1 {
7716 break
7717 }
7718 x := v_0.Args[0]
7719 b.resetWithControl(BlockMIPS64NE, x)
7720 return true
7721 }
7722
7723
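// match: (EQ (SGTU x (MOVVconst [0])) yes no)
// result: (EQ x yes no)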
7724 for b.Controls[0].Op == OpMIPS64SGTU {
7725 v_0 := b.Controls[0]
7726 _ = v_0.Args[1]
7727 x := v_0.Args[0]
7728 v_0_1 := v_0.Args[1]
7729 if v_0_1.Op != OpMIPS64MOVVconst || auxIntToInt64(v_0_1.AuxInt) != 0 {
7730 break
7731 }
7732 b.resetWithControl(BlockMIPS64EQ, x)
7733 return true
7734 }
7735
7736
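// match: (EQ (SGTconst [0] x) yes no)
// result: (GEZ x yes no)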
7737 for b.Controls[0].Op == OpMIPS64SGTconst {
7738 v_0 := b.Controls[0]
7739 if auxIntToInt64(v_0.AuxInt) != 0 {
7740 break
7741 }
7742 x := v_0.Args[0]
7743 b.resetWithControl(BlockMIPS64GEZ, x)
7744 return true
7745 }
7746
7747
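// match: (EQ (SGT x (MOVVconst [0])) yes no)
// result: (LEZ x yes no)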
7748 for b.Controls[0].Op == OpMIPS64SGT {
7749 v_0 := b.Controls[0]
7750 _ = v_0.Args[1]
7751 x := v_0.Args[0]
7752 v_0_1 := v_0.Args[1]
7753 if v_0_1.Op != OpMIPS64MOVVconst || auxIntToInt64(v_0_1.AuxInt) != 0 {
7754 break
7755 }
7756 b.resetWithControl(BlockMIPS64LEZ, x)
7757 return true
7758 }
7759
7760
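// match: (EQ (MOVVconst [0]) yes no)
// result: (First yes no)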
7761 for b.Controls[0].Op == OpMIPS64MOVVconst {
7762 v_0 := b.Controls[0]
7763 if auxIntToInt64(v_0.AuxInt) != 0 {
7764 break
7765 }
7766 b.Reset(BlockFirst)
7767 return true
7768 }
7769
7770
7771
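// match: (EQ (MOVVconst [c]) yes no)
// cond: c != 0
// result: (First no yes)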
7772 for b.Controls[0].Op == OpMIPS64MOVVconst {
7773 v_0 := b.Controls[0]
7774 c := auxIntToInt64(v_0.AuxInt)
7775 if !(c != 0) {
7776 break
7777 }
7778 b.Reset(BlockFirst)
7779 b.swapSuccessors()
7780 return true
7781 }
7782 case BlockMIPS64GEZ:
7783
7784
7785
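// match: (GEZ (MOVVconst [c]) yes no)
// cond: c >= 0
// result: (First yes no)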
7786 for b.Controls[0].Op == OpMIPS64MOVVconst {
7787 v_0 := b.Controls[0]
7788 c := auxIntToInt64(v_0.AuxInt)
7789 if !(c >= 0) {
7790 break
7791 }
7792 b.Reset(BlockFirst)
7793 return true
7794 }
7795
7796
7797
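// match: (GEZ (MOVVconst [c]) yes no)
// cond: c < 0
// result: (First no yes)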
7798 for b.Controls[0].Op == OpMIPS64MOVVconst {
7799 v_0 := b.Controls[0]
7800 c := auxIntToInt64(v_0.AuxInt)
7801 if !(c < 0) {
7802 break
7803 }
7804 b.Reset(BlockFirst)
7805 b.swapSuccessors()
7806 return true
7807 }
7808 case BlockMIPS64GTZ:
7809
7810
7811
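// match: (GTZ (MOVVconst [c]) yes no)
// cond: c > 0
// result: (First yes no)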
7812 for b.Controls[0].Op == OpMIPS64MOVVconst {
7813 v_0 := b.Controls[0]
7814 c := auxIntToInt64(v_0.AuxInt)
7815 if !(c > 0) {
7816 break
7817 }
7818 b.Reset(BlockFirst)
7819 return true
7820 }
7821
7822
7823
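// match: (GTZ (MOVVconst [c]) yes no)
// cond: c <= 0
// result: (First no yes)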
7824 for b.Controls[0].Op == OpMIPS64MOVVconst {
7825 v_0 := b.Controls[0]
7826 c := auxIntToInt64(v_0.AuxInt)
7827 if !(c <= 0) {
7828 break
7829 }
7830 b.Reset(BlockFirst)
7831 b.swapSuccessors()
7832 return true
7833 }
7834 case BlockIf:
7835
7836
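// match: (If cond yes no)
// result: (NE cond yes no)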
7837 for {
7838 cond := b.Controls[0]
7839 b.resetWithControl(BlockMIPS64NE, cond)
7840 return true
7841 }
7842 case BlockMIPS64LEZ:
7843
7844
7845
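// match: (LEZ (MOVVconst [c]) yes no)
// cond: c <= 0
// result: (First yes no)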
7846 for b.Controls[0].Op == OpMIPS64MOVVconst {
7847 v_0 := b.Controls[0]
7848 c := auxIntToInt64(v_0.AuxInt)
7849 if !(c <= 0) {
7850 break
7851 }
7852 b.Reset(BlockFirst)
7853 return true
7854 }
7855
7856
7857
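// match: (LEZ (MOVVconst [c]) yes no)
// cond: c > 0
// result: (First no yes)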
7858 for b.Controls[0].Op == OpMIPS64MOVVconst {
7859 v_0 := b.Controls[0]
7860 c := auxIntToInt64(v_0.AuxInt)
7861 if !(c > 0) {
7862 break
7863 }
7864 b.Reset(BlockFirst)
7865 b.swapSuccessors()
7866 return true
7867 }
7868 case BlockMIPS64LTZ:
7869
7870
7871
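// match: (LTZ (MOVVconst [c]) yes no)
// cond: c < 0
// result: (First yes no)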
7872 for b.Controls[0].Op == OpMIPS64MOVVconst {
7873 v_0 := b.Controls[0]
7874 c := auxIntToInt64(v_0.AuxInt)
7875 if !(c < 0) {
7876 break
7877 }
7878 b.Reset(BlockFirst)
7879 return true
7880 }
7881
7882
7883
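// match: (LTZ (MOVVconst [c]) yes no)
// cond: c >= 0
// result: (First no yes)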
7884 for b.Controls[0].Op == OpMIPS64MOVVconst {
7885 v_0 := b.Controls[0]
7886 c := auxIntToInt64(v_0.AuxInt)
7887 if !(c >= 0) {
7888 break
7889 }
7890 b.Reset(BlockFirst)
7891 b.swapSuccessors()
7892 return true
7893 }
7894 case BlockMIPS64NE:
7895
7896
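// match: (NE (FPFlagTrue cmp) yes no)
// result: (FPT cmp yes no)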
7897 for b.Controls[0].Op == OpMIPS64FPFlagTrue {
7898 v_0 := b.Controls[0]
7899 cmp := v_0.Args[0]
7900 b.resetWithControl(BlockMIPS64FPT, cmp)
7901 return true
7902 }
7903
7904
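// match: (NE (FPFlagFalse cmp) yes no)
// result: (FPF cmp yes no)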
7905 for b.Controls[0].Op == OpMIPS64FPFlagFalse {
7906 v_0 := b.Controls[0]
7907 cmp := v_0.Args[0]
7908 b.resetWithControl(BlockMIPS64FPF, cmp)
7909 return true
7910 }
7911
7912
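// match: (NE (XORconst [1] cmp:(SGT _ _)) yes no)
// result: (EQ cmp yes no)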
7913 for b.Controls[0].Op == OpMIPS64XORconst {
7914 v_0 := b.Controls[0]
7915 if auxIntToInt64(v_0.AuxInt) != 1 {
7916 break
7917 }
7918 cmp := v_0.Args[0]
7919 if cmp.Op != OpMIPS64SGT {
7920 break
7921 }
7922 b.resetWithControl(BlockMIPS64EQ, cmp)
7923 return true
7924 }
7925
7926
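// match: (NE (XORconst [1] cmp:(SGTU _ _)) yes no)
// result: (EQ cmp yes no)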
7927 for b.Controls[0].Op == OpMIPS64XORconst {
7928 v_0 := b.Controls[0]
7929 if auxIntToInt64(v_0.AuxInt) != 1 {
7930 break
7931 }
7932 cmp := v_0.Args[0]
7933 if cmp.Op != OpMIPS64SGTU {
7934 break
7935 }
7936 b.resetWithControl(BlockMIPS64EQ, cmp)
7937 return true
7938 }
7939
7940
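// match: (NE (XORconst [1] cmp:(SGTconst _)) yes no)
// result: (EQ cmp yes no)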
7941 for b.Controls[0].Op == OpMIPS64XORconst {
7942 v_0 := b.Controls[0]
7943 if auxIntToInt64(v_0.AuxInt) != 1 {
7944 break
7945 }
7946 cmp := v_0.Args[0]
7947 if cmp.Op != OpMIPS64SGTconst {
7948 break
7949 }
7950 b.resetWithControl(BlockMIPS64EQ, cmp)
7951 return true
7952 }
7953
7954
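// match: (NE (XORconst [1] cmp:(SGTUconst _)) yes no)
// result: (EQ cmp yes no)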
7955 for b.Controls[0].Op == OpMIPS64XORconst {
7956 v_0 := b.Controls[0]
7957 if auxIntToInt64(v_0.AuxInt) != 1 {
7958 break
7959 }
7960 cmp := v_0.Args[0]
7961 if cmp.Op != OpMIPS64SGTUconst {
7962 break
7963 }
7964 b.resetWithControl(BlockMIPS64EQ, cmp)
7965 return true
7966 }
7967
7968
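// match: (NE (SGTUconst [1] x) yes no)
// result: (EQ x yes no)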
7969 for b.Controls[0].Op == OpMIPS64SGTUconst {
7970 v_0 := b.Controls[0]
7971 if auxIntToInt64(v_0.AuxInt) != 1 {
7972 break
7973 }
7974 x := v_0.Args[0]
7975 b.resetWithControl(BlockMIPS64EQ, x)
7976 return true
7977 }
7978
7979
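// match: (NE (SGTU x (MOVVconst [0])) yes no)
// result: (NE x yes no)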
7980 for b.Controls[0].Op == OpMIPS64SGTU {
7981 v_0 := b.Controls[0]
7982 _ = v_0.Args[1]
7983 x := v_0.Args[0]
7984 v_0_1 := v_0.Args[1]
7985 if v_0_1.Op != OpMIPS64MOVVconst || auxIntToInt64(v_0_1.AuxInt) != 0 {
7986 break
7987 }
7988 b.resetWithControl(BlockMIPS64NE, x)
7989 return true
7990 }
7991
7992
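// match: (NE (SGTconst [0] x) yes no)
// result: (LTZ x yes no)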
7993 for b.Controls[0].Op == OpMIPS64SGTconst {
7994 v_0 := b.Controls[0]
7995 if auxIntToInt64(v_0.AuxInt) != 0 {
7996 break
7997 }
7998 x := v_0.Args[0]
7999 b.resetWithControl(BlockMIPS64LTZ, x)
8000 return true
8001 }
8002
8003
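// match: (NE (SGT x (MOVVconst [0])) yes no)
// result: (GTZ x yes no)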
8004 for b.Controls[0].Op == OpMIPS64SGT {
8005 v_0 := b.Controls[0]
8006 _ = v_0.Args[1]
8007 x := v_0.Args[0]
8008 v_0_1 := v_0.Args[1]
8009 if v_0_1.Op != OpMIPS64MOVVconst || auxIntToInt64(v_0_1.AuxInt) != 0 {
8010 break
8011 }
8012 b.resetWithControl(BlockMIPS64GTZ, x)
8013 return true
8014 }
8015
8016
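// match: (NE (MOVVconst [0]) yes no)
// result: (First no yes)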
8017 for b.Controls[0].Op == OpMIPS64MOVVconst {
8018 v_0 := b.Controls[0]
8019 if auxIntToInt64(v_0.AuxInt) != 0 {
8020 break
8021 }
8022 b.Reset(BlockFirst)
8023 b.swapSuccessors()
8024 return true
8025 }
8026
8027
8028
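// match: (NE (MOVVconst [c]) yes no)
// cond: c != 0
// result: (First yes no)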
8029 for b.Controls[0].Op == OpMIPS64MOVVconst {
8030 v_0 := b.Controls[0]
8031 c := auxIntToInt64(v_0.AuxInt)
8032 if !(c != 0) {
8033 break
8034 }
8035 b.Reset(BlockFirst)
8036 return true
8037 }
8038 }
8039 return false
8040 }
8041