// Copyright 2015 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

package ssa

import (
"cmd/compile/internal/logopt"
"cmd/compile/internal/types"
"cmd/internal/obj"
"cmd/internal/obj/s390x"
"cmd/internal/objabi"
"cmd/internal/src"
"encoding/binary"
"fmt"
"io"
"math"
"math/bits"
"os"
"path/filepath"
)

type deadValueChoice bool

const (
leaveDeadValues deadValueChoice = false
removeDeadValues = true
)

// deadcode indicates whether rewrite should try to remove any values that become dead.
func applyRewrite(f *Func, rb blockRewriter, rv valueRewriter, deadcode deadValueChoice) {
// repeat rewrites until we find no more rewrites
pendingLines := f.cachedLineStarts // holds statement boundaries that need to be moved to a new value/block
pendingLines.clear()
35 debug := f.pass.debug
36 if debug > 1 {
37 fmt.Printf("%s: rewriting for %s\n", f.pass.name, f.Name)
38 }
39 for {
40 change := false
41 for _, b := range f.Blocks {
42 var b0 *Block
43 if debug > 1 {
44 b0 = new(Block)
45 *b0 = *b
46 b0.Succs = append([]Edge{}, b.Succs...)
47 }
48 for i, c := range b.ControlValues() {
49 for c.Op == OpCopy {
50 c = c.Args[0]
51 b.ReplaceControl(i, c)
52 }
53 }
54 if rb(b) {
55 change = true
56 if debug > 1 {
57 fmt.Printf("rewriting %s -> %s\n", b0.LongString(), b.LongString())
58 }
59 }
60 for j, v := range b.Values {
61 var v0 *Value
62 if debug > 1 {
63 v0 = new(Value)
64 *v0 = *v
65 v0.Args = append([]*Value{}, v.Args...)
66 }
67 if v.Uses == 0 && v.removeable() {
68 if v.Op != OpInvalid && deadcode == removeDeadValues {
// Reset any values that are now unused, so that we decrement
// the use count of all of their arguments.
// Not quite a deadcode pass, because it does not handle cycles.
// But it should help Uses==1 rules to fire.
73 v.reset(OpInvalid)
74 change = true
75 }
// No point rewriting values which aren't used.
77 continue
78 }
79
80 vchange := phielimValue(v)
81 if vchange && debug > 1 {
82 fmt.Printf("rewriting %s -> %s\n", v0.LongString(), v.LongString())
83 }

// Eliminate copy inputs.
// If any copy input becomes unused, mark it
// as invalid and discard its argument. Repeat
// recursively on the discarded argument.
// This phase helps remove phantom "dead copy" uses
// of a value so that a x.Uses==1 rule condition
// fires reliably.
92 for i, a := range v.Args {
93 if a.Op != OpCopy {
94 continue
95 }
96 aa := copySource(a)
97 v.SetArg(i, aa)
// If a, a copy, carries a statement boundary, attempt to find a new value to hold it.
// The first candidate is the value that will replace a (aa),
// if it shares the same block and line and is eligible.
// The second option is v, which has a as an input. Because aa is earlier in
// the data flow, it is the better choice.
103 if a.Pos.IsStmt() == src.PosIsStmt {
104 if aa.Block == a.Block && aa.Pos.Line() == a.Pos.Line() && aa.Pos.IsStmt() != src.PosNotStmt {
105 aa.Pos = aa.Pos.WithIsStmt()
106 } else if v.Block == a.Block && v.Pos.Line() == a.Pos.Line() && v.Pos.IsStmt() != src.PosNotStmt {
107 v.Pos = v.Pos.WithIsStmt()
108 } else {
// Record the lost boundary and look for a new home after all rewrites are complete.
// Note: it's possible (e.g. in FOR loops) for a statement boundary for the same
// line to appear in more than one block, but only one block is recorded, so one
// of them may be lost.
113 pendingLines.set(a.Pos, int32(a.Block.ID))
114 }
115 a.Pos = a.Pos.WithNotStmt()
116 }
117 vchange = true
118 for a.Uses == 0 {
119 b := a.Args[0]
120 a.reset(OpInvalid)
121 a = b
122 }
123 }
124 if vchange && debug > 1 {
125 fmt.Printf("rewriting %s -> %s\n", v0.LongString(), v.LongString())
126 }

// apply rewrite function
129 if rv(v) {
130 vchange = true
// If the value changed to a poor choice for a statement boundary, move the boundary to a later value in the block.
132 if v.Pos.IsStmt() == src.PosIsStmt {
133 if k := nextGoodStatementIndex(v, j, b); k != j {
134 v.Pos = v.Pos.WithNotStmt()
135 b.Values[k].Pos = b.Values[k].Pos.WithIsStmt()
136 }
137 }
138 }
139
140 change = change || vchange
141 if vchange && debug > 1 {
142 fmt.Printf("rewriting %s -> %s\n", v0.LongString(), v.LongString())
143 }
144 }
145 }
146 if !change {
147 break
148 }
149 }

// remove clobbered values
151 for _, b := range f.Blocks {
152 j := 0
153 for i, v := range b.Values {
154 vl := v.Pos
155 if v.Op == OpInvalid {
156 if v.Pos.IsStmt() == src.PosIsStmt {
157 pendingLines.set(vl, int32(b.ID))
158 }
159 f.freeValue(v)
160 continue
161 }
162 if v.Pos.IsStmt() != src.PosNotStmt && pendingLines.get(vl) == int32(b.ID) {
163 pendingLines.remove(vl)
164 v.Pos = v.Pos.WithIsStmt()
165 }
166 if i != j {
167 b.Values[j] = v
168 }
169 j++
170 }
171 if pendingLines.get(b.Pos) == int32(b.ID) {
172 b.Pos = b.Pos.WithIsStmt()
173 pendingLines.remove(b.Pos)
174 }
175 b.truncateValues(j)
176 }
177 }

// Common functions called from rewriting rules

181 func is64BitFloat(t *types.Type) bool {
182 return t.Size() == 8 && t.IsFloat()
183 }
184
185 func is32BitFloat(t *types.Type) bool {
186 return t.Size() == 4 && t.IsFloat()
187 }
188
189 func is64BitInt(t *types.Type) bool {
190 return t.Size() == 8 && t.IsInteger()
191 }
192
193 func is32BitInt(t *types.Type) bool {
194 return t.Size() == 4 && t.IsInteger()
195 }
196
197 func is16BitInt(t *types.Type) bool {
198 return t.Size() == 2 && t.IsInteger()
199 }
200
201 func is8BitInt(t *types.Type) bool {
202 return t.Size() == 1 && t.IsInteger()
203 }
204
205 func isPtr(t *types.Type) bool {
206 return t.IsPtrShaped()
207 }
208
209 func isSigned(t *types.Type) bool {
210 return t.IsSigned()
211 }
212
// mergeSym merges two symbolic offsets. There is no real merging of
// offsets; we just pick the non-nil one.
215 func mergeSym(x, y Sym) Sym {
216 if x == nil {
217 return y
218 }
219 if y == nil {
220 return x
221 }
222 panic(fmt.Sprintf("mergeSym with two non-nil syms %v %v", x, y))
223 }
224
225 func canMergeSym(x, y Sym) bool {
226 return x == nil || y == nil
227 }
228
// canMergeLoadClobber reports whether the load can be merged into target without
// invalidating the schedule.
// It also checks that the other non-load argument x is something we
// are ok with clobbering.
233 func canMergeLoadClobber(target, load, x *Value) bool {
// The register containing x is going to get clobbered.
// Don't merge if we still need the value of x.
// We don't have liveness information here, but we can
// approximate x dying with:
//  1) target is x's only use.
//  2) target is not in a deeper loop than x.
240 if x.Uses != 1 {
241 return false
242 }
243 loopnest := x.Block.Func.loopnest()
244 loopnest.calculateDepths()
245 if loopnest.depth(target.Block.ID) > loopnest.depth(x.Block.ID) {
246 return false
247 }
248 return canMergeLoad(target, load)
249 }
250
// canMergeLoad reports whether the load can be merged into target without
// invalidating the schedule.
253 func canMergeLoad(target, load *Value) bool {
254 if target.Block.ID != load.Block.ID {
// If the load is in a different block, do not merge it.
256 return false
257 }

// We can't merge the load into the target if the load
// has more than one use.
261 if load.Uses != 1 {
262 return false
263 }
264
265 mem := load.MemoryArg()

// We need the load's memory arg to still be alive at target. That
// can't be the case if one of target's args depends on a memory
// state that is a successor of load's memory arg.
//
// For example, it would be invalid to merge load into target in
// the following situation because newmem has killed oldmem
// before target is reached:
//     load = read ... oldmem
//   newmem = write ... oldmem
//     arg0 = read ... newmem
//   target = add arg0 load
//
// If the argument comes from a different block then we can exclude
// it immediately because it must dominate load (which is in the
// same block as target).
282 var args []*Value
283 for _, a := range target.Args {
284 if a != load && a.Block.ID == target.Block.ID {
285 args = append(args, a)
286 }
287 }

// memPreds contains memory states known to be predecessors of load's
// memory state. It is lazily initialized.
291 var memPreds map[*Value]bool
292 for i := 0; len(args) > 0; i++ {
293 const limit = 100
294 if i >= limit {
// Give up if we have done a lot of iterations.
296 return false
297 }
298 v := args[len(args)-1]
299 args = args[:len(args)-1]
300 if target.Block.ID != v.Block.ID {
// Since target and load are in the same block
// we can stop searching when we leave the block.
303 continue
304 }
305 if v.Op == OpPhi {
// A Phi implies we have reached the top of the block.
// The memory phi, if it exists, is always
// the first logical store in the block.
309 continue
310 }
311 if v.Type.IsTuple() && v.Type.FieldType(1).IsMemory() {
// We could handle this situation, however it is likely
// to be very rare.
314 return false
315 }
316 if v.Op.SymEffect()&SymAddr != 0 {
// This case prevents an operation that calculates the
// address of a local variable from being forced to schedule
// before its corresponding VarDef, for example:
//   v1 = LOAD ...
//   v2 = VARDEF
//   v3 = LEAQ
//   v4 = CMPQ v1 v3
// We don't want to combine the CMPQ with the load, because
// that would force the CMPQ to schedule before the VARDEF, which
// in turn requires the LOAD to schedule before the VARDEF.
328 return false
329 }
330 if v.Type.IsMemory() {
331 if memPreds == nil {
// Initialise a map containing memory states
// known to be predecessors of load's memory
// state.
335 memPreds = make(map[*Value]bool)
336 m := mem
337 const limit = 50
338 for i := 0; i < limit; i++ {
339 if m.Op == OpPhi {
// The memory phi, if it exists, is always
// the first logical store in the block.
342 break
343 }
344 if m.Block.ID != target.Block.ID {
345 break
346 }
347 if !m.Type.IsMemory() {
348 break
349 }
350 memPreds[m] = true
351 if len(m.Args) == 0 {
352 break
353 }
354 m = m.MemoryArg()
355 }
356 }

// We can merge if v is a predecessor of mem.
//
// For example, we can merge load into target in the
// following scenario:
//      x = read ... v
//    mem = write ... v
//   load = read ... mem
// target = add x load
366 if memPreds[v] {
367 continue
368 }
369 return false
370 }
371 if len(v.Args) > 0 && v.Args[len(v.Args)-1] == mem {
// If v takes mem as an input then we know mem
// is valid at this point.
374 continue
375 }
376 for _, a := range v.Args {
377 if target.Block.ID == a.Block.ID {
378 args = append(args, a)
379 }
380 }
381 }
382
383 return true
384 }
385
// isSameCall reports whether sym is the same as the given named symbol.
387 func isSameCall(sym interface{}, name string) bool {
388 fn := sym.(*AuxCall).Fn
389 return fn != nil && fn.String() == name
390 }
391
// nlz returns the number of leading zeros.
393 func nlz64(x int64) int { return bits.LeadingZeros64(uint64(x)) }
394 func nlz32(x int32) int { return bits.LeadingZeros32(uint32(x)) }
395 func nlz16(x int16) int { return bits.LeadingZeros16(uint16(x)) }
396 func nlz8(x int8) int { return bits.LeadingZeros8(uint8(x)) }
397
// ntz returns the number of trailing zeros.
399 func ntz64(x int64) int { return bits.TrailingZeros64(uint64(x)) }
400 func ntz32(x int32) int { return bits.TrailingZeros32(uint32(x)) }
401 func ntz16(x int16) int { return bits.TrailingZeros16(uint16(x)) }
402 func ntz8(x int8) int { return bits.TrailingZeros8(uint8(x)) }
403
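// oneBit functions report whether x contains exactly one set bit.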
404 func oneBit(x int64) bool { return x&(x-1) == 0 && x != 0 }
405 func oneBit8(x int8) bool { return x&(x-1) == 0 && x != 0 }
406 func oneBit16(x int16) bool { return x&(x-1) == 0 && x != 0 }
407 func oneBit32(x int32) bool { return x&(x-1) == 0 && x != 0 }
408 func oneBit64(x int64) bool { return x&(x-1) == 0 && x != 0 }
409
// nto returns the number of trailing ones.
411 func nto(x int64) int64 {
412 return int64(ntz64(^x))
413 }
414
// logX returns the logarithm of n base 2.
// n must be a positive power of 2 (isPowerOfTwoX returns true).
417 func log8(n int8) int64 {
418 return int64(bits.Len8(uint8(n))) - 1
419 }
420 func log16(n int16) int64 {
421 return int64(bits.Len16(uint16(n))) - 1
422 }
423 func log32(n int32) int64 {
424 return int64(bits.Len32(uint32(n))) - 1
425 }
426 func log64(n int64) int64 {
427 return int64(bits.Len64(uint64(n))) - 1
428 }
429

// log2uint32 returns logarithm in base 2 of uint32(n), with log2(0) = -1.
432 func log2uint32(n int64) int64 {
433 return int64(bits.Len32(uint32(n))) - 1
434 }
435
// isPowerOfTwoX functions report whether n is a power of 2.
437 func isPowerOfTwo8(n int8) bool {
438 return n > 0 && n&(n-1) == 0
439 }
440 func isPowerOfTwo16(n int16) bool {
441 return n > 0 && n&(n-1) == 0
442 }
443 func isPowerOfTwo32(n int32) bool {
444 return n > 0 && n&(n-1) == 0
445 }
446 func isPowerOfTwo64(n int64) bool {
447 return n > 0 && n&(n-1) == 0
448 }
449
450
451 func isUint64PowerOfTwo(in int64) bool {
452 n := uint64(in)
453 return n > 0 && n&(n-1) == 0
454 }
455
456
457 func isUint32PowerOfTwo(in int64) bool {
458 n := uint64(uint32(in))
459 return n > 0 && n&(n-1) == 0
460 }
461
462
463 func is32Bit(n int64) bool {
464 return n == int64(int32(n))
465 }
466
467
468 func is16Bit(n int64) bool {
469 return n == int64(int16(n))
470 }
471
472
473 func is8Bit(n int64) bool {
474 return n == int64(int8(n))
475 }
476
477
478 func isU8Bit(n int64) bool {
479 return n == int64(uint8(n))
480 }
481
482
483 func isU12Bit(n int64) bool {
484 return 0 <= n && n < (1<<12)
485 }
486
487
488 func isU16Bit(n int64) bool {
489 return n == int64(uint16(n))
490 }
491
492
493 func isU32Bit(n int64) bool {
494 return n == int64(uint32(n))
495 }
496
497
498 func is20Bit(n int64) bool {
499 return -(1<<19) <= n && n < (1<<19)
500 }
501
502
503 func b2i(b bool) int64 {
504 if b {
505 return 1
506 }
507 return 0
508 }
509
510
511 func b2i32(b bool) int32 {
512 if b {
513 return 1
514 }
515 return 0
516 }
517
// shiftIsBounded reports whether (left/right) shift Value v is known to be bounded.
// A shift is bounded if it is shifting by less than the width of the shifted value.
520 func shiftIsBounded(v *Value) bool {
521 return v.AuxInt != 0
522 }
523
// truncate64Fto32F converts a float64 to a float32 preserving the bit pattern
// of the mantissa.
526 func truncate64Fto32F(f float64) float32 {
527 if !isExactFloat32(f) {
528 panic("truncate64Fto32F: truncation is not exact")
529 }
530 if !math.IsNaN(f) {
531 return float32(f)
532 }
// NaN bit patterns aren't necessarily preserved across conversion
// instructions so we need to do the conversion manually.
535 b := math.Float64bits(f)
536 m := b & ((1 << 52) - 1)
// keep the sign bit, force an all-ones exponent, and keep the top 23 bits of the mantissa
538 r := uint32(((b >> 32) & (1 << 31)) | 0x7f800000 | (m >> (52 - 23)))
539 return math.Float32frombits(r)
540 }
541
// extend32Fto64F converts a float32 to a float64 preserving the bit pattern
// of the mantissa.
544 func extend32Fto64F(f float32) float64 {
545 if !math.IsNaN(float64(f)) {
546 return float64(f)
547 }
// NaN bit patterns aren't necessarily preserved across conversion
// instructions so we need to do the conversion manually.
550 b := uint64(math.Float32bits(f))
551
552 r := ((b << 32) & (1 << 63)) | (0x7ff << 52) | ((b & 0x7fffff) << (52 - 23))
553 return math.Float64frombits(r)
554 }
555
// DivisionNeedsFixUp reports whether the division needs fix-up code.
557 func DivisionNeedsFixUp(v *Value) bool {
558 return v.AuxInt == 0
559 }
560
561
562 func auxFrom64F(f float64) int64 {
563 if f != f {
564 panic("can't encode a NaN in AuxInt field")
565 }
566 return int64(math.Float64bits(f))
567 }
568
569
570 func auxFrom32F(f float32) int64 {
571 if f != f {
572 panic("can't encode a NaN in AuxInt field")
573 }
574 return int64(math.Float64bits(extend32Fto64F(f)))
575 }
576
577
578 func auxTo32F(i int64) float32 {
579 return truncate64Fto32F(math.Float64frombits(uint64(i)))
580 }
581
582
583 func auxTo64F(i int64) float64 {
584 return math.Float64frombits(uint64(i))
585 }
586
587 func auxIntToBool(i int64) bool {
588 if i == 0 {
589 return false
590 }
591 return true
592 }
593 func auxIntToInt8(i int64) int8 {
594 return int8(i)
595 }
596 func auxIntToInt16(i int64) int16 {
597 return int16(i)
598 }
599 func auxIntToInt32(i int64) int32 {
600 return int32(i)
601 }
602 func auxIntToInt64(i int64) int64 {
603 return i
604 }
605 func auxIntToUint8(i int64) uint8 {
606 return uint8(i)
607 }
608 func auxIntToFloat32(i int64) float32 {
609 return float32(math.Float64frombits(uint64(i)))
610 }
611 func auxIntToFloat64(i int64) float64 {
612 return math.Float64frombits(uint64(i))
613 }
614 func auxIntToValAndOff(i int64) ValAndOff {
615 return ValAndOff(i)
616 }
617 func auxIntToArm64BitField(i int64) arm64BitField {
618 return arm64BitField(i)
619 }
620 func auxIntToInt128(x int64) int128 {
621 if x != 0 {
622 panic("nonzero int128 not allowed")
623 }
624 return 0
625 }
626 func auxIntToFlagConstant(x int64) flagConstant {
627 return flagConstant(x)
628 }
629
630 func auxIntToOp(cc int64) Op {
631 return Op(cc)
632 }
633
634 func boolToAuxInt(b bool) int64 {
635 if b {
636 return 1
637 }
638 return 0
639 }
640 func int8ToAuxInt(i int8) int64 {
641 return int64(i)
642 }
643 func int16ToAuxInt(i int16) int64 {
644 return int64(i)
645 }
646 func int32ToAuxInt(i int32) int64 {
647 return int64(i)
648 }
649 func int64ToAuxInt(i int64) int64 {
650 return int64(i)
651 }
652 func uint8ToAuxInt(i uint8) int64 {
653 return int64(int8(i))
654 }
655 func float32ToAuxInt(f float32) int64 {
656 return int64(math.Float64bits(float64(f)))
657 }
658 func float64ToAuxInt(f float64) int64 {
659 return int64(math.Float64bits(f))
660 }
661 func valAndOffToAuxInt(v ValAndOff) int64 {
662 return int64(v)
663 }
664 func arm64BitFieldToAuxInt(v arm64BitField) int64 {
665 return int64(v)
666 }
667 func int128ToAuxInt(x int128) int64 {
668 if x != 0 {
669 panic("nonzero int128 not allowed")
670 }
671 return 0
672 }
673 func flagConstantToAuxInt(x flagConstant) int64 {
674 return int64(x)
675 }
676
677 func opToAuxInt(o Op) int64 {
678 return int64(o)
679 }
680
681 func auxToString(i interface{}) string {
682 return i.(string)
683 }
684 func auxToSym(i interface{}) Sym {
685
686 s, _ := i.(Sym)
687 return s
688 }
689 func auxToType(i interface{}) *types.Type {
690 return i.(*types.Type)
691 }
692 func auxToCall(i interface{}) *AuxCall {
693 return i.(*AuxCall)
694 }
695 func auxToS390xCCMask(i interface{}) s390x.CCMask {
696 return i.(s390x.CCMask)
697 }
698 func auxToS390xRotateParams(i interface{}) s390x.RotateParams {
699 return i.(s390x.RotateParams)
700 }
701
702 func stringToAux(s string) interface{} {
703 return s
704 }
705 func symToAux(s Sym) interface{} {
706 return s
707 }
708 func callToAux(s *AuxCall) interface{} {
709 return s
710 }
711 func typeToAux(t *types.Type) interface{} {
712 return t
713 }
714 func s390xCCMaskToAux(c s390x.CCMask) interface{} {
715 return c
716 }
717 func s390xRotateParamsToAux(r s390x.RotateParams) interface{} {
718 return r
719 }
720
// uaddOvf reports whether unsigned a+b would overflow.
722 func uaddOvf(a, b int64) bool {
723 return uint64(a)+uint64(b) < uint64(a)
724 }
725
// devirt de-virtualizes an InterCall: if the itab entry for the method can be
// resolved at compile time, it returns an AuxCall for a direct static call, else nil.
728 func devirt(v *Value, aux interface{}, sym Sym, offset int64) *AuxCall {
729 f := v.Block.Func
730 n, ok := sym.(*obj.LSym)
731 if !ok {
732 return nil
733 }
734 lsym := f.fe.DerefItab(n, offset)
735 if f.pass.debug > 0 {
736 if lsym != nil {
737 f.Warnl(v.Pos, "de-virtualizing call")
738 } else {
739 f.Warnl(v.Pos, "couldn't de-virtualize call")
740 }
741 }
742 if lsym == nil {
743 return nil
744 }
745 va := aux.(*AuxCall)
746 return StaticAuxCall(lsym, va.args, va.results)
747 }
748
// devirtLESym is like devirt, but for late-expanded calls; it returns the
// resolved target symbol, or nil if the call cannot be de-virtualized.
751 func devirtLESym(v *Value, aux interface{}, sym Sym, offset int64) *obj.LSym {
752 n, ok := sym.(*obj.LSym)
753 if !ok {
754 return nil
755 }
756
757 f := v.Block.Func
758 lsym := f.fe.DerefItab(n, offset)
759 if f.pass.debug > 0 {
760 if lsym != nil {
761 f.Warnl(v.Pos, "de-virtualizing call")
762 } else {
763 f.Warnl(v.Pos, "couldn't de-virtualize call")
764 }
765 }
766 if lsym == nil {
767 return nil
768 }
769 return lsym
770 }
771
772 func devirtLECall(v *Value, sym *obj.LSym) *Value {
773 v.Op = OpStaticLECall
774 v.Aux.(*AuxCall).Fn = sym
775 v.RemoveArg(0)
776 return v
777 }

// isSamePtr reports whether p1 and p2 point to the same address.
780 func isSamePtr(p1, p2 *Value) bool {
781 if p1 == p2 {
782 return true
783 }
784 if p1.Op != p2.Op {
785 return false
786 }
787 switch p1.Op {
788 case OpOffPtr:
789 return p1.AuxInt == p2.AuxInt && isSamePtr(p1.Args[0], p2.Args[0])
790 case OpAddr, OpLocalAddr:
791
792
793 return p1.Aux == p2.Aux && p1.Args[0].Op == p2.Args[0].Op
794 case OpAddPtr:
795 return p1.Args[1] == p2.Args[1] && isSamePtr(p1.Args[0], p2.Args[0])
796 }
797 return false
798 }
799
800 func isStackPtr(v *Value) bool {
801 for v.Op == OpOffPtr || v.Op == OpAddPtr {
802 v = v.Args[0]
803 }
804 return v.Op == OpSP || v.Op == OpLocalAddr
805 }

// disjoint reports whether the memory region specified by [p1:p1+n1)
// does not overlap with [p2:p2+n2).
// A return value of false does not imply the regions overlap.
810 func disjoint(p1 *Value, n1 int64, p2 *Value, n2 int64) bool {
811 if n1 == 0 || n2 == 0 {
812 return true
813 }
814 if p1 == p2 {
815 return false
816 }
817 baseAndOffset := func(ptr *Value) (base *Value, offset int64) {
818 base, offset = ptr, 0
819 for base.Op == OpOffPtr {
820 offset += base.AuxInt
821 base = base.Args[0]
822 }
823 return base, offset
824 }
825 p1, off1 := baseAndOffset(p1)
826 p2, off2 := baseAndOffset(p2)
827 if isSamePtr(p1, p2) {
828 return !overlap(off1, n1, off2, n2)
829 }
830
831
832
833
834 switch p1.Op {
835 case OpAddr, OpLocalAddr:
836 if p2.Op == OpAddr || p2.Op == OpLocalAddr || p2.Op == OpSP {
837 return true
838 }
839 return p2.Op == OpArg && p1.Args[0].Op == OpSP
840 case OpArg:
841 if p2.Op == OpSP || p2.Op == OpLocalAddr {
842 return true
843 }
844 case OpSP:
845 return p2.Op == OpAddr || p2.Op == OpLocalAddr || p2.Op == OpArg || p2.Op == OpSP
846 }
847 return false
848 }
849
// moveSize returns the number of bytes an aligned MOV instruction moves.
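// For example, moveSize(4, c) is 4 even when c.PtrSize == 8, since the region
// is only known to be 4-byte aligned.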
851 func moveSize(align int64, c *Config) int64 {
852 switch {
853 case align%8 == 0 && c.PtrSize == 8:
854 return 8
855 case align%4 == 0:
856 return 4
857 case align%2 == 0:
858 return 2
859 }
860 return 1
861 }

// mergePoint finds a block among a's blocks which dominates all the other
// blocks; it does so by walking up the single-predecessor chain from b.
// It returns nil if it cannot find such a block within a bounded search.
866 func mergePoint(b *Block, a ...*Value) *Block {
867
868
869
870 d := 100
871
872 for d > 0 {
873 for _, x := range a {
874 if b == x.Block {
875 goto found
876 }
877 }
878 if len(b.Preds) > 1 {
879
880 return nil
881 }
882 b = b.Preds[0].b
883 d--
884 }
885 return nil
886 found:
887
888
889 r := b
890
891
892 na := 0
893 for d > 0 {
894 for _, x := range a {
895 if b == x.Block {
896 na++
897 }
898 }
899 if na == len(a) {
900
901 return r
902 }
903 if len(b.Preds) > 1 {
904 return nil
905 }
906 b = b.Preds[0].b
907 d--
908
909 }
910 return nil
911 }

// clobber invalidates values. Returns true.
// clobber is used by rewrite rules to:
//   A) make sure the values are really dead and never used again.
//   B) decrement use counts of the values' args.
917 func clobber(vv ...*Value) bool {
918 for _, v := range vv {
919 v.reset(OpInvalid)
920
921 }
922 return true
923 }

// clobberIfDead resets v when its use count is 1. Returns true.
// clobberIfDead is used by rewrite rules to decrement
// use counts of v's args when v is dead and never used.
928 func clobberIfDead(v *Value) bool {
929 if v.Uses == 1 {
930 v.reset(OpInvalid)
931 }
932
933 return true
934 }
935
// noteRule is an easy way to track if a rule is matched when writing
// a new one. Make the rule of interest also conditional on
//   noteRule("note to self: rule of interest matched")
// and that message will print when the rule matches.
940 func noteRule(s string) bool {
941 fmt.Println(s)
942 return true
943 }
944
// countRule increments Func.ruleMatches[key].
// If Func.ruleMatches is non-nil at the end
// of compilation, it will be printed to stdout.
// This is intended to make it easier to find which functions
// have lots of rule matches when developing new rules.
950 func countRule(v *Value, key string) bool {
951 f := v.Block.Func
952 if f.ruleMatches == nil {
953 f.ruleMatches = make(map[string]int)
954 }
955 f.ruleMatches[key]++
956 return true
957 }

// warnRule generates compiler debug output with string s when
// v is not in autogenerated code, cond is true and the rule has fired.
961 func warnRule(cond bool, v *Value, s string) bool {
962 if pos := v.Pos; pos.Line() > 1 && cond {
963 v.Block.Func.Warnl(pos, s)
964 }
965 return true
966 }

// for a pseudo-op like (LessThan x), extract x.
969 func flagArg(v *Value) *Value {
970 if len(v.Args) != 1 || !v.Args[0].Type.IsFlags() {
971 return nil
972 }
973 return v.Args[0]
974 }

// arm64Negate finds the complement to an ARM64 condition code,
// for example Equal -> NotEqual or LessThan -> GreaterEqual.
//
// For floating point, it's more subtle because NaN is unordered. We do
// !LessThanF -> NotLessThanF, the latter takes care of NaNs.
981 func arm64Negate(op Op) Op {
982 switch op {
983 case OpARM64LessThan:
984 return OpARM64GreaterEqual
985 case OpARM64LessThanU:
986 return OpARM64GreaterEqualU
987 case OpARM64GreaterThan:
988 return OpARM64LessEqual
989 case OpARM64GreaterThanU:
990 return OpARM64LessEqualU
991 case OpARM64LessEqual:
992 return OpARM64GreaterThan
993 case OpARM64LessEqualU:
994 return OpARM64GreaterThanU
995 case OpARM64GreaterEqual:
996 return OpARM64LessThan
997 case OpARM64GreaterEqualU:
998 return OpARM64LessThanU
999 case OpARM64Equal:
1000 return OpARM64NotEqual
1001 case OpARM64NotEqual:
1002 return OpARM64Equal
1003 case OpARM64LessThanF:
1004 return OpARM64NotLessThanF
1005 case OpARM64NotLessThanF:
1006 return OpARM64LessThanF
1007 case OpARM64LessEqualF:
1008 return OpARM64NotLessEqualF
1009 case OpARM64NotLessEqualF:
1010 return OpARM64LessEqualF
1011 case OpARM64GreaterThanF:
1012 return OpARM64NotGreaterThanF
1013 case OpARM64NotGreaterThanF:
1014 return OpARM64GreaterThanF
1015 case OpARM64GreaterEqualF:
1016 return OpARM64NotGreaterEqualF
1017 case OpARM64NotGreaterEqualF:
1018 return OpARM64GreaterEqualF
1019 default:
1020 panic("unreachable")
1021 }
1022 }

// arm64Invert returns the condition code that gives the same result when the
// comparison operands are swapped, for example LessThan -> GreaterThan.
// Note that, unlike arm64Negate, Equal and NotEqual are left unchanged.
1029 func arm64Invert(op Op) Op {
1030 switch op {
1031 case OpARM64LessThan:
1032 return OpARM64GreaterThan
1033 case OpARM64LessThanU:
1034 return OpARM64GreaterThanU
1035 case OpARM64GreaterThan:
1036 return OpARM64LessThan
1037 case OpARM64GreaterThanU:
1038 return OpARM64LessThanU
1039 case OpARM64LessEqual:
1040 return OpARM64GreaterEqual
1041 case OpARM64LessEqualU:
1042 return OpARM64GreaterEqualU
1043 case OpARM64GreaterEqual:
1044 return OpARM64LessEqual
1045 case OpARM64GreaterEqualU:
1046 return OpARM64LessEqualU
1047 case OpARM64Equal, OpARM64NotEqual:
1048 return op
1049 case OpARM64LessThanF:
1050 return OpARM64GreaterThanF
1051 case OpARM64GreaterThanF:
1052 return OpARM64LessThanF
1053 case OpARM64LessEqualF:
1054 return OpARM64GreaterEqualF
1055 case OpARM64GreaterEqualF:
1056 return OpARM64LessEqualF
1057 case OpARM64NotLessThanF:
1058 return OpARM64NotGreaterThanF
1059 case OpARM64NotGreaterThanF:
1060 return OpARM64NotLessThanF
1061 case OpARM64NotLessEqualF:
1062 return OpARM64NotGreaterEqualF
1063 case OpARM64NotGreaterEqualF:
1064 return OpARM64NotLessEqualF
1065 default:
1066 panic("unreachable")
1067 }
1068 }
1069
// ccARM64Eval evaluates an ARM64 conditional-op against a flags value that is
// potentially constant; it returns 1 for true, -1 for false,
// and 0 for not constant.
1073 func ccARM64Eval(op Op, flags *Value) int {
1074 fop := flags.Op
1075 if fop == OpARM64InvertFlags {
1076 return -ccARM64Eval(op, flags.Args[0])
1077 }
1078 if fop != OpARM64FlagConstant {
1079 return 0
1080 }
1081 fc := flagConstant(flags.AuxInt)
1082 b2i := func(b bool) int {
1083 if b {
1084 return 1
1085 }
1086 return -1
1087 }
1088 switch op {
1089 case OpARM64Equal:
1090 return b2i(fc.eq())
1091 case OpARM64NotEqual:
1092 return b2i(fc.ne())
1093 case OpARM64LessThan:
1094 return b2i(fc.lt())
1095 case OpARM64LessThanU:
1096 return b2i(fc.ult())
1097 case OpARM64GreaterThan:
1098 return b2i(fc.gt())
1099 case OpARM64GreaterThanU:
1100 return b2i(fc.ugt())
1101 case OpARM64LessEqual:
1102 return b2i(fc.le())
1103 case OpARM64LessEqualU:
1104 return b2i(fc.ule())
1105 case OpARM64GreaterEqual:
1106 return b2i(fc.ge())
1107 case OpARM64GreaterEqualU:
1108 return b2i(fc.uge())
1109 }
1110 return 0
1111 }
1112
// logRule logs the use of the rule s. This will only be enabled if
// rewrite rules were generated with the -log option, see rulegen.go.
1115 func logRule(s string) {
1116 if ruleFile == nil {
// Open a log file to write log to. We open in append
// mode because all.bash runs the compiler lots of times,
// and we want the concatenation of all of those logs.
// This means, of course, that users need to rm the old log
// to get fresh data.
// TODO: all.bash runs compilers in parallel. Need to synchronize logging somehow?
1123 w, err := os.OpenFile(filepath.Join(os.Getenv("GOROOT"), "src", "rulelog"),
1124 os.O_CREATE|os.O_WRONLY|os.O_APPEND, 0666)
1125 if err != nil {
1126 panic(err)
1127 }
1128 ruleFile = w
1129 }
1130 _, err := fmt.Fprintln(ruleFile, s)
1131 if err != nil {
1132 panic(err)
1133 }
1134 }
1135
1136 var ruleFile io.Writer
1137
1138 func min(x, y int64) int64 {
1139 if x < y {
1140 return x
1141 }
1142 return y
1143 }
1144
1145 func isConstZero(v *Value) bool {
1146 switch v.Op {
1147 case OpConstNil:
1148 return true
1149 case OpConst64, OpConst32, OpConst16, OpConst8, OpConstBool, OpConst32F, OpConst64F:
1150 return v.AuxInt == 0
1151 }
1152 return false
1153 }

// reciprocalExact64 reports whether 1/c is exactly representable.
1156 func reciprocalExact64(c float64) bool {
1157 b := math.Float64bits(c)
1158 man := b & (1<<52 - 1)
1159 if man != 0 {
1160 return false
1161 }
1162 exp := b >> 52 & (1<<11 - 1)
// exponent bias is 0x3ff. So taking the reciprocal of a number
// changes the exponent to 0x7fe-exp.
1165 switch exp {
1166 case 0:
1167 return false
1168 case 0x7ff:
1169 return false
1170 case 0x7fe:
1171 return false
1172 default:
1173 return true
1174 }
1175 }

// reciprocalExact32 reports whether 1/c is exactly representable.
1178 func reciprocalExact32(c float32) bool {
1179 b := math.Float32bits(c)
1180 man := b & (1<<23 - 1)
1181 if man != 0 {
1182 return false
1183 }
1184 exp := b >> 23 & (1<<8 - 1)
1185
1186
1187 switch exp {
1188 case 0:
1189 return false
1190 case 0xff:
1191 return false
1192 case 0xfe:
1193 return false
1194 default:
1195 return true
1196 }
1197 }

// isARMImmRot reports whether the immediate v can be encoded directly by an ARM
// instruction: an 8-bit constant rotated right by an even number of bits.
1200 func isARMImmRot(v uint32) bool {
1201 for i := 0; i < 16; i++ {
1202 if v&^0xff == 0 {
1203 return true
1204 }
1205 v = v<<2 | v>>30
1206 }
1207
1208 return false
1209 }

// overlap reports whether the ranges given by the given offset and
// size pairs overlap.
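// For example, overlap(0, 8, 4, 4) is true, while overlap(0, 4, 4, 4) is false.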
1213 func overlap(offset1, size1, offset2, size2 int64) bool {
1214 if offset1 >= offset2 && offset2+size2 > offset1 {
1215 return true
1216 }
1217 if offset2 >= offset1 && offset1+size1 > offset2 {
1218 return true
1219 }
1220 return false
1221 }
1222
1223 func areAdjacentOffsets(off1, off2, size int64) bool {
1224 return off1+size == off2 || off1 == off2+size
1225 }

// zeroUpper32Bits reports whether the upper 32 bits of x are known to be zero,
// looking through Phi and Select values up to the given recursion depth.
1230 func zeroUpper32Bits(x *Value, depth int) bool {
1231 switch x.Op {
1232 case OpAMD64MOVLconst, OpAMD64MOVLload, OpAMD64MOVLQZX, OpAMD64MOVLloadidx1,
1233 OpAMD64MOVWload, OpAMD64MOVWloadidx1, OpAMD64MOVBload, OpAMD64MOVBloadidx1,
1234 OpAMD64MOVLloadidx4, OpAMD64ADDLload, OpAMD64SUBLload, OpAMD64ANDLload,
1235 OpAMD64ORLload, OpAMD64XORLload, OpAMD64CVTTSD2SL,
1236 OpAMD64ADDL, OpAMD64ADDLconst, OpAMD64SUBL, OpAMD64SUBLconst,
1237 OpAMD64ANDL, OpAMD64ANDLconst, OpAMD64ORL, OpAMD64ORLconst,
1238 OpAMD64XORL, OpAMD64XORLconst, OpAMD64NEGL, OpAMD64NOTL,
1239 OpAMD64SHRL, OpAMD64SHRLconst, OpAMD64SARL, OpAMD64SARLconst,
1240 OpAMD64SHLL, OpAMD64SHLLconst:
1241 return true
1242 case OpArg:
1243 return x.Type.Width == 4
1244 case OpPhi, OpSelect0, OpSelect1:
1245
1246
1247 if depth <= 0 {
1248 return false
1249 }
1250 for i := range x.Args {
1251 if !zeroUpper32Bits(x.Args[i], depth-1) {
1252 return false
1253 }
1254 }
1255 return true
1256
1257 }
1258 return false
1259 }
1260
1261
1262 func zeroUpper48Bits(x *Value, depth int) bool {
1263 switch x.Op {
1264 case OpAMD64MOVWQZX, OpAMD64MOVWload, OpAMD64MOVWloadidx1, OpAMD64MOVWloadidx2:
1265 return true
1266 case OpArg:
1267 return x.Type.Width == 2
1268 case OpPhi, OpSelect0, OpSelect1:
1269
1270
1271 if depth <= 0 {
1272 return false
1273 }
1274 for i := range x.Args {
1275 if !zeroUpper48Bits(x.Args[i], depth-1) {
1276 return false
1277 }
1278 }
1279 return true
1280
1281 }
1282 return false
1283 }
1284
1285
1286 func zeroUpper56Bits(x *Value, depth int) bool {
1287 switch x.Op {
1288 case OpAMD64MOVBQZX, OpAMD64MOVBload, OpAMD64MOVBloadidx1:
1289 return true
1290 case OpArg:
1291 return x.Type.Width == 1
1292 case OpPhi, OpSelect0, OpSelect1:
1293
1294
1295 if depth <= 0 {
1296 return false
1297 }
1298 for i := range x.Args {
1299 if !zeroUpper56Bits(x.Args[i], depth-1) {
1300 return false
1301 }
1302 }
1303 return true
1304
1305 }
1306 return false
1307 }

// isInlinableMemmove reports whether the given arch performs a Move of the given size
// faster than memmove. It will only return true if replacing the memmove with a Move is
// safe, either because Move is small or because the arguments are disjoint.
// This is used as a check for replacing memmove with Move ops.
1313 func isInlinableMemmove(dst, src *Value, sz int64, c *Config) bool {
1314
1315
1316
1317
1318 switch c.arch {
1319 case "amd64":
1320 return sz <= 16 || (sz < 1024 && disjoint(dst, sz, src, sz))
1321 case "386", "arm64":
1322 return sz <= 8
1323 case "s390x", "ppc64", "ppc64le":
1324 return sz <= 8 || disjoint(dst, sz, src, sz)
1325 case "arm", "mips", "mips64", "mipsle", "mips64le":
1326 return sz <= 4
1327 }
1328 return false
1329 }

// logLargeCopy logs the occurrence of a large copy (at least 128 bytes) to the
// optimization logger, so that such copies can be audited.
1334 func logLargeCopy(v *Value, s int64) bool {
1335 if s < 128 {
1336 return true
1337 }
1338 if logopt.Enabled() {
1339 logopt.LogOpt(v.Pos, "copy", "lower", v.Block.Func.Name, fmt.Sprintf("%d bytes", s))
1340 }
1341 return true
1342 }

// hasSmallRotate reports whether the architecture has rotate instructions
// for small (8/16-bit) operand sizes.
1346 func hasSmallRotate(c *Config) bool {
1347 switch c.arch {
1348 case "amd64", "386":
1349 return true
1350 default:
1351 return false
1352 }
1353 }
1354
1355 func newPPC64ShiftAuxInt(sh, mb, me, sz int64) int32 {
1356 if sh < 0 || sh >= sz {
1357 panic("PPC64 shift arg sh out of range")
1358 }
1359 if mb < 0 || mb >= sz {
1360 panic("PPC64 shift arg mb out of range")
1361 }
1362 if me < 0 || me >= sz {
1363 panic("PPC64 shift arg me out of range")
1364 }
1365 return int32(sh<<16 | mb<<8 | me)
1366 }
1367
1368 func GetPPC64Shiftsh(auxint int64) int64 {
1369 return int64(int8(auxint >> 16))
1370 }
1371
1372 func GetPPC64Shiftmb(auxint int64) int64 {
1373 return int64(int8(auxint >> 8))
1374 }
1375
1376 func GetPPC64Shiftme(auxint int64) int64 {
1377 return int64(int8(auxint))
1378 }

// isPPC64WordRotateMask reports whether the value can be encoded as the mask of
// a rlwinm-like operation. Masks can also extend from the msb and wrap to
// the lsb; that is, the valid masks are 32-bit strings of the form
// 0..01..10..0 or 1..10..01..1.
1384 func isPPC64WordRotateMask(v64 int64) bool {
1385
1386 v := uint32(v64)
1387 vp := (v & -v) + v
1388
1389 vn := ^v
1390 vpn := (vn & -vn) + vn
1391 return (v&vp == 0 || vn&vpn == 0) && v != 0
1392 }

// encodePPC64RotateMask compresses a rotate and mask into a single auxInt value
// of the form me | mb<<8 | rotate<<16 | nbits<<24, where mb and me can
// be inferred from the mask even if rotate is not zero.
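// For example, encodePPC64RotateMask(1, 0xFF, 32) packs me=32, mb=24,
// rotate=1 and nbits=32 into the returned auxInt.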
1397 func encodePPC64RotateMask(rotate, mask, nbits int64) int64 {
1398 var mb, me, mbn, men int
1399
1400
1401 if mask == 0 || ^mask == 0 || rotate >= nbits {
1402 panic("Invalid PPC64 rotate mask")
1403 } else if nbits == 32 {
1404 mb = bits.LeadingZeros32(uint32(mask))
1405 me = 32 - bits.TrailingZeros32(uint32(mask))
1406 mbn = bits.LeadingZeros32(^uint32(mask))
1407 men = 32 - bits.TrailingZeros32(^uint32(mask))
1408 } else {
1409 mb = bits.LeadingZeros64(uint64(mask))
1410 me = 64 - bits.TrailingZeros64(uint64(mask))
1411 mbn = bits.LeadingZeros64(^uint64(mask))
1412 men = 64 - bits.TrailingZeros64(^uint64(mask))
1413 }
1414
1415 if mb == 0 && me == int(nbits) {
1416
1417 mb, me = men, mbn
1418 }
1419
1420 return int64(me) | int64(mb<<8) | int64(rotate<<16) | int64(nbits<<24)
1421 }

// DecodePPC64RotateMask is the inverse of encodePPC64RotateMask. The values
// returned as mb and me satisfy the POWER ISA definition of MASK(mb,me) = mask.
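// For example, decoding encodePPC64RotateMask(1, 0xFF, 32) yields
// rotate=1, mb=24, me=31 (inclusive) and mask=0xFF.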
1425 func DecodePPC64RotateMask(sauxint int64) (rotate, mb, me int64, mask uint64) {
1426 auxint := uint64(sauxint)
1427 rotate = int64((auxint >> 16) & 0xFF)
1428 mb = int64((auxint >> 8) & 0xFF)
1429 me = int64((auxint >> 0) & 0xFF)
1430 nbits := int64((auxint >> 24) & 0xFF)
1431 mask = ((1 << uint(nbits-mb)) - 1) ^ ((1 << uint(nbits-me)) - 1)
1432 if mb > me {
1433 mask = ^mask
1434 }
1435 if nbits == 32 {
1436 mask = uint64(uint32(mask))
1437 }
1438
1439
1440
1441 me = (me - 1) & (nbits - 1)
1442 return
1443 }

// isPPC64ValidShiftMask reports whether v is a mask of consecutive one bits
// that includes the least significant bit (that is, a value of the form 2^n-1).
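// For example, 0xFF is a valid shift mask, while 0xFF0 (not anchored at bit 0) is not.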
1448 func isPPC64ValidShiftMask(v int64) bool {
1449 if (v != 0) && ((v+1)&v) == 0 {
1450 return true
1451 }
1452 return false
1453 }
1454
1455 func getPPC64ShiftMaskLength(v int64) int64 {
1456 return int64(bits.Len64(uint64(v)))
1457 }
1458
1459
1460
1461 func mergePPC64RShiftMask(m, s, nbits int64) int64 {
1462 smask := uint64((1<<uint(nbits))-1) >> uint(s)
1463 return m & int64(smask)
1464 }
1465
1466
1467 func mergePPC64AndSrwi(m, s int64) int64 {
1468 mask := mergePPC64RShiftMask(m, s, 32)
1469 if !isPPC64WordRotateMask(mask) {
1470 return 0
1471 }
1472 return encodePPC64RotateMask(32-s, mask, 32)
1473 }
1474
1475
1476
1477 func mergePPC64ClrlsldiSrw(sld, srw int64) int64 {
1478 mask_1 := uint64(0xFFFFFFFF >> uint(srw))
1479
1480 mask_2 := uint64(0xFFFFFFFFFFFFFFFF) >> uint(GetPPC64Shiftmb(int64(sld)))
1481
1482
1483 mask_3 := (mask_1 & mask_2) << uint(GetPPC64Shiftsh(sld))
1484
1485 r_1 := 32 - srw
1486 r_2 := GetPPC64Shiftsh(sld)
1487 r_3 := (r_1 + r_2) & 31
1488
1489 if uint64(uint32(mask_3)) != mask_3 || mask_3 == 0 {
1490 return 0
1491 }
1492 return encodePPC64RotateMask(int64(r_3), int64(mask_3), 32)
1493 }
1494
1495
1496
1497 func mergePPC64ClrlsldiRlwinm(sld int32, rlw int64) int64 {
1498 r_1, _, _, mask_1 := DecodePPC64RotateMask(rlw)
1499
1500 mask_2 := uint64(0xFFFFFFFFFFFFFFFF) >> uint(GetPPC64Shiftmb(int64(sld)))
1501
1502
1503 mask_3 := (mask_1 & mask_2) << uint(GetPPC64Shiftsh(int64(sld)))
1504 r_2 := GetPPC64Shiftsh(int64(sld))
1505 r_3 := (r_1 + r_2) & 31
1506
1507
1508 if !isPPC64WordRotateMask(int64(mask_3)) || uint64(uint32(mask_3)) != mask_3 {
1509 return 0
1510 }
1511 return encodePPC64RotateMask(r_3, int64(mask_3), 32)
1512 }
1513
1514
1515
1516 func mergePPC64SldiSrw(sld, srw int64) int64 {
1517 if sld > srw || srw >= 32 {
1518 return 0
1519 }
1520 mask_r := uint32(0xFFFFFFFF) >> uint(srw)
1521 mask_l := uint32(0xFFFFFFFF) >> uint(sld)
1522 mask := (mask_r & mask_l) << uint(sld)
1523 return encodePPC64RotateMask((32-srw+sld)&31, int64(mask), 32)
1524 }
1525
1526
1527 func rotateLeft32(v, rotate int64) int64 {
1528 return int64(bits.RotateLeft32(uint32(v), int(rotate)))
1529 }

// armBFAuxInt encodes an lsb/width pair into the auxInt format used by ARM64
// bitfield ops: the width in the low byte and the lsb in the next byte.
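// For example, armBFAuxInt(8, 16) describes a field covering bits 8 through 23;
// getARM64BFlsb and getARM64BFwidth recover 8 and 16 from the encoded value.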
1532 func armBFAuxInt(lsb, width int64) arm64BitField {
1533 if lsb < 0 || lsb > 63 {
1534 panic("ARM(64) bit field lsb constant out of range")
1535 }
1536 if width < 1 || width > 64 {
1537 panic("ARM(64) bit field width constant out of range")
1538 }
1539 return arm64BitField(width | lsb<<8)
1540 }
1541
1542
1543 func (bfc arm64BitField) getARM64BFlsb() int64 {
1544 return int64(uint64(bfc) >> 8)
1545 }
1546
1547
1548 func (bfc arm64BitField) getARM64BFwidth() int64 {
1549 return int64(bfc) & 0xff
1550 }

// isARM64BFMask reports whether (mask >> rshift) applied at lsb is a valid
// ARM64 bitfield mask: a contiguous run of ones that ends below bit 64.
1553 func isARM64BFMask(lsb, mask, rshift int64) bool {
1554 shiftedMask := int64(uint64(mask) >> uint64(rshift))
1555 return shiftedMask != 0 && isPowerOfTwo64(shiftedMask+1) && nto(shiftedMask)+lsb < 64
1556 }
1557
1558
1559 func arm64BFWidth(mask, rshift int64) int64 {
1560 shiftedMask := int64(uint64(mask) >> uint64(rshift))
1561 if shiftedMask == 0 {
1562 panic("ARM64 BF mask is zero")
1563 }
1564 return nto(shiftedMask)
1565 }

// sizeof returns the size of t in bytes.
// It panics if t is not a *types.Type.
1569 func sizeof(t interface{}) int64 {
1570 return t.(*types.Type).Size()
1571 }

// registerizable reports whether t is a primitive type that fits in
// a register. It assumes float64 values will always fit into registers
// even if that isn't strictly true.
1576 func registerizable(b *Block, typ *types.Type) bool {
1577 if typ.IsPtrShaped() || typ.IsFloat() {
1578 return true
1579 }
1580 if typ.IsInteger() {
1581 return typ.Size() <= b.Func.Config.RegSize
1582 }
1583 return false
1584 }

// needRaceCleanup reports whether this call to racefuncenter/racefuncexit can be removed.
1587 func needRaceCleanup(sym *AuxCall, v *Value) bool {
1588 f := v.Block.Func
1589 if !f.Config.Race {
1590 return false
1591 }
1592 if !isSameCall(sym, "runtime.racefuncenter") && !isSameCall(sym, "runtime.racefuncenterfp") && !isSameCall(sym, "runtime.racefuncexit") {
1593 return false
1594 }
1595 for _, b := range f.Blocks {
1596 for _, v := range b.Values {
1597 switch v.Op {
1598 case OpStaticCall:
// Only calls to the race functions themselves and to a few panic
// helpers are allowed; any other call means the markers must stay.
1601 s := v.Aux.(*AuxCall).Fn.String()
1602 switch s {
1603 case "runtime.racefuncenter", "runtime.racefuncenterfp", "runtime.racefuncexit",
1604 "runtime.panicdivide", "runtime.panicwrap",
1605 "runtime.panicshift":
1606 continue
1607 }
// Some other static call: keep the race instrumentation so that
// stack traces stay accurate.
1610 return false
1611 case OpPanicBounds, OpPanicExtend:
1612
1613 case OpClosureCall, OpInterCall:
1614
1615 return false
1616 }
1617 }
1618 }
1619 if isSameCall(sym, "runtime.racefuncenter") {
// If we're removing racefuncenter, remove the store that sets up its argument as well.
1621 if v.Args[0].Op != OpStore {
1622 return false
1623 }
1624 mem := v.Args[0].Args[2]
1625 v.Args[0].reset(OpCopy)
1626 v.Args[0].AddArg(mem)
1627 }
1628 return true
1629 }

// symIsRO reports whether sym is a read-only global.
1632 func symIsRO(sym interface{}) bool {
1633 lsym := sym.(*obj.LSym)
1634 return lsym.Type == objabi.SRODATA && len(lsym.R) == 0
1635 }

// symIsROZero reports whether sym is a read-only global whose data contains all zero bytes.
1638 func symIsROZero(sym Sym) bool {
1639 lsym := sym.(*obj.LSym)
1640 if lsym.Type != objabi.SRODATA || len(lsym.R) != 0 {
1641 return false
1642 }
1643 for _, b := range lsym.P {
1644 if b != 0 {
1645 return false
1646 }
1647 }
1648 return true
1649 }

// read8 reads one byte from the read-only global sym at offset off.
1652 func read8(sym interface{}, off int64) uint8 {
1653 lsym := sym.(*obj.LSym)
1654 if off >= int64(len(lsym.P)) || off < 0 {
// Invalid index into the global sym.
// This can happen in dead code, so we don't want to panic.
// Just return any value, it will eventually get ignored.
1659 return 0
1660 }
1661 return lsym.P[off]
1662 }

// read16 reads two bytes from the read-only global sym at offset off.
1665 func read16(sym interface{}, off int64, byteorder binary.ByteOrder) uint16 {
1666 lsym := sym.(*obj.LSym)
// lsym.P is written lazily.
// Bytes requested after the end of lsym.P are 0.
1669 var src []byte
1670 if 0 <= off && off < int64(len(lsym.P)) {
1671 src = lsym.P[off:]
1672 }
1673 buf := make([]byte, 2)
1674 copy(buf, src)
1675 return byteorder.Uint16(buf)
1676 }

// read32 reads four bytes from the read-only global sym at offset off.
1679 func read32(sym interface{}, off int64, byteorder binary.ByteOrder) uint32 {
1680 lsym := sym.(*obj.LSym)
1681 var src []byte
1682 if 0 <= off && off < int64(len(lsym.P)) {
1683 src = lsym.P[off:]
1684 }
1685 buf := make([]byte, 4)
1686 copy(buf, src)
1687 return byteorder.Uint32(buf)
1688 }

// read64 reads eight bytes from the read-only global sym at offset off.
1691 func read64(sym interface{}, off int64, byteorder binary.ByteOrder) uint64 {
1692 lsym := sym.(*obj.LSym)
1693 var src []byte
1694 if 0 <= off && off < int64(len(lsym.P)) {
1695 src = lsym.P[off:]
1696 }
1697 buf := make([]byte, 8)
1698 copy(buf, src)
1699 return byteorder.Uint64(buf)
1700 }

// sequentialAddresses reports whether it can prove that x + n == y.
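// For example, (ADDQ x y) and (LEAQ1 [n] x y) with the same x and y are
// sequential: the second address is exactly n bytes past the first.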
1703 func sequentialAddresses(x, y *Value, n int64) bool {
1704 if x.Op == Op386ADDL && y.Op == Op386LEAL1 && y.AuxInt == n && y.Aux == nil &&
1705 (x.Args[0] == y.Args[0] && x.Args[1] == y.Args[1] ||
1706 x.Args[0] == y.Args[1] && x.Args[1] == y.Args[0]) {
1707 return true
1708 }
1709 if x.Op == Op386LEAL1 && y.Op == Op386LEAL1 && y.AuxInt == x.AuxInt+n && x.Aux == y.Aux &&
1710 (x.Args[0] == y.Args[0] && x.Args[1] == y.Args[1] ||
1711 x.Args[0] == y.Args[1] && x.Args[1] == y.Args[0]) {
1712 return true
1713 }
1714 if x.Op == OpAMD64ADDQ && y.Op == OpAMD64LEAQ1 && y.AuxInt == n && y.Aux == nil &&
1715 (x.Args[0] == y.Args[0] && x.Args[1] == y.Args[1] ||
1716 x.Args[0] == y.Args[1] && x.Args[1] == y.Args[0]) {
1717 return true
1718 }
1719 if x.Op == OpAMD64LEAQ1 && y.Op == OpAMD64LEAQ1 && y.AuxInt == x.AuxInt+n && x.Aux == y.Aux &&
1720 (x.Args[0] == y.Args[0] && x.Args[1] == y.Args[1] ||
1721 x.Args[0] == y.Args[1] && x.Args[1] == y.Args[0]) {
1722 return true
1723 }
1724 return false
1725 }

// flagConstant represents the result of a compile-time comparison.
// The sense of these flags does not necessarily represent the hardware's notion
// of a flags register - these are just a compile-time construct.
// We happen to match the semantics to those of arm/arm64.
// Note that these semantics differ from x86: the carry flag has the opposite
// sense on a subtraction!
//   On amd64, C=1 represents a borrow, e.g. SBB on amd64 does x - y - C.
//   On arm64, C=0 represents a borrow, e.g. SBC on arm64 does x - y - ^C.
//   (because it does x + ^y + C).
// See https://en.wikipedia.org/wiki/Carry_flag#Vs._borrow_flag
1737 type flagConstant uint8
1738
1739
1740 func (fc flagConstant) N() bool {
1741 return fc&1 != 0
1742 }
1743
1744
1745 func (fc flagConstant) Z() bool {
1746 return fc&2 != 0
1747 }

// C reports whether an unsigned add overflowed (carry), or an
// unsigned subtract did not underflow (borrow).
1751 func (fc flagConstant) C() bool {
1752 return fc&4 != 0
1753 }
1754
1755
1756 func (fc flagConstant) V() bool {
1757 return fc&8 != 0
1758 }
1759
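// The following predicates combine the four flags the same way the
// corresponding ARM64 condition codes do: signed comparisons use N, Z and V,
// while unsigned comparisons use Z and C (C set means "no borrow").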
1760 func (fc flagConstant) eq() bool {
1761 return fc.Z()
1762 }
1763 func (fc flagConstant) ne() bool {
1764 return !fc.Z()
1765 }
1766 func (fc flagConstant) lt() bool {
1767 return fc.N() != fc.V()
1768 }
1769 func (fc flagConstant) le() bool {
1770 return fc.Z() || fc.lt()
1771 }
1772 func (fc flagConstant) gt() bool {
1773 return !fc.Z() && fc.ge()
1774 }
1775 func (fc flagConstant) ge() bool {
1776 return fc.N() == fc.V()
1777 }
1778 func (fc flagConstant) ult() bool {
1779 return !fc.C()
1780 }
1781 func (fc flagConstant) ule() bool {
1782 return fc.Z() || fc.ult()
1783 }
1784 func (fc flagConstant) ugt() bool {
1785 return !fc.Z() && fc.uge()
1786 }
1787 func (fc flagConstant) uge() bool {
1788 return fc.C()
1789 }
1790
1791 func (fc flagConstant) ltNoov() bool {
1792 return fc.lt() && !fc.V()
1793 }
1794 func (fc flagConstant) leNoov() bool {
1795 return fc.le() && !fc.V()
1796 }
1797 func (fc flagConstant) gtNoov() bool {
1798 return fc.gt() && !fc.V()
1799 }
1800 func (fc flagConstant) geNoov() bool {
1801 return fc.ge() && !fc.V()
1802 }
1803
1804 func (fc flagConstant) String() string {
1805 return fmt.Sprintf("N=%v,Z=%v,C=%v,V=%v", fc.N(), fc.Z(), fc.C(), fc.V())
1806 }
1807
1808 type flagConstantBuilder struct {
1809 N bool
1810 Z bool
1811 C bool
1812 V bool
1813 }
1814
1815 func (fcs flagConstantBuilder) encode() flagConstant {
1816 var fc flagConstant
1817 if fcs.N {
1818 fc |= 1
1819 }
1820 if fcs.Z {
1821 fc |= 2
1822 }
1823 if fcs.C {
1824 fc |= 4
1825 }
1826 if fcs.V {
1827 fc |= 8
1828 }
1829 return fc
1830 }

// addFlags64 returns the flags that would be set from computing x+y.
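// For example, addFlags64(1, -1) reports Z and C set (the unsigned sum wraps
// to zero) with N and V clear.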
1837 func addFlags64(x, y int64) flagConstant {
1838 var fcb flagConstantBuilder
1839 fcb.Z = x+y == 0
1840 fcb.N = x+y < 0
1841 fcb.C = uint64(x+y) < uint64(x)
1842 fcb.V = x >= 0 && y >= 0 && x+y < 0 || x < 0 && y < 0 && x+y >= 0
1843 return fcb.encode()
1844 }

// subFlags64 returns the flags that would be set from computing x-y.
1847 func subFlags64(x, y int64) flagConstant {
1848 var fcb flagConstantBuilder
1849 fcb.Z = x-y == 0
1850 fcb.N = x-y < 0
1851 fcb.C = uint64(y) <= uint64(x)
1852 fcb.V = x >= 0 && y < 0 && x-y < 0 || x < 0 && y >= 0 && x-y >= 0
1853 return fcb.encode()
1854 }

// addFlags32 returns the flags that would be set from computing x+y.
1857 func addFlags32(x, y int32) flagConstant {
1858 var fcb flagConstantBuilder
1859 fcb.Z = x+y == 0
1860 fcb.N = x+y < 0
1861 fcb.C = uint32(x+y) < uint32(x)
1862 fcb.V = x >= 0 && y >= 0 && x+y < 0 || x < 0 && y < 0 && x+y >= 0
1863 return fcb.encode()
1864 }

// subFlags32 returns the flags that would be set from computing x-y.
1867 func subFlags32(x, y int32) flagConstant {
1868 var fcb flagConstantBuilder
1869 fcb.Z = x-y == 0
1870 fcb.N = x-y < 0
1871 fcb.C = uint32(y) <= uint32(x)
1872 fcb.V = x >= 0 && y < 0 && x-y < 0 || x < 0 && y >= 0 && x-y >= 0
1873 return fcb.encode()
1874 }

// logicFlags64 returns flags set to the sign/zeroness of x.
// C and V are set to false.
1878 func logicFlags64(x int64) flagConstant {
1879 var fcb flagConstantBuilder
1880 fcb.Z = x == 0
1881 fcb.N = x < 0
1882 return fcb.encode()
1883 }

// logicFlags32 returns flags set to the sign/zeroness of x.
// C and V are set to false.
1887 func logicFlags32(x int32) flagConstant {
1888 var fcb flagConstantBuilder
1889 fcb.Z = x == 0
1890 fcb.N = x < 0
1891 return fcb.encode()
1892 }