package ssa

import (
	"cmd/compile/internal/reflectdata"
	"cmd/compile/internal/types"
	"cmd/internal/obj"
	"cmd/internal/objabi"
	"cmd/internal/src"
	"fmt"
	"internal/buildcfg"
)
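
// A ZeroRegion records a range of an object, starting at base, whose
// pointer-sized words are known to be zero in a particular memory state.
// Bit i of mask is set when the word at base+i*ptrSize is known to be zero.
// A ZeroRegion applies only to the single memory value it is keyed by.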
type ZeroRegion struct {
	base *Value
	mask uint64
}
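
// mightBeHeapPointer reports whether v might be a pointer into the heap.
// Addresses of globals are the only values currently known not to be.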
func mightBeHeapPointer(v *Value) bool {
	if IsGlobalAddr(v) {
		return false
	}
	return true
}
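
// mightContainHeapPointer reports whether the data currently stored at
// ptr[:size] might contain a heap pointer, given that mem is the memory
// state and zeroes records which words of freshly allocated objects are
// still known to be zero.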
func mightContainHeapPointer(ptr *Value, size int64, mem *Value, zeroes map[ID]ZeroRegion) bool {
	if IsReadOnlyGlobalAddr(ptr) {
		// Read-only globals never contain heap pointers.
		return false
	}

	// Look up the ZeroRegion for the base object that ptr points into,
	// accumulating any constant offset along the way.
	var off int64
	for ptr.Op == OpOffPtr {
		off += ptr.AuxInt
		ptr = ptr.Args[0]
	}

	ptrSize := ptr.Block.Func.Config.PtrSize
	if off%ptrSize != 0 {
		return true // be conservative about unaligned offsets
	}
	if size%ptrSize != 0 {
		ptr.Fatalf("unaligned pointer write")
	}
	if off < 0 || off+size > 64*ptrSize {
		// Write goes outside the region we can track.
		return true
	}
	z := zeroes[mem.ID]
	if ptr != z.base {
		// The write is not to the object being tracked.
		return true
	}
	// Mask of the pointer-sized words that this write covers.
	m := (uint64(1)<<(size/ptrSize) - 1) << (off / ptrSize)

	if z.mask&m == m {
		// All the words being written are known to be zero,
		// so they cannot currently hold heap pointers.
		return false
	}
	return true
}
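
// needwb reports whether we need a write barrier for the store op v,
// which must be one of OpStore, OpMove, or OpZero.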
func needwb(v *Value, zeroes map[ID]ZeroRegion) bool {
	t, ok := v.Aux.(*types.Type)
	if !ok {
		v.Fatalf("store aux is not a type: %s", v.LongString())
	}
	if !t.HasPointers() {
		return false
	}
	dst := v.Args[0]
	if IsStackAddr(dst) {
		return false // writes into the stack don't need a write barrier
	}

	// If the destination might already contain a heap pointer,
	// we need a write barrier so the GC can see the old value.
	if mightContainHeapPointer(dst, t.Size(), v.MemoryArg(), zeroes) {
		return true
	}

	// Otherwise, we only need a write barrier if the value being
	// written might itself be a heap pointer.
	switch v.Op {
	case OpStore:
		if !mightBeHeapPointer(v.Args[1]) {
			return false
		}
	case OpZero:
		return false // zero values are not heap pointers
	case OpMove:
		if !mightContainHeapPointer(v.Args[1], t.Size(), v.Args[2], zeroes) {
			return false
		}
	default:
		v.Fatalf("store op unknown: %s", v.LongString())
	}
	return true
}
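
// needWBsrc reports whether the GC needs to see v when it is the
// source of a pointer store.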
func needWBsrc(v *Value) bool {
	return !IsGlobalAddr(v)
}
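
// needWBdst reports whether the GC needs to see the current content of
// *ptr (in memory state mem) when ptr is the destination of a pointer store.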
func needWBdst(ptr, mem *Value, zeroes map[ID]ZeroRegion) bool {
	// Detect stores to memory that is known to still be zeroed.
	var off int64
	for ptr.Op == OpOffPtr {
		off += ptr.AuxInt
		ptr = ptr.Args[0]
	}
	ptrSize := ptr.Block.Func.Config.PtrSize
	if off%ptrSize != 0 {
		return true // be conservative about unaligned offsets
	}
	if off < 0 || off >= 64*ptrSize {
		// Write is outside the tracked region.
		return true
	}
	z := zeroes[mem.ID]
	if ptr != z.base {
		return true
	}
	// If the word being overwritten is known to be zero, the GC does not
	// need to see the old value.
	return z.mask>>uint(off/ptrSize)&1 == 0
}
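
// writebarrier rewrites store ops (Store, Move, Zero) that need a write
// barrier into a conditional branch on the runtime's writeBarrier flag.
// On the barrier-enabled path, the pointers the GC needs to see are appended
// to the write barrier buffer (via OpWB calls, or wbZero/wbMove runtime calls
// for bulk ops) before the stores themselves are performed. Consecutive
// stores that need barriers within a block share a single flag check.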
func writebarrier(f *Func) {
	if !f.fe.UseWriteBarrier() {
		return
	}

	// maxEntries is the maximum number of pointer slots requested from the
	// write barrier buffer by a single OpWB call.
	const maxEntries = 8

	var sb, sp, wbaddr, const0 *Value
	var cgoCheckPtrWrite, cgoCheckMemmove *obj.LSym
	var wbZero, wbMove *obj.LSym
	var stores, after []*Value
	var sset, sset2 *sparseSet
	var storeNumber []int32

	// Map from a value ID to the SelectN [1] value that uses it.
	select1 := f.Cache.allocValueSlice(f.NumValues())
	defer func() { f.Cache.freeValueSlice(select1) }()
	for _, b := range f.Blocks {
		for _, v := range b.Values {
			if v.Op != OpSelectN {
				continue
			}
			if v.AuxInt != 1 {
				continue
			}
			select1[v.Args[0].ID] = v
		}
	}

	zeroes := f.computeZeroMap(select1)
	for _, b := range f.Blocks {
		// First, identify the stores in this block that need a write
		// barrier and mark them with the corresponding WB op.
		nWBops := 0
		for _, v := range b.Values {
			switch v.Op {
			case OpStore, OpMove, OpZero:
				if needwb(v, zeroes) {
					switch v.Op {
					case OpStore:
						v.Op = OpStoreWB
					case OpMove:
						v.Op = OpMoveWB
					case OpZero:
						v.Op = OpZeroWB
					}
					nWBops++
				}
			}
		}
		if nWBops == 0 {
			continue
		}

		if wbaddr == nil {
			// Lazily initialize global values shared by all write
			// barrier tests and calls in this function.
			initpos := f.Entry.Pos
			sp, sb = f.spSb()
			wbsym := f.fe.Syslook("writeBarrier")
			wbaddr = f.Entry.NewValue1A(initpos, OpAddr, f.Config.Types.UInt32Ptr, wbsym, sb)
			wbZero = f.fe.Syslook("wbZero")
			wbMove = f.fe.Syslook("wbMove")
			if buildcfg.Experiment.CgoCheck2 {
				cgoCheckPtrWrite = f.fe.Syslook("cgoCheckPtrWrite")
				cgoCheckMemmove = f.fe.Syslook("cgoCheckMemmove")
			}
			const0 = f.ConstInt32(f.Config.Types.UInt32, 0)

			// Allocate auxiliary data structures for computing store order.
			sset = f.newSparseSet(f.NumValues())
			defer f.retSparseSet(sset)
			sset2 = f.newSparseSet(f.NumValues())
			defer f.retSparseSet(sset2)
			storeNumber = f.Cache.allocInt32Slice(f.NumValues())
			defer f.Cache.freeInt32Slice(storeNumber)
		}

		// Order values in store order.
		b.Values = storeOrder(b.Values, sset, storeNumber)
	again:
		// Find a sequence of write-barrier stores ending at the last WB
		// store in the block, possibly with a few ordinary stores and
		// VarDef/VarLive markers interleaved. The sequence will be
		// expanded behind a single write barrier flag check.
		var last *Value
		var start, end int
		var nonPtrStores int
		values := b.Values
	FindSeq:
		for i := len(values) - 1; i >= 0; i-- {
			w := values[i]
			switch w.Op {
			case OpStoreWB, OpMoveWB, OpZeroWB:
				start = i
				if last == nil {
					last = w
					end = i + 1
				}
				nonPtrStores = 0
			case OpVarDef, OpVarLive:
				continue
			case OpStore:
				if last == nil {
					continue
				}
				nonPtrStores++
				if nonPtrStores > 2 {
					// Allow at most two consecutive non-pointer stores
					// inside the sequence.
					break FindSeq
				}
			default:
				if last == nil {
					continue
				}
				break FindSeq
			}
		}
		stores = append(stores[:0], b.Values[start:end]...)
		after = append(after[:0], b.Values[end:]...)
		b.Values = b.Values[:start]
		// mem is the memory state at the start of the store sequence.
		mem := stores[0].MemoryArg()
		pos := stores[0].Pos

		// If the source of a MoveWB is volatile (its memory will be
		// clobbered when arguments for a call are marshaled on the stack),
		// copy it to a temporary before issuing any runtime calls. This is
		// only needed when the calls pass their arguments on the stack.
		type volatileCopy struct {
			src *Value // address of original volatile value
			tmp *Value // address of temporary we copied the volatile value to
		}
		var volatiles []volatileCopy

		if !(f.ABIDefault == f.ABI1 && len(f.Config.intParamRegs) >= 3) {
			// We don't need to do this if the calls we're going to do take
			// all their arguments in registers.
		copyLoop:
			for _, w := range stores {
				if w.Op == OpMoveWB {
					val := w.Args[1]
					if isVolatile(val) {
						for _, c := range volatiles {
							if val == c.src {
								continue copyLoop // already copied
							}
						}

						// Copy the volatile source to a new temporary.
						t := val.Type.Elem()
						tmp := f.NewLocal(w.Pos, t)
						mem = b.NewValue1A(w.Pos, OpVarDef, types.TypeMem, tmp, mem)
						tmpaddr := b.NewValue2A(w.Pos, OpLocalAddr, t.PtrTo(), tmp, sp, mem)
						siz := t.Size()
						mem = b.NewValue3I(w.Pos, OpMove, types.TypeMem, siz, tmpaddr, val, mem)
						mem.Aux = t
						volatiles = append(volatiles, volatileCopy{val, tmpaddr})
					}
				}
			}
		}
		// Build the branch point.
		bThen := f.NewBlock(BlockPlain)
		bEnd := f.NewBlock(b.Kind)
		bThen.Pos = pos
		bEnd.Pos = b.Pos
		b.Pos = pos

		// Set up control flow for the end block: it takes over b's
		// controls and successors.
		bEnd.CopyControls(b)
		bEnd.Likely = b.Likely
		for _, e := range b.Succs {
			bEnd.Succs = append(bEnd.Succs, e)
			e.b.Preds[e.i].b = bEnd
		}

		// Set up the write barrier test: load the runtime's writeBarrier
		// flag word and branch to bThen if it is nonzero.
		cfgtypes := &f.Config.Types
		flag := b.NewValue2(pos, OpLoad, cfgtypes.UInt32, wbaddr, mem)
		flag = b.NewValue2(pos, OpNeq32, cfgtypes.Bool, flag, const0)
		b.Kind = BlockIf
		b.SetControl(flag)
		b.Likely = BranchUnlikely
		b.Succs = b.Succs[:0]
		b.AddEdgeTo(bThen)
		b.AddEdgeTo(bEnd)
		bThen.AddEdgeTo(bEnd)

		// On the bThen path, append pointers the GC needs to see to the
		// write barrier buffer. Each OpWB call reserves up to maxEntries
		// slots in the buffer.
		memThen := mem
		var curCall *Value
		var curPtr *Value
		addEntry := func(pos src.XPos, v *Value) {
			if curCall == nil || curCall.AuxInt == maxEntries {
				t := types.NewTuple(types.Types[types.TUINTPTR].PtrTo(), types.TypeMem)
				curCall = bThen.NewValue1(pos, OpWB, t, memThen)
				curPtr = bThen.NewValue1(pos, OpSelect0, types.Types[types.TUINTPTR].PtrTo(), curCall)
				memThen = bThen.NewValue1(pos, OpSelect1, types.TypeMem, curCall)
			}
			// Store v in the next free slot of the current buffer chunk.
			num := curCall.AuxInt
			curCall.AuxInt = num + 1
			wbuf := bThen.NewValue1I(pos, OpOffPtr, types.Types[types.TUINTPTR].PtrTo(), num*f.Config.PtrSize, curPtr)
			memThen = bThen.NewValue3A(pos, OpStore, types.TypeMem, types.Types[types.TUINTPTR], wbuf, v, memThen)
		}
		// Buffer the pointers the GC needs to see: for each pointer store,
		// the new value being written (if it might be a heap pointer) and
		// the old value currently at the destination (if the destination
		// might already hold a heap pointer).

		// srcs records the IDs of values we have already buffered as sources.
		srcs := sset
		srcs.clear()

		// dsts records the IDs of destinations whose old value we have
		// already loaded and buffered.
		dsts := sset2
		dsts.clear()

		for _, w := range stores {
			if w.Op != OpStoreWB {
				continue
			}
			pos := w.Pos
			ptr := w.Args[0]
			val := w.Args[1]
			if !srcs.contains(val.ID) && needWBsrc(val) {
				srcs.add(val.ID)
				addEntry(pos, val)
			}
			if !dsts.contains(ptr.ID) && needWBdst(ptr, w.Args[2], zeroes) {
				dsts.add(ptr.ID)

				// Load the old value from the store target. This happens
				// on the barrier-enabled path only.
				oldVal := bThen.NewValue2(pos, OpLoad, types.Types[types.TUINTPTR], ptr, memThen)
				// Save the old value to the write barrier buffer.
				addEntry(pos, oldVal)
			}
			f.fe.Func().SetWBPos(pos)
			nWBops--
		}
		// Zero and Move write barriers are handled by calling into the
		// runtime on the barrier-enabled path.
		for _, w := range stores {
			pos := w.Pos
			switch w.Op {
			case OpZeroWB:
				dst := w.Args[0]
				typ := reflectdata.TypeLinksym(w.Aux.(*types.Type))
				// wbZero(&typ, dst)
				taddr := b.NewValue1A(pos, OpAddr, b.Func.Config.Types.Uintptr, typ, sb)
				memThen = wbcall(pos, bThen, wbZero, sp, memThen, taddr, dst)
				f.fe.Func().SetWBPos(pos)
				nWBops--
			case OpMoveWB:
				dst := w.Args[0]
				src := w.Args[1]
				if isVolatile(src) {
					for _, c := range volatiles {
						if src == c.src {
							src = c.tmp
							break
						}
					}
				}
				typ := reflectdata.TypeLinksym(w.Aux.(*types.Type))
				// wbMove(&typ, dst, src)
				taddr := b.NewValue1A(pos, OpAddr, b.Func.Config.Types.Uintptr, typ, sb)
				memThen = wbcall(pos, bThen, wbMove, sp, memThen, taddr, dst, src)
				f.fe.Func().SetWBPos(pos)
				nWBops--
			}
		}
		// Merge the memory states of the two paths.
		mem = bEnd.NewValue2(pos, OpPhi, types.TypeMem, mem, memThen)

		// Do the actual stores (now as plain ops) after the merge point,
		// so they happen whether or not the barrier path was taken.
		for _, w := range stores {
			pos := w.Pos
			switch w.Op {
			case OpStoreWB:
				ptr := w.Args[0]
				val := w.Args[1]
				if buildcfg.Experiment.CgoCheck2 {
					// Issue cgo pointer-write checking code.
					mem = wbcall(pos, bEnd, cgoCheckPtrWrite, sp, mem, ptr, val)
				}
				mem = bEnd.NewValue3A(pos, OpStore, types.TypeMem, w.Aux, ptr, val, mem)
			case OpZeroWB:
				dst := w.Args[0]
				mem = bEnd.NewValue2I(pos, OpZero, types.TypeMem, w.AuxInt, dst, mem)
				mem.Aux = w.Aux
			case OpMoveWB:
				dst := w.Args[0]
				src := w.Args[1]
				if isVolatile(src) {
					for _, c := range volatiles {
						if src == c.src {
							src = c.tmp
							break
						}
					}
				}
				if buildcfg.Experiment.CgoCheck2 {
					// Issue cgo memmove checking code.
					typ := reflectdata.TypeLinksym(w.Aux.(*types.Type))
					taddr := b.NewValue1A(pos, OpAddr, b.Func.Config.Types.Uintptr, typ, sb)
					mem = wbcall(pos, bEnd, cgoCheckMemmove, sp, mem, taddr, dst, src)
				}
				mem = bEnd.NewValue3I(pos, OpMove, types.TypeMem, w.AuxInt, dst, src, mem)
				mem.Aux = w.Aux
			case OpVarDef, OpVarLive:
				mem = bEnd.NewValue1A(pos, w.Op, types.TypeMem, w.Aux, mem)
			case OpStore:
				ptr := w.Args[0]
				val := w.Args[1]
				mem = bEnd.NewValue3A(pos, OpStore, types.TypeMem, w.Aux, ptr, val, mem)
			}
		}
		// Turn the last store into a WBend marker, reusing its Value so
		// that all later memory users still have a valid memory argument.
		// WBend marks the end of the write barrier sequence for later passes.
		bEnd.Values = append(bEnd.Values, last)
		last.Block = bEnd
		last.reset(OpWBend)
		last.Pos = last.Pos.WithNotStmt()
		last.Type = types.TypeMem
		last.AddArg(mem)

		// Free all the old stores except last, which was reused as the
		// WBend marker.
		for _, w := range stores {
			if w != last {
				w.resetArgs()
			}
		}
		for _, w := range stores {
			if w != last {
				f.freeValue(w)
			}
		}

		// Put values that came after the store sequence into the end block.
		bEnd.Values = append(bEnd.Values, after...)
		for _, w := range after {
			w.Block = bEnd
		}

		// If there are more write-barrier stores left in this block,
		// process the block again.
		if nWBops > 0 {
			goto again
		}
	}
}
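
// computeZeroMap returns a map from the ID of a memory value to the
// ZeroRegion (if any) describing which words of a newly allocated object
// are still known to be zero in that memory state.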
func (f *Func) computeZeroMap(select1 []*Value) map[ID]ZeroRegion {
	ptrSize := f.Config.PtrSize

	// Keep track of which parts of memory are known to be zero.
	// This helps with removing write barriers for initial writes to
	// newly allocated objects, which start out all zero.
	zeroes := map[ID]ZeroRegion{}

	// Find newly allocated objects.
	for _, b := range f.Blocks {
		for _, v := range b.Values {
			if mem, ok := IsNewObject(v, select1); ok {
				// While compiling package runtime itself, we might see
				// calls to newobject whose result type is unsafe.Pointer
				// with no element type to size; skip them.
				if types.LocalPkg.Path == "runtime" && v.Type.IsUnsafePtr() {
					continue
				}

				// Track at most the first 64 pointer-sized words.
				nptr := v.Type.Elem().Size() / ptrSize
				if nptr > 64 {
					nptr = 64
				}
				zeroes[mem.ID] = ZeroRegion{base: v, mask: 1<<uint(nptr) - 1}
			}
		}
	}

	// Find stores to those new objects and propagate the zero regions
	// through subsequent memory states until no more changes occur.
	for {
		changed := false
		for _, b := range f.Blocks {
			// Note: iterating forwards helps convergence, as values are
			// typically (but not always) ordered in store order.
			for _, v := range b.Values {
				if v.Op != OpStore {
					continue
				}
				z, ok := zeroes[v.MemoryArg().ID]
				if !ok {
					continue
				}
				ptr := v.Args[0]
				var off int64
				size := v.Aux.(*types.Type).Size()
				for ptr.Op == OpOffPtr {
					off += ptr.AuxInt
					ptr = ptr.Args[0]
				}
				if ptr != z.base {
					// Store to a possibly different object: we can no
					// longer be sure the tracked region stays zero, so
					// don't propagate it past this store.
					continue
				}

				// Round the written range out to pointer-word boundaries.
				// Pointer writes are always aligned, but non-pointer writes
				// into pointer-containing types may not be.
				if d := off % ptrSize; d != 0 {
					off -= d
					size += d
				}
				if d := size % ptrSize; d != 0 {
					size += ptrSize - d
				}
				// Clip the written range to the 64 words we track.
				min := off
				max := off + size
				if min < 0 {
					min = 0
				}
				if max > 64*ptrSize {
					max = 64 * ptrSize
				}
				// Clear bits for the words being written, as they are no
				// longer necessarily zero.
				for i := min; i < max; i += ptrSize {
					bit := i / ptrSize
					z.mask &^= 1 << uint(bit)
				}
				if z.mask == 0 {
					// No more known zeros - don't bother keeping.
					continue
				}
				// Save the updated known-zero contents for the new memory state.
				if zeroes[v.ID] != z {
					zeroes[v.ID] = z
					changed = true
				}
			}
		}
		if !changed {
			break
		}
	}
	if f.pass.debug > 0 {
		fmt.Printf("func %s\n", f.Name)
		for mem, z := range zeroes {
			fmt.Printf(" memory=v%d ptr=%v zeromask=%b\n", mem, z.base, z.mask)
		}
	}
	return zeroes
}
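
// wbcall emits a write barrier runtime call in b and returns the new memory.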
func wbcall(pos src.XPos, b *Block, fn *obj.LSym, sp, mem *Value, args ...*Value) *Value {
	config := b.Func.Config
	typ := config.Types.Uintptr
	nargs := len(args)

	// If the write barrier runtime calls don't take their arguments in
	// registers, pass them on the stack.
	inRegs := b.Func.ABIDefault == b.Func.ABI1 && len(config.intParamRegs) >= 3

	if !inRegs {
		// Store the arguments to the appropriate stack slots.
		off := config.ctxt.Arch.FixedFrameSize
		for _, arg := range args {
			stkaddr := b.NewValue1I(pos, OpOffPtr, typ.PtrTo(), off, sp)
			mem = b.NewValue3A(pos, OpStore, types.TypeMem, typ, stkaddr, arg, mem)
			off += typ.Size()
		}
		args = args[:0]
	}

	args = append(args, mem)

	// Issue the call.
	argTypes := make([]*types.Type, nargs, 3) // at most 3 args
	for i := 0; i < nargs; i++ {
		argTypes[i] = typ
	}
	call := b.NewValue0A(pos, OpStaticCall, types.TypeResultMem, StaticAuxCall(fn, b.Func.ABIDefault.ABIAnalyzeTypes(argTypes, nil)))
	call.AddArgs(args...)
	call.AuxInt = int64(nargs) * typ.Size()
	return b.NewValue1I(pos, OpSelectN, types.TypeMem, 0, call)
}
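
// round rounds o up to a multiple of r, where r is a power of 2.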
func round(o int64, r int64) int64 {
	return (o + r - 1) &^ (r - 1)
}
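
// IsStackAddr reports whether v is known to be an address of a stack slot.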
func IsStackAddr(v *Value) bool {
	for v.Op == OpOffPtr || v.Op == OpAddPtr || v.Op == OpPtrIndex || v.Op == OpCopy {
		v = v.Args[0]
	}
	switch v.Op {
	case OpSP, OpLocalAddr, OpSelectNAddr, OpGetCallerSP:
		return true
	}
	return false
}
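
// IsGlobalAddr reports whether v is known to be an address of a global (or nil).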
func IsGlobalAddr(v *Value) bool {
	for v.Op == OpOffPtr || v.Op == OpAddPtr || v.Op == OpPtrIndex || v.Op == OpCopy {
		v = v.Args[0]
	}
	if v.Op == OpAddr && v.Args[0].Op == OpSB {
		return true // address of a global
	}
	if v.Op == OpConstNil {
		return true
	}
	if v.Op == OpLoad && IsReadOnlyGlobalAddr(v.Args[0]) {
		return true // loading from a read-only global - the resulting address can't be a heap address
	}
	return false
}
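
// IsReadOnlyGlobalAddr reports whether v is known to be an address of a read-only global.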
func IsReadOnlyGlobalAddr(v *Value) bool {
	if v.Op == OpConstNil {
		// Nil pointers are read only.
		return true
	}
	if v.Op == OpAddr && v.Aux != nil && v.Aux.(*obj.LSym).Type == objabi.SRODATA {
		return true
	}
	return false
}
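
// IsNewObject reports whether v is a pointer to a freshly allocated and zeroed
// object; if so, it also returns the memory state mem at which v is all zero.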
func IsNewObject(v *Value, select1 []*Value) (mem *Value, ok bool) {
	f := v.Block.Func
	c := f.Config
	if f.ABIDefault == f.ABI1 && len(c.intParamRegs) >= 1 {
		// Register ABI: the result of newobject is returned in a register,
		// so v should be the SelectN [0] of the call.
		if v.Op != OpSelectN || v.AuxInt != 0 {
			return nil, false
		}
		mem = select1[v.Args[0].ID]
		if mem == nil {
			return nil, false
		}
	} else {
		// Stack ABI: the result is loaded from the call's return slot.
		if v.Op != OpLoad {
			return nil, false
		}
		mem = v.MemoryArg()
		if mem.Op != OpSelectN {
			return nil, false
		}
		if mem.Type != types.TypeMem {
			return nil, false
		}
	}
	call := mem.Args[0]
	if call.Op != OpStaticCall {
		return nil, false
	}
	if !isSameCall(call.Aux, "runtime.newobject") {
		return nil, false
	}
	if f.ABIDefault == f.ABI1 && len(c.intParamRegs) >= 1 {
		if v.Args[0] == call {
			return mem, true
		}
		return nil, false
	}
	if v.Args[0].Op != OpOffPtr {
		return nil, false
	}
	if v.Args[0].Args[0].Op != OpSP {
		return nil, false
	}
	if v.Args[0].AuxInt != c.ctxt.Arch.FixedFrameSize+c.RegSize { // offset of the return value
		return nil, false
	}
	return mem, true
}
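
// IsSanitizerSafeAddr reports whether v is known to be an address that does
// not need instrumentation by the race/msan/asan sanitizers.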
func IsSanitizerSafeAddr(v *Value) bool {
	for v.Op == OpOffPtr || v.Op == OpAddPtr || v.Op == OpPtrIndex || v.Op == OpCopy {
		v = v.Args[0]
	}
	switch v.Op {
	case OpSP, OpLocalAddr, OpSelectNAddr:
		// Stack addresses are always safe.
		return true
	case OpITab, OpStringPtr, OpGetClosurePtr:
		// Itabs, string data, and closure fields are
		// read-only once initialized.
		return true
	case OpAddr:
		vt := v.Aux.(*obj.LSym).Type
		return vt == objabi.SRODATA || vt == objabi.SLIBFUZZER_8BIT_COUNTER || vt == objabi.SCOVERAGE_COUNTER || vt == objabi.SCOVERAGE_AUXVAR
	}
	return false
}
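
// isVolatile reports whether v is a pointer into the argument region on the
// stack, which will be clobbered by a function call.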
func isVolatile(v *Value) bool {
	for v.Op == OpOffPtr || v.Op == OpAddPtr || v.Op == OpPtrIndex || v.Op == OpCopy || v.Op == OpSelectNAddr {
		v = v.Args[0]
	}
	return v.Op == OpSP
}