Source file
src/runtime/mbitmap.go
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56 package runtime
57
58 import (
59 "internal/abi"
60 "internal/goarch"
61 "internal/runtime/atomic"
62 "internal/runtime/sys"
63 "unsafe"
64 )
65
const (
	// mallocHeaderSize is the size in bytes of the inline allocation
	// header placed at the start of sufficiently large objects. The
	// header holds a *_type describing the object's layout (see
	// typePointersOfUnchecked, which reads it from the object's first
	// word and then skips past it).
	mallocHeaderSize = 8

	// minSizeForMallocHeader is the object size above which an object
	// carries a malloc header. At or below this size, the object's
	// pointer bitmap fits in a single pointer-sized word and is stored
	// directly in the span (see heapBitsInSpan): one word of ptrBits
	// bits describes goarch.PtrSize*ptrBits bytes of data.
	minSizeForMallocHeader = goarch.PtrSize * ptrBits
)
103
104
105
106
107
108
109
110
111
112 func heapBitsInSpan(userSize uintptr) bool {
113
114
115 return userSize <= minSizeForMallocHeader
116 }
117
118
119
120
121
// typePointers is an iterator over the pointer-containing words of one
// or more consecutive objects within a span. It is advanced with next
// (and fastForward) and yields the address of each word that may hold
// a heap pointer.
type typePointers struct {
	// elem is the base address of the current "element" being iterated:
	// either the object itself, or — when the object is effectively an
	// array of typ — the current array element.
	elem uintptr

	// addr is the address corresponding to bit 0 of mask: bit i of mask
	// describes the word at addr + i*goarch.PtrSize.
	addr uintptr

	// mask is the pointer bitmap for the ptrBits words starting at
	// addr. Bits are consumed as the iterator advances; a zero mask
	// with a nil typ means iteration is done.
	mask uintptr

	// typ describes the element layout used to refill mask when the
	// current word of bits is exhausted. It is nil for small objects
	// whose entire bitmap fits in mask.
	typ *_type
}
142
143
144
145
146
147
148
149
150
151
152
153
// typePointersOf returns an iterator over the pointer words of the
// memory range [addr, addr+size) within span. addr and size must be
// pointer-aligned; addr need not be an object base — the iterator is
// fast-forwarded to the requested window when it is not.
func (span *mspan) typePointersOf(addr, size uintptr) typePointers {
	base := span.objBase(addr)
	tp := span.typePointersOfUnchecked(base)
	if base == addr && size == span.elemsize {
		// Fast path: the window is exactly one whole object.
		return tp
	}
	// Skip forward to addr and clamp the iterator to addr+size.
	return tp.fastForward(addr-tp.addr, addr+size)
}
162
163
164
165
166
167
168
169
170
// typePointersOfUnchecked returns an iterator over the pointer words
// of the single object whose base address is addr. Unlike
// typePointersOf, it does not validate or adjust addr; addr must be
// the object's base.
func (span *mspan) typePointersOfUnchecked(addr uintptr) typePointers {
	const doubleCheck = false
	if doubleCheck && span.objBase(addr) != addr {
		print("runtime: addr=", addr, " base=", span.objBase(addr), "\n")
		throw("typePointersOfUnchecked consisting of non-base-address for object")
	}

	spc := span.spanclass
	if spc.noscan() {
		// No pointers at all: empty iterator.
		return typePointers{}
	}
	if heapBitsInSpan(span.elemsize) {
		// Small object: the whole bitmap fits in one word, read
		// straight out of the span's heap bits. typ stays nil.
		return typePointers{elem: addr, addr: addr, mask: span.heapBitsSmallForAddr(addr)}
	}

	// Larger objects carry a type, either in an inline malloc header
	// (small size classes) or on the span itself (large objects).
	var typ *_type
	if spc.sizeclass() != 0 {
		// Pull the type from the malloc header in the object's first
		// word, then skip past the header.
		typ = *(**_type)(unsafe.Pointer(addr))
		addr += mallocHeaderSize
	} else {
		typ = span.largeType
		if typ == nil {
			// NOTE(review): a nil largeType appears to mean the type
			// hasn't been published yet (e.g. delayed setup for a
			// fresh large allocation) — treat as pointer-free.
			return typePointers{}
		}
	}
	gcmask := getGCMask(typ)
	return typePointers{elem: addr, addr: addr, mask: readUintptr(gcmask), typ: typ}
}
203
204
205
206
207
208
209
210
211
212
// typePointersOfType returns an iterator over the pointer words of
// memory at addr laid out as the (non-nil) type typ, bypassing the
// span's own metadata. Used when the caller already knows the precise
// type of the destination (see bulkBarrierPreWrite).
func (span *mspan) typePointersOfType(typ *abi.Type, addr uintptr) typePointers {
	const doubleCheck = false
	if doubleCheck && typ == nil {
		throw("bad type passed to typePointersOfType")
	}
	if span.spanclass.noscan() {
		// Span holds no pointers regardless of claimed type.
		return typePointers{}
	}
	// Seed the iterator with the first word of typ's pointer bitmap.
	gcmask := getGCMask(typ)
	return typePointers{elem: addr, addr: addr, mask: readUintptr(gcmask), typ: typ}
}
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
// nextFast is the fast path of next: it consumes and returns the
// address of the lowest set bit in tp.mask, or 0 if the mask is empty.
// It never refills the mask; callers fall back to next for that.
func (tp typePointers) nextFast() (typePointers, uintptr) {
	// TESTQ/JEQ-style early out: no pointers left in this mask word.
	if tp.mask == 0 {
		return tp, 0
	}
	// BSFQ: index of the lowest set bit.
	var i int
	if goarch.PtrSize == 8 {
		i = sys.TrailingZeros64(uint64(tp.mask))
	} else {
		i = sys.TrailingZeros32(uint32(tp.mask))
	}
	// BTCQ: clear the bit we just consumed. Masking the shift amount
	// keeps it provably in range for the compiler.
	tp.mask ^= uintptr(1) << (i & (ptrBits - 1))
	// LEAQ: translate the bit index into a word address.
	return tp, tp.addr + uintptr(i)*goarch.PtrSize
}
263
264
265
266
267
268
269
270
271
// next advances the iterator and returns the address of the next
// pointer word, or 0 when there are no pointers left before limit.
// limit must be consistent across all calls for a given iteration
// (typically object end or window end).
func (tp typePointers) next(limit uintptr) (typePointers, uintptr) {
	for {
		if tp.mask != 0 {
			return tp.nextFast()
		}

		// Mask exhausted and no type to refill from: done.
		if tp.typ == nil {
			return typePointers{}, 0
		}

		// Advance to the next mask word. If that would step past the
		// pointer-bearing prefix (PtrBytes) of the current element,
		// move to the next array element instead.
		if tp.addr+goarch.PtrSize*ptrBits >= tp.elem+tp.typ.PtrBytes {
			tp.elem += tp.typ.Size_
			tp.addr = tp.elem
		} else {
			tp.addr += ptrBits * goarch.PtrSize
		}

		// Ran off the end of the iteration window.
		if tp.addr >= limit {
			return typePointers{}, 0
		}

		// Load the mask word covering tp.addr from the type's bitmap,
		// then zero out any bits describing words at or past limit.
		tp.mask = readUintptr(addb(getGCMask(tp.typ), (tp.addr-tp.elem)/goarch.PtrSize/8))
		if tp.addr+goarch.PtrSize*ptrBits > limit {
			bits := (tp.addr + goarch.PtrSize*ptrBits - limit) / goarch.PtrSize
			tp.mask &^= ((1 << (bits)) - 1) << (ptrBits - bits)
		}
	}
}
304
305
306
307
308
309
310
311
// fastForward advances the iterator by n bytes (a multiple of
// goarch.PtrSize) and clamps it to limit. limit must be the same value
// later passed to next for this iteration.
func (tp typePointers) fastForward(n, limit uintptr) typePointers {
	// If the target is already past the window, there is nothing left.
	target := tp.addr + n
	if target >= limit {
		return typePointers{}
	}
	if tp.typ == nil {
		// Small object: everything is in mask. Drop bits below the
		// target address...
		tp.mask &^= (1 << ((target - tp.addr) / goarch.PtrSize)) - 1
		// ...and bits at or past limit.
		if tp.addr+goarch.PtrSize*ptrBits > limit {
			bits := (tp.addr + goarch.PtrSize*ptrBits - limit) / goarch.PtrSize
			tp.mask &^= ((1 << (bits)) - 1) << (ptrBits - bits)
		}
		return tp
	}

	// Move elem and addr forward. addr always stays on a
	// ptrBits*goarch.PtrSize boundary within the element.
	if n >= tp.typ.Size_ {
		// The target lies in a later array element; land elem on the
		// element containing tp.addr + n.
		oldelem := tp.elem
		tp.elem += (tp.addr - tp.elem + n) / tp.typ.Size_ * tp.typ.Size_
		tp.addr = tp.elem + alignDown(n-(tp.elem-oldelem), ptrBits*goarch.PtrSize)
	} else {
		tp.addr += alignDown(n, ptrBits*goarch.PtrSize)
	}

	if tp.addr-tp.elem >= tp.typ.PtrBytes {
		// Landed in the pointer-free tail of an element; skip to the
		// start of the next element and take its first mask word.
		tp.elem += tp.typ.Size_
		tp.addr = tp.elem
		tp.mask = readUintptr(getGCMask(tp.typ))

		// That skip may have crossed limit; bail like next does.
		if tp.addr >= limit {
			return typePointers{}
		}
	} else {
		// Load the mask word covering tp.addr, then drop bits below
		// the target address.
		tp.mask = readUintptr(addb(getGCMask(tp.typ), (tp.addr-tp.elem)/goarch.PtrSize/8))
		tp.mask &^= (1 << ((target - tp.addr) / goarch.PtrSize)) - 1
	}
	// Finally, drop any bits at or past limit.
	if tp.addr+goarch.PtrSize*ptrBits > limit {
		bits := (tp.addr + goarch.PtrSize*ptrBits - limit) / goarch.PtrSize
		tp.mask &^= ((1 << (bits)) - 1) << (ptrBits - bits)
	}
	return tp
}
365
366
367
368
369
370
371 func (span *mspan) objBase(addr uintptr) uintptr {
372 return span.base() + span.objIndex(addr)*span.elemsize
373 }
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
// bulkBarrierPreWrite executes write barriers for every pointer slot
// in [dst, dst+size) before a bulk copy of [src, src+size) into it
// (or before a bulk clear if src == 0). dst, src, and size must be
// pointer-aligned. typ, if non-nil, is the exact type of the memory
// being written and is used as a faster source of pointer metadata.
//
// Old destination values (and new source values, when src != 0) are
// enqueued on the current P's write-barrier buffer.
func bulkBarrierPreWrite(dst, src, size uintptr, typ *abi.Type) {
	if (dst|src|size)&(goarch.PtrSize-1) != 0 {
		throw("bulkBarrierPreWrite: unaligned arguments")
	}
	if !writeBarrier.enabled {
		return
	}
	s := spanOf(dst)
	if s == nil {
		// Not the heap: dst may be in the data or BSS segments, which
		// have their own precomputed pointer bitmaps.
		for _, datap := range activeModules() {
			if datap.data <= dst && dst < datap.edata {
				bulkBarrierBitmap(dst, src, size, dst-datap.data, datap.gcdatamask.bytedata)
				return
			}
		}
		for _, datap := range activeModules() {
			if datap.bss <= dst && dst < datap.ebss {
				bulkBarrierBitmap(dst, src, size, dst-datap.bss, datap.gcbssmask.bytedata)
				return
			}
		}
		// Otherwise (e.g. stack or off-heap memory): no barriers needed.
		return
	} else if s.state.get() != mSpanInUse || dst < s.base() || s.limit <= dst {
		// dst resolves to a span but not to live in-use heap memory
		// (e.g. a stack span, or an address past the span's used
		// region). No barriers required.
		return
	}
	buf := &getg().m.p.ptr().wbBuf

	// Double-check that typ-based iteration agrees with span metadata.
	const doubleCheck = false
	if doubleCheck {
		doubleCheckTypePointersOfType(s, typ, dst, size)
	}

	var tp typePointers
	if typ != nil {
		tp = s.typePointersOfType(typ, dst)
	} else {
		tp = s.typePointersOf(dst, size)
	}
	if src == 0 {
		// Memory is being zeroed: record only the old dst pointers.
		for {
			var addr uintptr
			if tp, addr = tp.next(dst + size); addr == 0 {
				break
			}
			dstx := (*uintptr)(unsafe.Pointer(addr))
			p := buf.get1()
			p[0] = *dstx
		}
	} else {
		// Memory is being copied: record old dst and new src pointers
		// at the corresponding offsets.
		for {
			var addr uintptr
			if tp, addr = tp.next(dst + size); addr == 0 {
				break
			}
			dstx := (*uintptr)(unsafe.Pointer(addr))
			srcx := (*uintptr)(unsafe.Pointer(src + (addr - dst)))
			p := buf.get2()
			p[0] = *dstx
			p[1] = *srcx
		}
	}
}
488
489
490
491
492
493
494
495
496
497
498
499
500
501
502
// bulkBarrierPreWriteSrcOnly is like bulkBarrierPreWrite but enqueues
// only the new (source) pointer values, not the old destination
// values. Used when the destination is known to hold no live pointers
// (e.g. freshly cleared memory). dst, src, and size must be
// pointer-aligned; src must not be 0.
func bulkBarrierPreWriteSrcOnly(dst, src, size uintptr, typ *abi.Type) {
	if (dst|src|size)&(goarch.PtrSize-1) != 0 {
		throw("bulkBarrierPreWrite: unaligned arguments")
	}
	if !writeBarrier.enabled {
		return
	}
	buf := &getg().m.p.ptr().wbBuf
	s := spanOf(dst)

	// Double-check typ-based iteration against span metadata.
	const doubleCheck = false
	if doubleCheck {
		doubleCheckTypePointersOfType(s, typ, dst, size)
	}

	var tp typePointers
	if typ != nil {
		tp = s.typePointersOfType(typ, dst)
	} else {
		tp = s.typePointersOf(dst, size)
	}
	for {
		var addr uintptr
		if tp, addr = tp.next(dst + size); addr == 0 {
			break
		}
		// Record the incoming pointer at the matching src offset.
		srcx := (*uintptr)(unsafe.Pointer(addr - dst + src))
		p := buf.get1()
		p[0] = *srcx
	}
}
535
536
// initHeapBits initializes the in-span heap bitmap for s, if it has one.
func (s *mspan) initHeapBits() {
	if goarch.PtrSize == 8 && !s.spanclass.noscan() && s.spanclass.sizeclass() == 1 {
		// NOTE(review): size class 1 on 64-bit appears to be the
		// one-word object class, where every word is a pointer, so all
		// bits are set up front — confirm against sizeclasses table.
		b := s.heapBits()
		for i := range b {
			b[i] = ^uintptr(0)
		}
	} else if (!s.spanclass.noscan() && heapBitsInSpan(s.elemsize)) || s.isUserArenaChunk {
		// Spans with in-span bitmaps (and user arena chunks) start with
		// all bits clear; allocation writes the real bits later.
		b := s.heapBits()
		clear(b)
	}
}
548
549
550
551
552
553
554
555
556
557
558
559
560
561
562
// heapBits returns the span's in-span pointer bitmap as a []uintptr.
// Only valid for spans whose objects keep their heap bits in the span
// (see heapBitsInSpan) or for user arena chunks.
func (span *mspan) heapBits() []uintptr {
	const doubleCheck = false

	if doubleCheck && !span.isUserArenaChunk {
		if span.spanclass.noscan() {
			throw("heapBits called for noscan")
		}
		if span.elemsize > minSizeForMallocHeader {
			throw("heapBits called for span class that should have a malloc header")
		}
	}
	// The bitmap occupies the tail of the span's memory.
	// Writing span.npages == 1 as a special case lets the compiler use
	// a constant size in the common single-page path.
	if span.npages == 1 {
		// Fast path for the common case (small spans are 1 page).
		return heapBitsSlice(span.base(), pageSize)
	}
	return heapBitsSlice(span.base(), span.npages*pageSize)
}
583
584
585
586
// heapBitsSlice returns the in-span bitmap for a span of the given
// base and size as a []uintptr. The bitmap is one bit per word of the
// span and lives in the last bitmapSize bytes of the span itself.
func heapBitsSlice(spanBase, spanSize uintptr) []uintptr {
	bitmapSize := spanSize / goarch.PtrSize / 8
	elems := int(bitmapSize / goarch.PtrSize)
	// Manufacture the slice header by hand: the bitmap is not a normal
	// Go allocation, so we go through notInHeapSlice.
	var sl notInHeapSlice
	sl = notInHeapSlice{(*notInHeap)(unsafe.Pointer(spanBase + spanSize - bitmapSize)), elems, elems}
	return *(*[]uintptr)(unsafe.Pointer(&sl))
}
594
595
596
597
598
599
600
// heapBitsSmallForAddr loads the pointer bitmap for the small object
// at addr from the span's in-span bitmap. The object's bits may
// straddle two bitmap words, in which case the two reads are stitched
// together.
func (span *mspan) heapBitsSmallForAddr(addr uintptr) uintptr {
	spanSize := span.npages * pageSize
	bitmapSize := spanSize / goarch.PtrSize / 8
	hbits := (*byte)(unsafe.Pointer(span.base() + spanSize - bitmapSize))

	// i = index of the bitmap word holding the object's first bit;
	// j = bit offset within that word;
	// bits = number of bits describing the object (one per word).
	i := (addr - span.base()) / goarch.PtrSize / ptrBits
	j := (addr - span.base()) / goarch.PtrSize % ptrBits
	bits := span.elemsize / goarch.PtrSize
	word0 := (*uintptr)(unsafe.Pointer(addb(hbits, goarch.PtrSize*(i+0))))
	word1 := (*uintptr)(unsafe.Pointer(addb(hbits, goarch.PtrSize*(i+1))))

	var read uintptr
	if j+bits > ptrBits {
		// Straddles a word boundary: low part from word0, high part
		// from word1.
		bits0 := ptrBits - j
		bits1 := bits - bits0
		read = *word0 >> j
		read |= (*word1 & ((1 << bits1) - 1)) << bits0
	} else {
		// Fits in a single word.
		read = (*word0 >> j) & ((1 << bits) - 1)
	}
	return read
}
633
634
635
636
637
638
639
640
// writeHeapBitsSmall writes the pointer bitmap for the small object of
// type typ allocated at x (whose heap bits live in the span; see
// heapBitsInSpan). dataSize is the user-visible size, which may cover
// multiple elements of typ. It returns the number of bytes of the
// object that must be scanned by the GC.
func (span *mspan) writeHeapBitsSmall(x, dataSize uintptr, typ *_type) (scanSize uintptr) {
	// The type's bitmap fits in a single word by construction here.
	src0 := readUintptr(getGCMask(typ))

	// Replicate the per-element bitmap across all elements in the
	// object, accumulating the scan size as we go.
	scanSize = typ.PtrBytes
	src := src0
	if typ.Size_ == goarch.PtrSize {
		// One-word elements: all dataSize/PtrSize words are pointers.
		src = (1 << (dataSize / goarch.PtrSize)) - 1
	} else {
		// NOTE(review): with asan, dataSize includes a redzone and may
		// not be a multiple of typ.Size_, hence the asanenabled
		// exemptions below — confirm against malloc's asan handling.
		if doubleCheckHeapSetType && !asanenabled && dataSize%typ.Size_ != 0 {
			throw("runtime: (*mspan).writeHeapBitsSmall: dataSize is not a multiple of typ.Size_")
		}
		for i := typ.Size_; i < dataSize; i += typ.Size_ {
			src |= src0 << (i / goarch.PtrSize)
			scanSize += typ.Size_
		}
		if asanenabled {
			// Mask out any bits replicated past the (redzone-truncated)
			// end of the data.
			src &= (1 << (dataSize / goarch.PtrSize)) - 1
		}
	}

	// Write src into the span's bitmap, which occupies the tail of the
	// (single-page) span. The destination may straddle two words.
	dst := unsafe.Pointer(span.base() + pageSize - pageSize/goarch.PtrSize/8)
	o := (x - span.base()) / goarch.PtrSize
	i := o / ptrBits
	j := o % ptrBits
	bits := span.elemsize / goarch.PtrSize
	if j+bits > ptrBits {
		// Straddles a word boundary: split across dst0/dst1, preserving
		// neighboring objects' bits.
		bits0 := ptrBits - j
		bits1 := bits - bits0
		dst0 := (*uintptr)(add(dst, (i+0)*goarch.PtrSize))
		dst1 := (*uintptr)(add(dst, (i+1)*goarch.PtrSize))
		*dst0 = (*dst0)&(^uintptr(0)>>bits0) | (src << j)
		*dst1 = (*dst1)&^((1<<bits1)-1) | (src >> bits0)
	} else {
		// Fits in one word: mask out this object's slot and or in src.
		dst := (*uintptr)(add(dst, i*goarch.PtrSize))
		*dst = (*dst)&^(((1<<bits)-1)<<j) | (src << j)
	}

	// Sanity check: read the bits back and compare.
	const doubleCheck = false
	if doubleCheck {
		srcRead := span.heapBitsSmallForAddr(x)
		if srcRead != src {
			print("runtime: x=", hex(x), " i=", i, " j=", j, " bits=", bits, "\n")
			print("runtime: dataSize=", dataSize, " typ.Size_=", typ.Size_, " typ.PtrBytes=", typ.PtrBytes, "\n")
			print("runtime: src0=", hex(src0), " src=", hex(src), " srcRead=", hex(srcRead), "\n")
			throw("bad pointer bits written for small object")
		}
	}
	return
}
701
702
703
704
705
706
707
708
709
710
711
712
713
714
715
716
717
718
// doubleCheckHeapSetType enables expensive self-checks of heap type
// metadata writes; it follows the global doubleCheckMalloc setting.
const doubleCheckHeapSetType = doubleCheckMalloc

// heapSetTypeNoHeader records type metadata for a small object at x
// whose heap bits live in the span (no malloc header). Returns the
// number of bytes the GC must scan.
func heapSetTypeNoHeader(x, dataSize uintptr, typ *_type, span *mspan) uintptr {
	if doubleCheckHeapSetType && (!heapBitsInSpan(dataSize) || !heapBitsInSpan(span.elemsize)) {
		throw("tried to write heap bits, but no heap bits in span")
	}
	scanSize := span.writeHeapBitsSmall(x, dataSize, typ)
	if doubleCheckHeapSetType {
		doubleCheckHeapType(x, dataSize, typ, nil, span)
	}
	return scanSize
}
731
// heapSetTypeSmallHeader records type metadata for a small object at x
// that carries a malloc header: the type pointer is simply stored into
// the header slot. Returns the number of bytes the GC must scan (the
// whole element).
func heapSetTypeSmallHeader(x, dataSize uintptr, typ *_type, header **_type, span *mspan) uintptr {
	*header = typ
	if doubleCheckHeapSetType {
		doubleCheckHeapType(x, dataSize, typ, header, span)
	}
	return span.elemsize
}
739
// heapSetTypeLarge records type metadata for a large object at x: the
// type is published on the span itself rather than in a per-object
// header. Returns the number of bytes the GC must scan.
func heapSetTypeLarge(x, dataSize uintptr, typ *_type, span *mspan) uintptr {
	gctyp := typ
	// Publish the type for scanners reading span.largeType.
	span.largeType = gctyp
	if doubleCheckHeapSetType {
		doubleCheckHeapType(x, dataSize, typ, &span.largeType, span)
	}
	return span.elemsize
}
749
// doubleCheckHeapType validates freshly written heap type metadata for
// the object at x: first a full pointer-by-pointer check, then a check
// of a randomly chosen interior window to exercise fastForward.
func doubleCheckHeapType(x, dataSize uintptr, gctyp *_type, header **_type, span *mspan) {
	doubleCheckHeapPointers(x, dataSize, gctyp, header, span)

	// Check a random interior range. Headerless objects only have
	// dataSize bytes of meaningful bits; headered objects are covered
	// for the full element.
	maxIterBytes := span.elemsize
	if header == nil {
		maxIterBytes = dataSize
	}
	// Pick a random pointer-aligned offset and size within the object.
	off := alignUp(uintptr(cheaprand())%dataSize, goarch.PtrSize)
	size := dataSize - off
	if size == 0 {
		off -= goarch.PtrSize
		size += goarch.PtrSize
	}
	interior := x + off
	size -= alignDown(uintptr(cheaprand())%size, goarch.PtrSize)
	if size == 0 {
		size = goarch.PtrSize
	}
	// Round the window up to a whole number of elements, clamped to
	// the object's end.
	size = (size + gctyp.Size_ - 1) / gctyp.Size_ * gctyp.Size_
	if interior+size > x+maxIterBytes {
		size = x + maxIterBytes - interior
	}
	doubleCheckHeapPointersInterior(x, interior, size, dataSize, gctyp, header, span)
}
778
// doubleCheckHeapPointers verifies that the typePointers iterator for
// the object at x yields exactly the addresses the type's gcmask says
// it should — no more, no fewer. Throws with diagnostics on mismatch.
func doubleCheckHeapPointers(x, dataSize uintptr, typ *_type, header **_type, span *mspan) {
	// Walk the iterator in lock-step with the expected bits.
	tp := span.typePointersOfUnchecked(span.objBase(x))
	maxIterBytes := span.elemsize
	if header == nil {
		maxIterBytes = dataSize
	}
	bad := false
	for i := uintptr(0); i < maxIterBytes; i += goarch.PtrSize {
		// Compute the expected bit for word i from the type's gcmask.
		want := false
		if i < span.elemsize {
			off := i % typ.Size_
			if off < typ.PtrBytes {
				j := off / goarch.PtrSize
				want = *addb(getGCMask(typ), j/8)>>(j%8)&1 != 0
			}
		}
		if want {
			var addr uintptr
			tp, addr = tp.next(x + span.elemsize)
			if addr == 0 {
				println("runtime: found bad iterator")
			}
			if addr != x+i {
				print("runtime: addr=", hex(addr), " x+i=", hex(x+i), "\n")
				bad = true
			}
		}
	}
	if !bad {
		// The iterator produced every expected address; it must now be
		// exhausted, otherwise it has extra pointers.
		var addr uintptr
		tp, addr = tp.next(x + span.elemsize)
		if addr == 0 {
			return
		}
		println("runtime: extra pointer:", hex(addr))
	}
	// Mismatch: dump full state before throwing.
	print("runtime: hasHeader=", header != nil, " typ.Size_=", typ.Size_, " TFlagGCMaskOnDemaind=", typ.TFlag&abi.TFlagGCMaskOnDemand != 0, "\n")
	print("runtime: x=", hex(x), " dataSize=", dataSize, " elemsize=", span.elemsize, "\n")
	print("runtime: typ=", unsafe.Pointer(typ), " typ.PtrBytes=", typ.PtrBytes, "\n")
	print("runtime: limit=", hex(x+span.elemsize), "\n")
	tp = span.typePointersOfUnchecked(x)
	dumpTypePointers(tp)
	for {
		var addr uintptr
		if tp, addr = tp.next(x + span.elemsize); addr == 0 {
			println("runtime: would've stopped here")
			dumpTypePointers(tp)
			break
		}
		print("runtime: addr=", hex(addr), "\n")
		dumpTypePointers(tp)
	}
	throw("heapSetType: pointer entry not correct")
}
835
// doubleCheckHeapPointersInterior is like doubleCheckHeapPointers but
// checks an interior window [interior, interior+size) of the object at
// x, exercising the fastForward path of typePointersOf.
func doubleCheckHeapPointersInterior(x, interior, size, dataSize uintptr, typ *_type, header **_type, span *mspan) {
	bad := false
	if interior < x {
		print("runtime: interior=", hex(interior), " x=", hex(x), "\n")
		throw("found bad interior pointer")
	}
	off := interior - x
	tp := span.typePointersOf(interior, size)
	for i := off; i < off+size; i += goarch.PtrSize {
		// Compute the expected bit for word i from the type's gcmask.
		want := false
		if i < span.elemsize {
			off := i % typ.Size_
			if off < typ.PtrBytes {
				j := off / goarch.PtrSize
				want = *addb(getGCMask(typ), j/8)>>(j%8)&1 != 0
			}
		}
		if want {
			var addr uintptr
			tp, addr = tp.next(interior + size)
			if addr == 0 {
				println("runtime: found bad iterator")
				bad = true
			}
			if addr != x+i {
				print("runtime: addr=", hex(addr), " x+i=", hex(x+i), "\n")
				bad = true
			}
		}
	}
	if !bad {
		// All expected addresses seen; the iterator must be exhausted.
		var addr uintptr
		tp, addr = tp.next(interior + size)
		if addr == 0 {
			return
		}
		println("runtime: extra pointer:", hex(addr))
	}
	// Mismatch: dump state, a replay of the iteration, and the
	// expected bit string, then throw.
	print("runtime: hasHeader=", header != nil, " typ.Size_=", typ.Size_, "\n")
	print("runtime: x=", hex(x), " dataSize=", dataSize, " elemsize=", span.elemsize, " interior=", hex(interior), " size=", size, "\n")
	print("runtime: limit=", hex(interior+size), "\n")
	tp = span.typePointersOf(interior, size)
	dumpTypePointers(tp)
	for {
		var addr uintptr
		if tp, addr = tp.next(interior + size); addr == 0 {
			println("runtime: would've stopped here")
			dumpTypePointers(tp)
			break
		}
		print("runtime: addr=", hex(addr), "\n")
		dumpTypePointers(tp)
	}

	print("runtime: want: ")
	for i := off; i < off+size; i += goarch.PtrSize {
		// Recompute the expected bit (against dataSize this time).
		want := false
		if i < dataSize {
			off := i % typ.Size_
			if off < typ.PtrBytes {
				j := off / goarch.PtrSize
				want = *addb(getGCMask(typ), j/8)>>(j%8)&1 != 0
			}
		}
		if want {
			print("1")
		} else {
			print("0")
		}
	}
	println()

	throw("heapSetType: pointer entry not correct")
}
912
913
// doubleCheckTypePointersOfType verifies that iterating [addr,
// addr+size) via the caller-supplied type (typePointersOfType) yields
// exactly the same addresses as iterating via span metadata
// (typePointersOf). Throws on any divergence.
func doubleCheckTypePointersOfType(s *mspan, typ *_type, addr, size uintptr) {
	if typ == nil {
		return
	}
	if typ.Kind_&abi.KindMask == abi.Interface {
		// NOTE(review): interfaces are excluded here — their dynamic
		// pointer layout can legitimately differ from the static type's
		// gcmask, so the two iterators need not agree.
		return
	}
	tp0 := s.typePointersOfType(typ, addr)
	tp1 := s.typePointersOf(addr, size)
	failed := false
	for {
		// Advance both iterators in lock-step; they must agree at every
		// step and terminate together.
		var addr0, addr1 uintptr
		tp0, addr0 = tp0.next(addr + size)
		tp1, addr1 = tp1.next(addr + size)
		if addr0 != addr1 {
			failed = true
			break
		}
		if addr0 == 0 {
			break
		}
	}
	if failed {
		// Replay both iterations with full diagnostics, then throw.
		tp0 := s.typePointersOfType(typ, addr)
		tp1 := s.typePointersOf(addr, size)
		print("runtime: addr=", hex(addr), " size=", size, "\n")
		print("runtime: type=", toRType(typ).string(), "\n")
		dumpTypePointers(tp0)
		dumpTypePointers(tp1)
		for {
			var addr0, addr1 uintptr
			tp0, addr0 = tp0.next(addr + size)
			tp1, addr1 = tp1.next(addr + size)
			print("runtime: ", hex(addr0), " ", hex(addr1), "\n")
			if addr0 == 0 && addr1 == 0 {
				break
			}
		}
		throw("mismatch between typePointersOfType and typePointersOf")
	}
}
958
959 func dumpTypePointers(tp typePointers) {
960 print("runtime: tp.elem=", hex(tp.elem), " tp.typ=", unsafe.Pointer(tp.typ), "\n")
961 print("runtime: tp.addr=", hex(tp.addr), " tp.mask=")
962 for i := uintptr(0); i < ptrBits; i++ {
963 if tp.mask&(uintptr(1)<<i) != 0 {
964 print("1")
965 } else {
966 print("0")
967 }
968 }
969 println()
970 }
971
972
973
974
975
// addb returns the byte pointer p+n.
//
// NOTE(review): in the runtime this helper is conventionally marked
// //go:nosplit and written as a single full expression so it inlines
// with minimal temporaries — confirm the directive survives in the
// full file; do not refactor the expression.
func addb(p *byte, n uintptr) *byte {
	return (*byte)(unsafe.Pointer(uintptr(unsafe.Pointer(p)) + n))
}
982
983
984
985
986
// subtractb returns the byte pointer p-n.
//
// NOTE(review): like addb, conventionally //go:nosplit and kept as one
// expression for inlining — confirm in the full file.
func subtractb(p *byte, n uintptr) *byte {
	return (*byte)(unsafe.Pointer(uintptr(unsafe.Pointer(p)) - n))
}
993
994
995
996
997
// add1 returns the byte pointer p+1.
//
// NOTE(review): conventionally //go:nosplit; kept as one expression
// for inlining — confirm in the full file.
func add1(p *byte) *byte {
	return (*byte)(unsafe.Pointer(uintptr(unsafe.Pointer(p)) + 1))
}
1004
1005
1006
1007
1008
1009
1010
// subtract1 returns the byte pointer p-1.
//
// NOTE(review): conventionally //go:nosplit; kept as one expression
// for inlining — confirm in the full file.
func subtract1(p *byte) *byte {
	return (*byte)(unsafe.Pointer(uintptr(unsafe.Pointer(p)) - 1))
}
1017
1018
1019
1020
1021
1022
1023
1024
1025
1026
// markBits identifies a single mark/alloc bit: the byte holding it, a
// one-bit mask selecting it within that byte, and the object index it
// corresponds to.
type markBits struct {
	bytep *uint8  // byte containing the bit
	mask  uint8   // single-bit mask within *bytep
	index uintptr // object index this bit describes
}
1032
1033
// allocBitsForIndex returns the allocation bit for the object with the
// given index in s.
func (s *mspan) allocBitsForIndex(allocBitIndex uintptr) markBits {
	bytep, mask := s.allocBits.bitp(allocBitIndex)
	return markBits{bytep, mask, allocBitIndex}
}
1038
1039
1040
1041
1042
1043 func (s *mspan) refillAllocCache(whichByte uint16) {
1044 bytes := (*[8]uint8)(unsafe.Pointer(s.allocBits.bytep(uintptr(whichByte))))
1045 aCache := uint64(0)
1046 aCache |= uint64(bytes[0])
1047 aCache |= uint64(bytes[1]) << (1 * 8)
1048 aCache |= uint64(bytes[2]) << (2 * 8)
1049 aCache |= uint64(bytes[3]) << (3 * 8)
1050 aCache |= uint64(bytes[4]) << (4 * 8)
1051 aCache |= uint64(bytes[5]) << (5 * 8)
1052 aCache |= uint64(bytes[6]) << (6 * 8)
1053 aCache |= uint64(bytes[7]) << (7 * 8)
1054 s.allocCache = ^aCache
1055 }
1056
1057
1058
1059
1060
// nextFreeIndex returns the index of the next free object in s at or
// after s.freeindex, or s.nelems if the span is full. It advances
// s.freeindex and consumes bits from s.allocCache, refilling the cache
// from s.allocBits as needed.
func (s *mspan) nextFreeIndex() uint16 {
	sfreeindex := s.freeindex
	snelems := s.nelems
	if sfreeindex == snelems {
		// Span is full.
		return sfreeindex
	}
	if sfreeindex > snelems {
		throw("s.freeindex > s.nelems")
	}

	aCache := s.allocCache

	bitIndex := sys.TrailingZeros64(aCache)
	for bitIndex == 64 {
		// Current cache word has no free bits. Round sfreeindex up to
		// the next 64-object boundary and refill the cache from there.
		sfreeindex = (sfreeindex + 64) &^ (64 - 1)
		if sfreeindex >= snelems {
			s.freeindex = snelems
			return snelems
		}
		whichByte := sfreeindex / 8
		// Refill s.allocCache with the next 64 alloc bits.
		s.refillAllocCache(whichByte)
		aCache = s.allocCache
		bitIndex = sys.TrailingZeros64(aCache)
		// Loop again if those 64 bits were also all allocated.
	}
	result := sfreeindex + uint16(bitIndex)
	if result >= snelems {
		// The found bit is past the span's last object (tail padding).
		s.freeindex = snelems
		return snelems
	}

	// Consume the found bit and everything before it.
	s.allocCache >>= uint(bitIndex + 1)
	sfreeindex = result + 1

	if sfreeindex%64 == 0 && sfreeindex != snelems {
		// We just crossed a 64-object boundary, so the cache is fully
		// shifted out (all zeros). Refill it to correspond to the bits
		// at s.allocBits starting at the new s.freeindex.
		whichByte := sfreeindex / 8
		s.refillAllocCache(whichByte)
	}
	s.freeindex = sfreeindex
	return result
}
1110
1111
1112
1113
1114
1115
// isFree reports whether the object at the given index in s is
// unallocated. Indexes below freeIndexForScan are known to be
// allocated without consulting the bitmap.
func (s *mspan) isFree(index uintptr) bool {
	if index < uintptr(s.freeIndexForScan) {
		return false
	}
	bytep, mask := s.allocBits.bitp(index)
	return *bytep&mask == 0
}
1123
1124
1125
1126
1127
1128
1129
1130
1131
// divideByElemSize returns n/s.elemsize computed by magic-number
// multiplication (s.divMul) instead of hardware division, which is
// faster on the allocation hot path.
func (s *mspan) divideByElemSize(n uintptr) uintptr {
	const doubleCheck = false

	// q = n / elemsize via fixed-point multiply by the precomputed
	// reciprocal s.divMul.
	q := uintptr((uint64(n) * uint64(s.divMul)) >> 32)

	if doubleCheck && q != n/s.elemsize {
		println(n, "/", s.elemsize, "should be", n/s.elemsize, "but got", q)
		throw("bad magic division")
	}
	return q
}
1144
1145
1146
1147
// objIndex returns the index within s of the object containing the
// pointer p. p must point into the span.
func (s *mspan) objIndex(p uintptr) uintptr {
	return s.divideByElemSize(p - s.base())
}
1151
// markBitsForAddr returns the mark bit for the heap object containing
// p. p must point into an in-use span (spanOf must succeed).
func markBitsForAddr(p uintptr) markBits {
	s := spanOf(p)
	objIndex := s.objIndex(p)
	return s.markBitsForIndex(objIndex)
}
1157
// markBitsForIndex returns the mark bit for the object with the given
// index in s.
func (s *mspan) markBitsForIndex(objIndex uintptr) markBits {
	bytep, mask := s.gcmarkBits.bitp(objIndex)
	return markBits{bytep, mask, objIndex}
}
1162
// markBitsForBase returns the mark bit for the first object in s
// (index 0, first bit of the first mark byte).
func (s *mspan) markBitsForBase() markBits {
	return markBits{&s.gcmarkBits.x, uint8(1), 0}
}
1166
1167
// isMarked reports whether the mark bit is set (non-atomic read).
func (m markBits) isMarked() bool {
	return *m.bytep&m.mask != 0
}
1171
1172
// setMarked sets the mark bit atomically, since other mark bits in the
// same byte may be set concurrently by other marking goroutines.
func (m markBits) setMarked() {
	atomic.Or8(m.bytep, m.mask)
}
1179
1180
// setMarkedNonAtomic sets the mark bit with a plain store; only safe
// when no other writer can touch this byte concurrently.
func (m markBits) setMarkedNonAtomic() {
	*m.bytep |= m.mask
}
1184
1185
// clearMarked clears the mark bit atomically, since neighboring bits
// in the same byte may be updated concurrently.
func (m markBits) clearMarked() {
	atomic.And8(m.bytep, ^m.mask)
}
1192
1193
// markBitsForSpan returns the mark bit for the object at the start of
// the span containing base; base must be span-aligned (object 0), so
// the resulting mask must be the first bit of a byte.
func markBitsForSpan(base uintptr) (mbits markBits) {
	mbits = markBitsForAddr(base)
	if mbits.mask != 1 {
		throw("markBitsForSpan: unaligned start")
	}
	return mbits
}
1201
1202
1203 func (m *markBits) advance() {
1204 if m.mask == 1<<7 {
1205 m.bytep = (*uint8)(unsafe.Pointer(uintptr(unsafe.Pointer(m.bytep)) + 1))
1206 m.mask = 1
1207 } else {
1208 m.mask = m.mask << 1
1209 }
1210 m.index++
1211 }
1212
1213
1214
// clobberdeadPtr is the sentinel pattern (0xdeaddead, doubled on
// 64-bit) written over dead pointer slots under the clobberdead debug
// mode; the shift expression widens it only when uintptr is 64 bits.
const clobberdeadPtr = uintptr(0xdeaddead | 0xdeaddead<<((^uintptr(0)>>63)*32))
1216
1217
// badPointer prints diagnostics for a pointer p found in the Go heap
// that does not point into valid in-use heap memory, then throws.
// refBase/refOff, if nonzero, identify the object and offset where the
// bad pointer was found.
func badPointer(s *mspan, p, refBase, refOff uintptr) {
	// Typically this indicates an incorrect use of unsafe or cgo to
	// store a bad pointer in the Go heap. It may also indicate a
	// runtime bug.
	printlock()
	print("runtime: pointer ", hex(p))
	if s != nil {
		state := s.state.get()
		if state != mSpanInUse {
			print(" to unallocated span")
		} else {
			print(" to unused region of span")
		}
		print(" span.base()=", hex(s.base()), " span.limit=", hex(s.limit), " span.state=", state)
	}
	print("\n")
	if refBase != 0 {
		print("runtime: found in object at *(", hex(refBase), "+", hex(refOff), ")\n")
		gcDumpObject("object", refBase, refOff)
	}
	getg().m.traceback = 2
	throw("found bad pointer in Go heap (incorrect use of unsafe or cgo?)")
}
1246
1247
1248
1249
1250
1251
1252
1253
1254
1255
1256
1257
1258
1259
1260
1261
1262
1263
1264
1265
1266
1267
1268
1269
1270
// findObject resolves pointer p to the heap object containing it,
// returning the object's base address, its span, and its index within
// the span. It returns zero values if p does not point into an in-use
// heap object. refBase/refOff describe where p was found and are used
// only for diagnostics when p is invalid.
func findObject(p, refBase, refOff uintptr) (base uintptr, s *mspan, objIndex uintptr) {
	s = spanOf(p)
	// If s is nil, the virtual address has never been part of the heap.
	// This pointer may be to some mmap'd region, so we allow it.
	if s == nil {
		if (GOARCH == "amd64" || GOARCH == "arm64") && p == clobberdeadPtr && debug.invalidptr != 0 {
			// The runtime writes clobberdeadPtr over dead slots under
			// the clobberdead debug mode on these GOARCHes; seeing it
			// here means a dead pointer was kept alive.
			badPointer(s, p, refBase, refOff)
		}
		return
	}
	// If p is a bad pointer, it may not be in s's bounds.
	//
	// Check s.state to synchronize with span initialization before
	// checking other fields (see comment in spanOfHeap in the full
	// runtime sources).
	if state := s.state.get(); state != mSpanInUse || p < s.base() || p >= s.limit {
		// Pointers into stacks are also ok, the runtime manages these
		// explicitly.
		if state == mSpanManual {
			return
		}
		// The following ensures that we are rigorous about what data
		// structures hold valid pointers.
		if debug.invalidptr != 0 {
			badPointer(s, p, refBase, refOff)
		}
		return
	}

	objIndex = s.objIndex(p)
	base = s.base() + objIndex*s.elemsize
	return
}
1305
1306
1307
1308
// reflect_verifyNotInHeapPtr reports whether p is acceptable as a
// not-in-heap pointer: conservatively, anything not in the heap and
// not the clobberdead sentinel.
//
// NOTE(review): the reflect_ prefix indicates this is exposed to the
// reflect package via //go:linkname — confirm the directive is present
// above this function in the full file.
func reflect_verifyNotInHeapPtr(p uintptr) bool {
	// Conversion to a pointer is ok as long as findObject above does
	// not call badPointer. Since we're already promised that p doesn't
	// point into the heap, just disallow heap pointers and the special
	// clobbered pointer.
	return spanOf(p) == nil && p != clobberdeadPtr
}
1315
// ptrBits is the number of bits in a pointer-sized word; one uintptr
// of a pointer bitmap describes this many words of memory.
const ptrBits = 8 * goarch.PtrSize
1317
1318
1319
1320
1321
1322
1323
1324
1325
// bulkBarrierBitmap executes write barriers for [dst, dst+size) using
// a 1-bit-per-word pointer bitmap (bits), starting maskOffset bytes
// into the region the bitmap describes. Used for the data and BSS
// segments, whose bitmaps are precomputed by the linker.
func bulkBarrierBitmap(dst, src, size, maskOffset uintptr, bits *uint8) {
	word := maskOffset / goarch.PtrSize
	bits = addb(bits, word/8)
	mask := uint8(1) << (word % 8)

	buf := &getg().m.p.ptr().wbBuf
	for i := uintptr(0); i < size; i += goarch.PtrSize {
		if mask == 0 {
			// Rolled past the top of the byte; move to the next one.
			bits = addb(bits, 1)
			if *bits == 0 {
				// Skip 8 words at once: whole byte is pointer-free.
				i += 7 * goarch.PtrSize
				continue
			}
			mask = 1
		}
		if *bits&mask != 0 {
			dstx := (*uintptr)(unsafe.Pointer(dst + i))
			if src == 0 {
				// Clearing: record only the old destination value.
				p := buf.get1()
				p[0] = *dstx
			} else {
				// Copying: record old destination and new source.
				srcx := (*uintptr)(unsafe.Pointer(src + i))
				p := buf.get2()
				p[0] = *dstx
				p[1] = *srcx
			}
		}
		mask <<= 1
	}
}
1357
1358
1359
1360
1361
1362
1363
1364
1365
1366
1367
1368
1369
1370
1371
// typeBitsBulkBarrier executes write barriers for copying one value of
// type typ from src to dst, walking typ's pointer mask directly. typ
// must be non-nil and its size must equal size exactly.
func typeBitsBulkBarrier(typ *_type, dst, src, size uintptr) {
	if typ == nil {
		throw("runtime: typeBitsBulkBarrier without type")
	}
	if typ.Size_ != size {
		println("runtime: typeBitsBulkBarrier with type ", toRType(typ).string(), " of size ", typ.Size_, " but memory size", size)
		throw("runtime: invalid typeBitsBulkBarrier")
	}
	if !writeBarrier.enabled {
		return
	}
	ptrmask := getGCMask(typ)
	buf := &getg().m.p.ptr().wbBuf
	var bits uint32
	// Only the pointer-bearing prefix (PtrBytes) needs barriers.
	for i := uintptr(0); i < typ.PtrBytes; i += goarch.PtrSize {
		if i&(goarch.PtrSize*8-1) == 0 {
			// Start of a new mask byte.
			bits = uint32(*ptrmask)
			ptrmask = addb(ptrmask, 1)
		} else {
			bits = bits >> 1
		}
		if bits&1 != 0 {
			dstx := (*uintptr)(unsafe.Pointer(dst + i))
			srcx := (*uintptr)(unsafe.Pointer(src + i))
			p := buf.get2()
			p[0] = *dstx
			p[1] = *srcx
		}
	}
}
1402
1403
1404
// countAlloc returns the number of objects allocated in span s by
// counting the set bits in its mark bitmap.
func (s *mspan) countAlloc() int {
	count := 0
	bytes := divRoundUp(uintptr(s.nelems), 8)
	// Iterate over each 8-byte chunk and count with a popcount
	// intrinsic. NOTE(review): this assumes gcmarkBits is 8-byte
	// aligned and zero-padded past nelems so over-reads only see zero
	// bits — confirm against newMarkBits.
	for i := uintptr(0); i < bytes; i += 8 {
		// The unaligned uint64 load doesn't preserve byte order, but
		// that's fine: we only count 1 bits, not their positions.
		mrkBits := *(*uint64)(unsafe.Pointer(s.gcmarkBits.bytep(i)))
		count += sys.OnesCount64(mrkBits)
	}
	return count
}
1422
1423
1424
1425 func readUintptr(p *byte) uintptr {
1426 x := *(*uintptr)(unsafe.Pointer(p))
1427 if goarch.BigEndian {
1428 if goarch.PtrSize == 8 {
1429 return uintptr(sys.Bswap64(uint64(x)))
1430 }
1431 return uintptr(sys.Bswap32(uint32(x)))
1432 }
1433 return x
1434 }
1435
// debugPtrmask holds a lazily allocated scratch buffer for expanding
// GC programs when debugging; lock guards data.
var debugPtrmask struct {
	lock mutex
	data *byte
}
1440
1441
1442
1443
// progToPointerMask expands the GC program prog into a 1-bit pointer
// mask for a region of the given size in bytes, allocated with
// persistentalloc. The result has at most size/goarch.PtrSize bits.
func progToPointerMask(prog *byte, size uintptr) bitvector {
	n := (size/goarch.PtrSize + 7) / 8
	// Allocate one extra byte as an overflow guard.
	x := (*[1 << 30]byte)(persistentalloc(n+1, 1, &memstats.buckhash_sys))[:n+1]
	x[len(x)-1] = 0xa1 // sentinel: must survive runGCProg
	n = runGCProg(prog, &x[0])
	if x[len(x)-1] != 0xa1 {
		throw("progToPointerMask: overflow")
	}
	return bitvector{int32(n), &x[0]}
}
1454
1455
1456
1457
1458
1459
1460
1461
1462
1463
1464
1465
1466
1467
1468
1469
1470
1471
1472
// runGCProg interprets the GC program prog, writing the expanded 1-bit
// pointer bitmap to dst, and returns the number of bits written.
//
// GC program format (as consumed below): each instruction starts with
// a header byte. If the 0x80 bit is clear, the low 7 bits give a count
// n of literal bits that follow (n == 0 ends the program). If 0x80 is
// set, the low 7 bits give a repeat length n (0 means a varint length
// follows), followed by a varint repeat count c: the last n bits
// already written are repeated c more times.
func runGCProg(prog, dst *byte) uintptr {
	dstStart := dst

	// Bits waiting to be written to memory.
	var bits uintptr
	var nbits uintptr

	p := prog
Run:
	for {
		// Flush accumulated full bytes.
		for ; nbits >= 8; nbits -= 8 {
			*dst = uint8(bits)
			dst = add1(dst)
			bits >>= 8
		}

		// Process one instruction.
		inst := uintptr(*p)
		p = add1(p)
		n := inst & 0x7F
		if inst&0x80 == 0 {
			// Literal bits; n == 0 means end of program.
			if n == 0 {
				// Program is over.
				break Run
			}
			nbyte := n / 8
			for i := uintptr(0); i < nbyte; i++ {
				bits |= uintptr(*p) << nbits
				p = add1(p)
				*dst = uint8(bits)
				dst = add1(dst)
				bits >>= 8
			}
			if n %= 8; n > 0 {
				bits |= uintptr(*p) << nbits
				p = add1(p)
				nbits += n
			}
			continue Run
		}

		// Repeat. If n == 0, it is encoded in a varint in the next bytes.
		if n == 0 {
			for off := uint(0); ; off += 7 {
				x := uintptr(*p)
				p = add1(p)
				n |= (x & 0x7F) << off
				if x&0x80 == 0 {
					break
				}
			}
		}

		// Count is encoded in a varint in the next bytes.
		c := uintptr(0)
		for off := uint(0); ; off += 7 {
			x := uintptr(*p)
			p = add1(p)
			c |= (x & 0x7F) << off
			if x&0x80 == 0 {
				break
			}
		}
		c *= n // now total number of bits to copy

		// If the number of bits being repeated is small, load them
		// into a register and use that register for the possibly
		// slightly larger number of bits being copied, to avoid the
		// possibility of aliasing issues reading from dst while
		// writing to dst.
		src := dst
		const maxBits = goarch.PtrSize*8 - 7
		if n <= maxBits {
			// Start with bits in output buffer.
			pattern := bits
			npattern := nbits

			// If we need more bits, fetch them from the output, going
			// backward from the most recently written byte.
			src = subtract1(src)
			for npattern < n {
				pattern <<= 8
				pattern |= uintptr(*src)
				src = subtract1(src)
				npattern += 8
			}

			// We started with the whole bit output buffer, and the
			// bit pattern is the low n bits of that; we might have
			// gathered too many.
			if npattern > n {
				pattern >>= npattern - n
				npattern = n
			}

			// Replicate the pattern so it fills a register.
			if npattern == 1 {
				// A single repeated bit: either expand a 1 to a full
				// register of 1s, or leave 0 as-is with an effectively
				// infinite pattern length.
				if pattern == 1 {
					pattern = 1<<maxBits - 1
					npattern = maxBits
				} else {
					npattern = c
				}
			} else {
				b := pattern
				nb := npattern
				if nb+nb <= maxBits {
					// Double pattern until it fills the register...
					for nb <= goarch.PtrSize*8 {
						b |= b << nb
						nb += nb
					}
					// ...then trim to a whole number of original
					// patterns.
					nb = maxBits / npattern * npattern
					b &= 1<<nb - 1
					pattern = b
					npattern = nb
				}
			}

			// Emit the pattern register-width at a time, flushing
			// full bytes as they accumulate.
			for ; c >= npattern; c -= npattern {
				bits |= pattern << nbits
				nbits += npattern
				for nbits >= 8 {
					*dst = uint8(bits)
					dst = add1(dst)
					bits >>= 8
					nbits -= 8
				}
			}

			// Emit the final partial pattern.
			if c > 0 {
				pattern &= 1<<c - 1
				bits |= pattern << nbits
				nbits += c
			}
			continue Run
		}

		// Repeat pattern is too long to fit in a register; copy the
		// bits directly from the already-written output, n bits back.
		off := n - nbits // amount to skip over in addition to the buffered bits
		// Rewind to the byte containing the first source bit.
		src = subtractb(src, (off+7)/8)
		if frag := off & 7; frag != 0 {
			// Align to a byte boundary by consuming the fragment.
			bits |= uintptr(*src) >> (8 - frag) << nbits
			src = add1(src)
			nbits += frag
			c -= frag
		}

		// Copy whole bytes, flushing as we go.
		for i := c / 8; i > 0; i-- {
			bits |= uintptr(*src) << nbits
			src = add1(src)
			*dst = uint8(bits)
			dst = add1(dst)
			bits >>= 8
		}

		// Copy any trailing bits.
		if c %= 8; c > 0 {
			bits |= (uintptr(*src) & (1<<c - 1)) << nbits
			nbits += c
		}
	}

	// Write any remaining bits out, padding the final byte with zeros.
	totalBits := (uintptr(unsafe.Pointer(dst))-uintptr(unsafe.Pointer(dstStart)))*8 + nbits
	nbits += -nbits & 7
	for ; nbits > 0; nbits -= 8 {
		*dst = uint8(bits)
		dst = add1(dst)
		bits >>= 8
	}
	return totalBits
}
1666
// dumpGCProg prints a decoded listing of the GC program at p for
// debugging, one instruction per line with a running bit offset.
func dumpGCProg(p *byte) {
	nptr := 0
	for {
		x := *p
		p = add1(p)
		if x == 0 {
			// Terminator instruction.
			print("\t", nptr, " end\n")
			break
		}
		if x&0x80 == 0 {
			// Literal: x bits follow, packed into (x+7)/8 bytes.
			print("\t", nptr, " lit ", x, ":")
			n := int(x+7) / 8
			for i := 0; i < n; i++ {
				print(" ", hex(*p))
				p = add1(p)
			}
			print("\n")
			nptr += int(x)
		} else {
			// Repeat: length in low bits (0 means a varint follows),
			// then a varint count.
			nbit := int(x &^ 0x80)
			if nbit == 0 {
				for nb := uint(0); ; nb += 7 {
					x := *p
					p = add1(p)
					nbit |= int(x&0x7f) << nb
					if x&0x80 == 0 {
						break
					}
				}
			}
			count := 0
			for nb := uint(0); ; nb += 7 {
				x := *p
				p = add1(p)
				count |= int(x&0x7f) << nb
				if x&0x80 == 0 {
					break
				}
			}
			print("\t", nptr, " repeat ", nbit, " × ", count, "\n")
			nptr += nbit * count
		}
	}
}
1711
1712
1713
1714
1715
1716
1717
// reflect_gcbits returns the GC pointer bitmask for the value that x
// points to: one byte per pointer-sized word of the pointed-to value,
// 1 where the word may hold a pointer (see pointerMask).
//
// NOTE(review): the reflect_ prefix strongly suggests this is reached
// from package reflect via a go:linkname directive — confirm before
// renaming, moving, or changing its signature.
func reflect_gcbits(x any) []byte {
	return pointerMask(x)
}
1721
1722
1723
1724
// pointerMask returns the pointer bitmask for the value that ep points
// to: one byte per pointer-sized word of the value's type, set to 1
// where that word may hold a pointer and 0 where it is scalar.
//
// ep's dynamic type must be a pointer type (checked below, throws
// otherwise). The pointed-to value may live in a module's data or bss
// segment, in the heap, or on the current goroutine's stack; each
// region is resolved against its own metadata. For addresses the
// runtime cannot attribute to any of these, a nil mask is returned.
func pointerMask(ep any) (mask []byte) {
	e := *efaceOf(&ep)
	p := e.data  // address of the value being queried
	t := e._type // dynamic type of ep; must be a pointer type

	var et *_type
	if t.Kind_&abi.KindMask != abi.Pointer {
		throw("bad argument to getgcmask: expected type to be a pointer to the value type whose mask is being queried")
	}
	// et is the element type: the type of the value the mask describes.
	et = (*ptrtype)(unsafe.Pointer(t)).Elem

	// Static data / bss: read the per-module GC masks directly.
	for _, datap := range activeModules() {
		// data segment
		if datap.data <= uintptr(p) && uintptr(p) < datap.edata {
			bitmap := datap.gcdatamask.bytedata
			n := et.Size_
			mask = make([]byte, n/goarch.PtrSize)
			for i := uintptr(0); i < n; i += goarch.PtrSize {
				// off is the word index of p+i within the segment; the
				// bitmap stores one bit per word, 8 words per byte.
				off := (uintptr(p) + i - datap.data) / goarch.PtrSize
				mask[i/goarch.PtrSize] = (*addb(bitmap, off/8) >> (off % 8)) & 1
			}
			return
		}

		// bss segment
		if datap.bss <= uintptr(p) && uintptr(p) < datap.ebss {
			bitmap := datap.gcbssmask.bytedata
			n := et.Size_
			mask = make([]byte, n/goarch.PtrSize)
			for i := uintptr(0); i < n; i += goarch.PtrSize {
				off := (uintptr(p) + i - datap.bss) / goarch.PtrSize
				mask[i/goarch.PtrSize] = (*addb(bitmap, off/8) >> (off % 8)) & 1
			}
			return
		}
	}

	// Heap object: derive the mask from the heap metadata and
	// cross-check it against a mask derived from the type alone.
	if base, s, _ := findObject(uintptr(p), 0, 0); base != 0 {
		if s.spanclass.noscan() {
			// Objects in this span contain no pointers at all.
			return nil
		}
		limit := base + s.elemsize

		// Move base up to the iterator's start address; this hides any
		// malloc header at the front of the allocation from the caller.
		tp := s.typePointersOfUnchecked(base)
		base = tp.addr

		// Unroll the bitmap the GC would actually observe for this object.
		maskFromHeap := make([]byte, (limit-base)/goarch.PtrSize)
		for {
			var addr uintptr
			if tp, addr = tp.next(limit); addr == 0 {
				break
			}
			maskFromHeap[(addr-base)/goarch.PtrSize] = 1
		}

		// NOTE(review): this loop compares an address (limit) against a
		// size (s.elemsize), so its body can never execute; it looks like
		// it was meant to verify that the allocation's unused tail is
		// zeroed. TODO confirm the intended bound before changing it.
		for i := limit; i < s.elemsize; i++ {
			if *(*byte)(unsafe.Pointer(i)) != 0 {
				throw("found non-zeroed tail of allocation")
			}
		}

		// Trim trailing zero words so the mask ends at the last pointer;
		// the comparison below only checks up to len(maskFromHeap).
		for len(maskFromHeap) > 0 && maskFromHeap[len(maskFromHeap)-1] == 0 {
			maskFromHeap = maskFromHeap[:len(maskFromHeap)-1]
		}

		// Unroll a second mask, this time from the type metadata alone.
		maskFromType := make([]byte, (limit-base)/goarch.PtrSize)
		tp = s.typePointersOfType(et, base)
		for {
			var addr uintptr
			if tp, addr = tp.next(limit); addr == 0 {
				break
			}
			maskFromType[(addr-base)/goarch.PtrSize] = 1
		}

		// The two masks must agree on every word the heap mask covers; a
		// mismatch means the heap metadata and the type disagree about
		// where pointers live — a fatal internal inconsistency.
		differs := false
		for i := range maskFromHeap {
			if maskFromHeap[i] != maskFromType[i] {
				differs = true
				break
			}
		}

		if differs {
			print("runtime: heap mask=")
			for _, b := range maskFromHeap {
				print(b)
			}
			println()
			print("runtime: type mask=")
			for _, b := range maskFromType {
				print(b)
			}
			println()
			print("runtime: type=", toRType(et).string(), "\n")
			throw("found two different masks from two different methods")
		}

		// Report the heap-derived mask to the caller.
		mask = maskFromHeap

		// Keep ep (and thus the object) alive until here: p was extracted
		// from ep above, and without this the object could be freed while
		// we are still reading its metadata.
		KeepAlive(ep)
		return
	}

	// Stack: if p lies within the current goroutine's stack, unwind to
	// the frame containing it and read that frame's locals pointer map.
	if gp := getg(); gp.m.curg.stack.lo <= uintptr(p) && uintptr(p) < gp.m.curg.stack.hi {
		found := false
		var u unwinder
		for u.initAt(gp.m.curg.sched.pc, gp.m.curg.sched.sp, 0, gp.m.curg, 0); u.valid(); u.next() {
			if u.frame.sp <= uintptr(p) && uintptr(p) < u.frame.varp {
				found = true
				break
			}
		}
		if found {
			locals, _, _ := u.frame.getStackMap(false)
			if locals.n == 0 {
				// No locals metadata for this frame; return a nil mask.
				return
			}
			// The locals bitmap covers size bytes ending at varp, one
			// bit per pointer-sized word; off indexes into that bitmap.
			size := uintptr(locals.n) * goarch.PtrSize
			n := (*ptrtype)(unsafe.Pointer(t)).Elem.Size_
			mask = make([]byte, n/goarch.PtrSize)
			for i := uintptr(0); i < n; i += goarch.PtrSize {
				off := (uintptr(p) + i - u.frame.varp + size) / goarch.PtrSize
				mask[i/goarch.PtrSize] = locals.ptrbit(off)
			}
		}
		return
	}

	// Otherwise the address is not something the runtime tracks
	// (e.g. non-Go memory); return a nil mask.
	return
}
1886
View as plain text