Source file
src/runtime/mbitmap.go
Documentation: runtime
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46 package runtime
47
48 import (
49 "internal/goarch"
50 "runtime/internal/atomic"
51 "runtime/internal/sys"
52 "unsafe"
53 )
54
const (
	// Each byte of the heap bitmap describes four heap words.
	// Bits 0-3 are the "pointer" bits for words 0-3 and bits 4-7 are
	// the corresponding "scan" bits, so the two bit sets for word i
	// are (bitPointer|bitScan) << (i * heapBitsShift) within a byte.
	bitPointer = 1 << 0
	bitScan    = 1 << 4

	heapBitsShift      = 1     // shift offset between successive bitPointer or bitScan entries
	wordsPerBitmapByte = 8 / 2 // heap words described by one bitmap byte

	// All four scan / pointer bits within one bitmap byte.
	bitScanAll    = bitScan | bitScan<<heapBitsShift | bitScan<<(2*heapBitsShift) | bitScan<<(3*heapBitsShift)
	bitPointerAll = bitPointer | bitPointer<<heapBitsShift | bitPointer<<(2*heapBitsShift) | bitPointer<<(3*heapBitsShift)
)
66
67
68
69
70
// addb returns the byte pointer p+n.
//
// The unsafe.Pointer -> uintptr -> unsafe.Pointer round trip is kept
// in a single expression, as the unsafe.Pointer conversion rules
// require for pointer arithmetic.
// NOTE(review): upstream runtime marks helpers like this //go:nosplit;
// the pragma appears to have been stripped during extraction — confirm.
func addb(p *byte, n uintptr) *byte {
	return (*byte)(unsafe.Pointer(uintptr(unsafe.Pointer(p)) + n))
}
77
78
79
80
81
// subtractb returns the byte pointer p-n.
//
// Kept as a single expression for the same unsafe.Pointer-arithmetic
// reasons as addb.
func subtractb(p *byte, n uintptr) *byte {
	return (*byte)(unsafe.Pointer(uintptr(unsafe.Pointer(p)) - n))
}
88
89
90
91
92
// add1 returns the byte pointer p+1.
//
// Specialized single-byte variant of addb; kept as one expression per
// the unsafe.Pointer conversion rules.
func add1(p *byte) *byte {
	return (*byte)(unsafe.Pointer(uintptr(unsafe.Pointer(p)) + 1))
}
99
100
101
102
103
104
105
// subtract1 returns the byte pointer p-1.
//
// Specialized single-byte variant of subtractb; kept as one expression
// per the unsafe.Pointer conversion rules.
func subtract1(p *byte) *byte {
	return (*byte)(unsafe.Pointer(uintptr(unsafe.Pointer(p)) - 1))
}
112
113
114
115
116
// heapBits is a cursor into the heap bitmap for one heap word.
// Its methods (next, forward, bits, ...) use value receivers, so a
// heapBits is advanced by reassignment: h = h.next().
type heapBits struct {
	bitp  *uint8 // bitmap byte holding this word's entry
	shift uint32 // which of the 4 entries in *bitp (0..3)
	arena uint32 // index of the heap arena that bitp points into
	last  *uint8 // last valid bitmap byte of that arena
}

// Compile-time check that the maximum arena index fits in the
// uint32 heapBits.arena field above.
var _ = heapBits{arena: (1<<heapAddrBits)/heapArenaBytes - 1}
127
128
129
130
131
132
133
134
135
136
// markBits addresses a single bit within an mspan's allocBits or
// gcmarkBits: *bytep & mask selects the bit for object number index.
type markBits struct {
	bytep *uint8  // byte containing the bit
	mask  uint8   // single-bit mask within *bytep
	index uintptr // object index within the span
}
142
143
// allocBitsForIndex returns a markBits addressing the allocation bit
// of object allocBitIndex in span s.
func (s *mspan) allocBitsForIndex(allocBitIndex uintptr) markBits {
	bytep, mask := s.allocBits.bitp(allocBitIndex)
	return markBits{bytep, mask, allocBitIndex}
}
148
149
150
151
152
153 func (s *mspan) refillAllocCache(whichByte uintptr) {
154 bytes := (*[8]uint8)(unsafe.Pointer(s.allocBits.bytep(whichByte)))
155 aCache := uint64(0)
156 aCache |= uint64(bytes[0])
157 aCache |= uint64(bytes[1]) << (1 * 8)
158 aCache |= uint64(bytes[2]) << (2 * 8)
159 aCache |= uint64(bytes[3]) << (3 * 8)
160 aCache |= uint64(bytes[4]) << (4 * 8)
161 aCache |= uint64(bytes[5]) << (5 * 8)
162 aCache |= uint64(bytes[6]) << (6 * 8)
163 aCache |= uint64(bytes[7]) << (7 * 8)
164 s.allocCache = ^aCache
165 }
166
167
168
169
170
// nextFreeIndex returns the index of the next free object in the span
// at or after s.freeindex, or s.nelems if the span is full. It
// advances s.freeindex past the returned slot and keeps s.allocCache
// (a 64-slot window of inverted allocBits, 1 = free) in sync.
func (s *mspan) nextFreeIndex() uintptr {
	sfreeindex := s.freeindex
	snelems := s.nelems
	if sfreeindex == snelems {
		// Span is exhausted.
		return sfreeindex
	}
	if sfreeindex > snelems {
		throw("s.freeindex > s.nelems")
	}

	aCache := s.allocCache

	bitIndex := sys.Ctz64(aCache)
	for bitIndex == 64 {
		// Current 64-bit cache window has no free slot: round
		// freeindex up to the next 64-object boundary and refill.
		sfreeindex = (sfreeindex + 64) &^ (64 - 1)
		if sfreeindex >= snelems {
			s.freeindex = snelems
			return snelems
		}
		whichByte := sfreeindex / 8
		// Refill the cache starting at the new freeindex.
		s.refillAllocCache(whichByte)
		aCache = s.allocCache
		bitIndex = sys.Ctz64(aCache)
		// Loop until we find a free bit or run past nelems.
	}
	result := sfreeindex + uintptr(bitIndex)
	if result >= snelems {
		s.freeindex = snelems
		return snelems
	}

	// Consume the found bit (and the skipped zero bits) from the cache.
	s.allocCache >>= uint(bitIndex + 1)
	sfreeindex = result + 1

	if sfreeindex%64 == 0 && sfreeindex != snelems {
		// The cache window is exactly used up; refill it now so the
		// cache always corresponds to the bits at s.freeindex.
		// (sfreeindex is a multiple of 8 here, so whichByte is exact.)
		whichByte := sfreeindex / 8
		s.refillAllocCache(whichByte)
	}
	s.freeindex = sfreeindex
	return result
}
220
221
222
223
224
225
// isFree reports whether the object at the given index in span s is
// unallocated. Indexes below s.freeIndexForScan are always considered
// allocated (their alloc bits may not be up to date yet).
func (s *mspan) isFree(index uintptr) bool {
	if index < s.freeIndexForScan {
		return false
	}
	bytep, mask := s.allocBits.bitp(index)
	return *bytep&mask == 0
}
233
234
235
236
237
// divideByElemSize returns n/s.elemsize, computed via the span's
// precomputed magic-number multiplier s.divMul instead of a hardware
// divide. Set doubleCheck to true to verify against real division.
func (s *mspan) divideByElemSize(n uintptr) uintptr {
	const doubleCheck = false

	// n / elemsize == (n * divMul) >> 32, by construction of divMul.
	q := uintptr((uint64(n) * uint64(s.divMul)) >> 32)

	if doubleCheck && q != n/s.elemsize {
		println(n, "/", s.elemsize, "should be", n/s.elemsize, "but got", q)
		throw("bad magic division")
	}
	return q
}
250
// objIndex returns the index within span s of the object containing
// address p. p must lie within s.
func (s *mspan) objIndex(p uintptr) uintptr {
	return s.divideByElemSize(p - s.base())
}
254
// markBitsForAddr returns the markBits for the object containing heap
// address p. p must point into an allocated span (spanOf must not
// return nil).
func markBitsForAddr(p uintptr) markBits {
	s := spanOf(p)
	objIndex := s.objIndex(p)
	return s.markBitsForIndex(objIndex)
}
260
// markBitsForIndex returns a markBits addressing the GC mark bit of
// object objIndex in span s.
func (s *mspan) markBitsForIndex(objIndex uintptr) markBits {
	bytep, mask := s.gcmarkBits.bitp(objIndex)
	return markBits{bytep, mask, objIndex}
}
265
// markBitsForBase returns the markBits for object 0 of span s
// (mask 1 in the first gcmarkBits byte).
func (s *mspan) markBitsForBase() markBits {
	return markBits{(*uint8)(s.gcmarkBits), uint8(1), 0}
}
269
270
// isMarked reports whether the bit addressed by m is set.
func (m markBits) isMarked() bool {
	return *m.bytep&m.mask != 0
}
274
275
// setMarked sets the bit addressed by m, atomically.
func (m markBits) setMarked() {
	// Might be racing with other updates to the same byte, so use
	// an atomic OR rather than a plain read-modify-write.
	atomic.Or8(m.bytep, m.mask)
}
282
283
// setMarkedNonAtomic sets the bit addressed by m without atomicity.
// Only safe when no other writer can touch this byte concurrently.
func (m markBits) setMarkedNonAtomic() {
	*m.bytep |= m.mask
}
287
288
// clearMarked clears the bit addressed by m, atomically.
func (m markBits) clearMarked() {
	// Might be racing with other updates to the same byte, so use
	// an atomic AND rather than a plain read-modify-write.
	atomic.And8(m.bytep, ^m.mask)
}
295
296
// markBitsForSpan returns the markBits for the span whose base address
// is base, checking that base really is the first object (mask == 1).
func markBitsForSpan(base uintptr) (mbits markBits) {
	mbits = markBitsForAddr(base)
	if mbits.mask != 1 {
		throw("markBitsForSpan: unaligned start")
	}
	return mbits
}
304
305
306 func (m *markBits) advance() {
307 if m.mask == 1<<7 {
308 m.bytep = (*uint8)(unsafe.Pointer(uintptr(unsafe.Pointer(m.bytep)) + 1))
309 m.mask = 1
310 } else {
311 m.mask = m.mask << 1
312 }
313 m.index++
314 }
315
316
317
318
319
320
321
322
// heapBitsForAddr returns the heapBits cursor for the heap word at
// addr. If addr's arena is not mapped, the zero heapBits is returned.
// NOTE(review): upstream marks this //go:nosplit (it is used in
// barrier paths); the pragma appears stripped here — confirm.
func heapBitsForAddr(addr uintptr) (h heapBits) {
	// Locate the arena containing addr.
	arena := arenaIndex(addr)
	ha := mheap_.arenas[arena.l1()][arena.l2()]

	// Check ha before indexing its bitmap: addr may lie just past the
	// end of a mapped region (e.g. one word beyond the last object).
	if ha == nil {
		// Caller is responsible for not using the returned zero
		// heapBits in that case.
		return
	}
	// One bitmap byte covers 4 heap words; shift selects within it.
	h.bitp = &ha.bitmap[(addr/(goarch.PtrSize*4))%heapArenaBitmapBytes]
	h.shift = uint32((addr / goarch.PtrSize) & 3)
	h.arena = uint32(arena)
	h.last = &ha.bitmap[len(ha.bitmap)-1]
	return
}
341
342
343
// clobberdeadPtr is the poison pattern (0xdeaddead, doubled on 64-bit)
// written over dead pointer slots when the clobberdead debug mode is
// on; findObject recognizes it to report better errors.
const clobberdeadPtr = uintptr(0xdeaddead | 0xdeaddead<<((^uintptr(0)>>63)*32))
345
346
// badPointer reports a fatal error for the invalid heap pointer p.
// s is the span p resolved to (may be nil); refBase/refOff identify
// the object and offset where p was found, when known (refBase != 0).
// Never returns: it throws after dumping diagnostics.
func badPointer(s *mspan, p, refBase, refOff uintptr) {
	// Typical reasons for arriving here:
	// a pointer to an unallocated span or to an unused region of an
	// in-use span — usually a sign of incorrect unsafe/cgo usage or
	// memory corruption. Print as much context as we have before
	// throwing.
	printlock()
	print("runtime: pointer ", hex(p))
	if s != nil {
		state := s.state.get()
		if state != mSpanInUse {
			print(" to unallocated span")
		} else {
			print(" to unused region of span")
		}
		print(" span.base()=", hex(s.base()), " span.limit=", hex(s.limit), " span.state=", state)
	}
	print("\n")
	if refBase != 0 {
		print("runtime: found in object at *(", hex(refBase), "+", hex(refOff), ")\n")
		gcDumpObject("object", refBase, refOff)
	}
	getg().m.traceback = 2
	throw("found bad pointer in Go heap (incorrect use of unsafe or cgo?)")
}
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
// findObject resolves pointer p to the heap object containing it.
// It returns the object's base address, its span, and its index within
// the span, or all zero values if p does not point into an in-use heap
// object. refBase/refOff describe where p was found and are only used
// for diagnostics when p is invalid.
func findObject(p, refBase, refOff uintptr) (base uintptr, s *mspan, objIndex uintptr) {
	s = spanOf(p)
	// If s is nil, the virtual address has never been part of the heap.
	// This pointer may be to some mmap'd region, so we allow it —
	// unless it is the clobberdead poison pattern.
	if s == nil {
		if (GOARCH == "amd64" || GOARCH == "arm64") && p == clobberdeadPtr && debug.invalidptr != 0 {
			// The runtime only poisons with clobberdeadPtr on
			// amd64/arm64 (per the GOARCH check here), so only
			// flag it there.
			badPointer(s, p, refBase, refOff)
		}
		return
	}

	// If p is a bad pointer, it may not be in s's bounds.
	//
	// Check s.state to synchronize with span initialization
	// before checking other fields (see also spanOfHeap).
	if state := s.state.get(); state != mSpanInUse || p < s.base() || p >= s.limit {
		// Pointers into stacks (manually-managed spans) are ok.
		if state == mSpanManual {
			return
		}
		// The following ensures that we are rigorous about what data
		// structures hold valid pointers.
		if debug.invalidptr != 0 {
			badPointer(s, p, refBase, refOff)
		}
		return
	}

	objIndex = s.objIndex(p)
	base = s.base() + objIndex*s.elemsize
	return
}
425
426
427
428
// reflect_verifyNotInHeapPtr reports whether p is safe to store in a
// go:notinheap pointer slot: it must not point into the heap and must
// not be the clobberdead poison value.
// NOTE(review): the name suggests this is //go:linkname'd for package
// reflect; the pragma appears stripped here — confirm.
func reflect_verifyNotInHeapPtr(p uintptr) bool {
	// Conversion to a pointer is ok as long as findObject above does
	// not call badPointer. Since we're already promised that p doesn't
	// point into the heap, just disallow clobberdead pointers.
	return spanOf(p) == nil && p != clobberdeadPtr
}
435
436
437
438
439
440
441
442
// next returns the heapBits describing the heap word after h.
// Within one bitmap byte it just bumps shift; at a byte boundary it
// advances bitp; at the end of the arena's bitmap it crosses into the
// next arena via the slow path.
func (h heapBits) next() heapBits {
	if h.shift < 3*heapBitsShift {
		h.shift += heapBitsShift
	} else if h.bitp != h.last {
		h.bitp, h.shift = add1(h.bitp), 0
	} else {
		// Move to the next arena (out-of-line slow path).
		return h.nextArena()
	}
	return h
}
454
455
456
457
458
459
460
461
462
463
// nextArena is the slow path of next: h has run off the end of its
// current arena's bitmap, so advance into the first bitmap byte of the
// following arena. Returns the zero heapBits if that arena is not
// mapped; the caller must not advance past the heap area it knows is
// valid.
func (h heapBits) nextArena() heapBits {
	h.arena++
	ai := arenaIdx(h.arena)
	l2 := mheap_.arenas[ai.l1()]
	if l2 == nil {
		// The next arena's first-level slot doesn't exist; return the
		// zero heapBits. Callers only cross arenas within regions they
		// know to be mapped, so this should not normally be used.
		return heapBits{}
	}
	ha := l2[ai.l2()]
	if ha == nil {
		return heapBits{}
	}
	h.bitp, h.shift = &ha.bitmap[0], 0
	h.last = &ha.bitmap[len(ha.bitmap)-1]
	return h
}
482
483
484
485
486
487
488
489
// forward returns the heapBits describing n heap words after h
// (i.e. next applied n times), possibly crossing one or more arena
// boundaries. If the destination arena is unmapped, the returned
// bitp/last are nil.
func (h heapBits) forward(n uintptr) heapBits {
	n += uintptr(h.shift) / heapBitsShift
	nbitp := uintptr(unsafe.Pointer(h.bitp)) + n/4
	h.shift = uint32(n%4) * heapBitsShift
	if nbitp <= uintptr(unsafe.Pointer(h.last)) {
		// Still within the current arena's bitmap.
		h.bitp = (*uint8)(unsafe.Pointer(nbitp))
		return h
	}

	// We're in a new heap arena: figure out how far past the current
	// arena's bitmap we landed and index into the target arena.
	past := nbitp - (uintptr(unsafe.Pointer(h.last)) + 1)
	h.arena += 1 + uint32(past/heapArenaBitmapBytes)
	ai := arenaIdx(h.arena)
	if l2 := mheap_.arenas[ai.l1()]; l2 != nil && l2[ai.l2()] != nil {
		a := l2[ai.l2()]
		h.bitp = &a.bitmap[past%heapArenaBitmapBytes]
		h.last = &a.bitmap[len(a.bitmap)-1]
	} else {
		h.bitp, h.last = nil, nil
	}
	return h
}
512
513
514
515
// forwardOrBoundary advances h by up to n heap words, stopping at the
// end of the current arena's bitmap. It returns the advanced cursor
// and the number of words actually advanced (min(n, words remaining in
// this arena)).
func (h heapBits) forwardOrBoundary(n uintptr) (heapBits, uintptr) {
	maxn := 4 * ((uintptr(unsafe.Pointer(h.last)) + 1) - uintptr(unsafe.Pointer(h.bitp)))
	if n > maxn {
		n = maxn
	}
	return h.forward(n), n
}
523
524
525
526
527
528
529
530
// bits returns the raw bitmap byte shifted so that bit 0 is this
// word's pointer bit and bit 4 (bitScan) is its scan bit. Bits above
// the current word leak in from the neighboring entries; callers mask
// with bitPointer/bitScan.
func (h heapBits) bits() uint32 {
	// The (shift & 31) eliminates a test and conditional branch
	// from the generated code.
	return uint32(*h.bitp) >> (h.shift & 31)
}
536
537
538
539
// morePointers reports whether this word's scan bit is set, i.e.
// whether this word or a later one in the object may hold a pointer.
func (h heapBits) morePointers() bool {
	return h.bits()&bitScan != 0
}
543
544
545
546
547
548
// isPointer reports whether the heap word described by h holds a
// pointer.
func (h heapBits) isPointer() bool {
	return h.bits()&bitPointer != 0
}
552
553
554
555
556
557
558
559
560
561
562
563
564
565
566
567
568
569
570
571
572
573
574
575
576
577
578
579
// bulkBarrierPreWrite executes write-barrier work for copying
// [src, src+size) into [dst, dst+size) before the copy happens:
// for every pointer slot (per the heap bitmap or the module data/bss
// bitmaps) it enqueues the old dst value and the new src value into
// the P's write-barrier buffer. src == 0 means the destination is
// being zeroed. dst, src and size must be pointer-aligned.
func bulkBarrierPreWrite(dst, src, size uintptr) {
	if (dst|src|size)&(goarch.PtrSize-1) != 0 {
		throw("bulkBarrierPreWrite: unaligned arguments")
	}
	if !writeBarrier.needed {
		return
	}
	if s := spanOf(dst); s == nil {
		// If dst is a global, use the data or BSS bitmaps to
		// execute write barriers.
		for _, datap := range activeModules() {
			if datap.data <= dst && dst < datap.edata {
				bulkBarrierBitmap(dst, src, size, dst-datap.data, datap.gcdatamask.bytedata)
				return
			}
		}
		for _, datap := range activeModules() {
			if datap.bss <= dst && dst < datap.ebss {
				bulkBarrierBitmap(dst, src, size, dst-datap.bss, datap.gcbssmask.bytedata)
				return
			}
		}
		return
	} else if s.state.get() != mSpanInUse || dst < s.base() || s.limit <= dst {
		// dst resolves to a span but not to an in-use heap slot
		// (e.g. a stack or a dead region); no barrier work needed.
		return
	}

	buf := &getg().m.p.ptr().wbBuf
	h := heapBitsForAddr(dst)
	if src == 0 {
		// Zeroing: only the old dst values need to be shaded.
		for i := uintptr(0); i < size; i += goarch.PtrSize {
			if h.isPointer() {
				dstx := (*uintptr)(unsafe.Pointer(dst + i))
				if !buf.putFast(*dstx, 0) {
					wbBufFlush(nil, 0)
				}
			}
			h = h.next()
		}
	} else {
		// Copy: shade both the old dst value and the incoming src value.
		for i := uintptr(0); i < size; i += goarch.PtrSize {
			if h.isPointer() {
				dstx := (*uintptr)(unsafe.Pointer(dst + i))
				srcx := (*uintptr)(unsafe.Pointer(src + i))
				if !buf.putFast(*dstx, *srcx) {
					wbBufFlush(nil, 0)
				}
			}
			h = h.next()
		}
	}
}
638
639
640
641
642
643
644
645
646
647
648
// bulkBarrierPreWriteSrcOnly is like bulkBarrierPreWrite but only
// enqueues the incoming src values, not the old dst values — for use
// when the destination slots are known to contain no live pointers
// (e.g. freshly cleared memory). The pointer layout is still taken
// from dst's heap bitmap. dst, src and size must be pointer-aligned.
func bulkBarrierPreWriteSrcOnly(dst, src, size uintptr) {
	if (dst|src|size)&(goarch.PtrSize-1) != 0 {
		throw("bulkBarrierPreWrite: unaligned arguments")
	}
	if !writeBarrier.needed {
		return
	}
	buf := &getg().m.p.ptr().wbBuf
	h := heapBitsForAddr(dst)
	for i := uintptr(0); i < size; i += goarch.PtrSize {
		if h.isPointer() {
			srcx := (*uintptr)(unsafe.Pointer(src + i))
			if !buf.putFast(0, *srcx) {
				wbBufFlush(nil, 0)
			}
		}
		h = h.next()
	}
}
668
669
670
671
672
673
674
675
676
// bulkBarrierBitmap executes write barriers for [dst, dst+size) using
// a 1-bit-per-word pointer bitmap instead of the heap bitmap. bits
// points to the bitmap and maskOffset is dst's word offset within the
// region the bitmap describes. src == 0 means the destination is being
// zeroed. Used for globals (data/bss masks), where the heap bitmap
// does not apply.
func bulkBarrierBitmap(dst, src, size, maskOffset uintptr, bits *uint8) {
	word := maskOffset / goarch.PtrSize
	bits = addb(bits, word/8)
	mask := uint8(1) << (word % 8)

	buf := &getg().m.p.ptr().wbBuf
	for i := uintptr(0); i < size; i += goarch.PtrSize {
		if mask == 0 {
			bits = addb(bits, 1)
			if *bits == 0 {
				// Skip 8 words: this bitmap byte has no pointers.
				i += 7 * goarch.PtrSize
				continue
			}
			mask = 1
		}
		if *bits&mask != 0 {
			dstx := (*uintptr)(unsafe.Pointer(dst + i))
			if src == 0 {
				if !buf.putFast(*dstx, 0) {
					wbBufFlush(nil, 0)
				}
			} else {
				srcx := (*uintptr)(unsafe.Pointer(src + i))
				if !buf.putFast(*dstx, *srcx) {
					wbBufFlush(nil, 0)
				}
			}
		}
		mask <<= 1
	}
}
709
710
711
712
713
714
715
716
717
718
719
720
721
722
723
724
725
726
// typeBitsBulkBarrier executes a write barrier for every pointer slot
// of a value of type typ being copied from src to dst, walking typ's
// 1-bit gcdata mask directly. typ must describe the memory exactly
// (typ.size == size) and must not use a GC program.
func typeBitsBulkBarrier(typ *_type, dst, src, size uintptr) {
	if typ == nil {
		throw("runtime: typeBitsBulkBarrier without type")
	}
	if typ.size != size {
		println("runtime: typeBitsBulkBarrier with type ", typ.string(), " of size ", typ.size, " but memory size", size)
		throw("runtime: invalid typeBitsBulkBarrier")
	}
	if typ.kind&kindGCProg != 0 {
		// GC programs would have to be expanded first; not supported here.
		println("runtime: typeBitsBulkBarrier with type ", typ.string(), " with GC prog")
		throw("runtime: invalid typeBitsBulkBarrier")
	}
	if !writeBarrier.needed {
		return
	}
	ptrmask := typ.gcdata
	buf := &getg().m.p.ptr().wbBuf
	var bits uint32
	// Only the first typ.ptrdata bytes can contain pointers.
	for i := uintptr(0); i < typ.ptrdata; i += goarch.PtrSize {
		if i&(goarch.PtrSize*8-1) == 0 {
			// Load the next mask byte every 8 words.
			bits = uint32(*ptrmask)
			ptrmask = addb(ptrmask, 1)
		} else {
			bits = bits >> 1
		}
		if bits&1 != 0 {
			dstx := (*uintptr)(unsafe.Pointer(dst + i))
			srcx := (*uintptr)(unsafe.Pointer(src + i))
			if !buf.putFast(*dstx, *srcx) {
				wbBufFlush(nil, 0)
			}
		}
	}
}
761
762
763
764
765
766
767
768
769
770
771
772
773
// initSpan initializes the heap bitmap for span s, whose first word h
// describes: all-zero for ordinary spans, or all pointer+scan bits for
// spans of pointer-sized elements (64-bit only), where every word is a
// pointer. h must be byte-aligned (shift == 0) and the span must cover
// a multiple of wordsPerBitmapByte words.
func (h heapBits) initSpan(s *mspan) {
	// Clear bits corresponding to objects.
	nw := (s.npages << _PageShift) / goarch.PtrSize
	if nw%wordsPerBitmapByte != 0 {
		throw("initSpan: unaligned length")
	}
	if h.shift != 0 {
		throw("initSpan: unaligned base")
	}
	isPtrs := goarch.PtrSize == 8 && s.elemsize == goarch.PtrSize
	for nw > 0 {
		// Process one arena's worth of bitmap at a time.
		hNext, anw := h.forwardOrBoundary(nw)
		nbyte := anw / wordsPerBitmapByte
		if isPtrs {
			bitp := h.bitp
			for i := uintptr(0); i < nbyte; i++ {
				*bitp = bitPointerAll | bitScanAll
				bitp = add1(bitp)
			}
		} else {
			memclrNoHeapPointers(unsafe.Pointer(h.bitp), nbyte)
		}
		h = hNext
		nw -= anw
	}
}
800
801
802
// countAlloc returns the number of objects in span s whose gcmarkBits
// bit is set, by popcounting the mark bits 8 bytes at a time.
func (s *mspan) countAlloc() int {
	count := 0
	bytes := divRoundUp(s.nelems, 8)
	// Iterate over each 8-byte chunk and count allocations with an
	// intrinsic. Note that newMarkBits guarantees that gcmarkBits will
	// be 8-byte aligned, so we don't have to worry about edge cases,
	// irrelevant bits will simply be zero.
	// NOTE(review): the 8-byte alignment/padding guarantee comes from
	// the mark-bits allocator, which is outside this chunk — confirm.
	for i := uintptr(0); i < bytes; i += 8 {
		// Extract 64 bits from the byte pointer and get a OnesCount of
		// every bit in it.
		mrkBits := *(*uint64)(unsafe.Pointer(s.gcmarkBits.bytep(i)))
		count += sys.OnesCount64(mrkBits)
	}
	return count
}
820
821
822
823
824
825
826
827
828
829
830
831
832
833
834
835
836
837
838
839
840
841
842
843
// heapBitsSetType records in the heap bitmap the pointer/scan layout
// of a newly allocated object at x. size is the allocated slot size,
// dataSize the amount actually used by values of type typ (dataSize
// may be a multiple of typ.size for arrays backing slices). The layout
// is taken from typ.gcdata, either a 1-bit pointer mask or a GC
// program (kindGCProg).
//
// The function is structured in phases (see the Phase3/Phase4 labels):
//   Phase 1: set up a pointer-mask source (possibly replicated for
//            repeated elements) in (p, b, nb, endp, endnb, pbits);
//   Phase 2: stream the mask into bitmap bytes 4 words at a time;
//   Phase 3: clear bits for the unused tail of the allocation slot;
//   Phase 4: if the bitmap was built out of place (object crosses an
//            arena boundary), copy it into the real bitmap(s).
// Set doubleCheck to true for an exhaustive self-verification pass.
func heapBitsSetType(x, size, dataSize uintptr, typ *_type) {
	const doubleCheck = false

	const (
		// Pointer+scan masks for the low 1, 2 and 3 entries of a byte.
		mask1 = bitPointer | bitScan
		mask2 = bitPointer | bitScan | mask1<<heapBitsShift
		mask3 = bitPointer | bitScan | mask2<<heapBitsShift
	)

	// dataSize is always size rounded up to the next malloc size class,
	// except in the case of allocating a defer block, in which case
	// size is sizeof(_defer{}) (at least 6 words) and dataSize may be
	// arbitrarily larger.
	//
	// Single pointer-sized word: initSpan already set the bits for
	// pointer-sized-element spans, so there is nothing to write.
	if goarch.PtrSize == 8 && size == goarch.PtrSize {
		if doubleCheck {
			h := heapBitsForAddr(x)
			if !h.isPointer() {
				throw("heapBitsSetType: pointer bit missing")
			}
			if !h.morePointers() {
				throw("heapBitsSetType: scan bit missing")
			}
		}
		return
	}

	h := heapBitsForAddr(x)
	ptrmask := typ.gcdata // start of 1-bit pointer mask (or GC program)

	// 2-word objects: the whole layout fits in (at most) two entries of
	// a single bitmap byte, written directly here.
	if size == 2*goarch.PtrSize {
		if typ.size == goarch.PtrSize {
			// 2 words of pointer-sized elements.
			if goarch.PtrSize == 4 && dataSize == goarch.PtrSize {
				// Only the first word is used: set its bits, clear the second.
				*h.bitp &^= (bitPointer | bitScan | (bitPointer|bitScan)<<heapBitsShift) << h.shift
				*h.bitp |= (bitPointer | bitScan) << h.shift
			} else {
				// Both words are pointers.
				*h.bitp |= (bitPointer | bitScan | (bitPointer|bitScan)<<heapBitsShift) << h.shift
			}
			return
		}
		// Otherwise typ.size must be 2 words and the mask is 1 byte.
		if doubleCheck {
			if typ.size != 2*goarch.PtrSize || typ.kind&kindGCProg != 0 {
				print("runtime: heapBitsSetType size=", size, " but typ.size=", typ.size, " gcprog=", typ.kind&kindGCProg != 0, "\n")
				throw("heapBitsSetType")
			}
		}
		b := uint32(*ptrmask)
		hb := b & 3
		// Scan bits only for the words inside typ.ptrdata.
		hb |= bitScanAll & ((bitScan << (typ.ptrdata / goarch.PtrSize)) - 1)
		// Clear both entries, then install the computed bits.
		*h.bitp &^= (bitPointer | bitScan | ((bitPointer | bitScan) << heapBitsShift)) << h.shift
		*h.bitp |= uint8(hb << h.shift)
		return
	} else if size == 3*goarch.PtrSize {
		// 3-word objects (64-bit only): the three entries may straddle
		// a byte boundary depending on h.shift.
		b := uint8(*ptrmask)
		if doubleCheck {
			if b == 0 {
				println("runtime: invalid type ", typ.string())
				throw("heapBitsSetType: called with non-pointer type")
			}
			if goarch.PtrSize != 8 {
				throw("heapBitsSetType: unexpected 3 pointer wide size class on 32 bit")
			}
			if typ.kind&kindGCProg != 0 {
				throw("heapBitsSetType: unexpected GC prog for 3 pointer wide size class")
			}
			if typ.size == 2*goarch.PtrSize {
				print("runtime: heapBitsSetType size=", size, " but typ.size=", typ.size, "\n")
				throw("heapBitsSetType: inconsistent object sizes")
			}
		}
		if typ.size == goarch.PtrSize {
			// The type contains a pointer otherwise heapBitsSetType
			// wouldn't have been called.
			if doubleCheck && *typ.gcdata != 1 {
				print("runtime: heapBitsSetType size=", size, " typ.size=", typ.size, "but *typ.gcdata", *typ.gcdata, "\n")
				throw("heapBitsSetType: unexpected gcdata for 1 pointer wide type size in 3 pointer wide size class")
			}
			// 3 element array of pointer-sized elements: all three words
			// are pointers.
			b = 7
		}

		// Build the 3-entry pointer+scan pattern in hb.
		hb := b & 7
		// Set bitScan bits for all words.
		hb |= hb << wordsPerBitmapByte
		// First bitScan bit is always set since the type contains pointers.
		hb |= bitScan
		// Second bitScan bit needs to also be set if the third
		// bitScan bit is set.
		hb |= hb & (bitScan << (2 * heapBitsShift)) >> 1

		// For h.shift > 1 heap bits cross a byte boundary and need to
		// be written part to the current byte and part to the next.
		switch h.shift {
		case 0:
			*h.bitp &^= mask3 << 0
			*h.bitp |= hb << 0
		case 1:
			*h.bitp &^= mask3 << 1
			*h.bitp |= hb << 1
		case 2:
			*h.bitp &^= mask2 << 2
			*h.bitp |= (hb & mask2) << 2
			// Two words written to the first byte; advance two words
			// to the next byte for the third entry.
			h = h.next().next()
			*h.bitp &^= mask1
			*h.bitp |= (hb >> 2) & mask1
		case 3:
			*h.bitp &^= mask1 << 3
			*h.bitp |= (hb & mask1) << 3
			// One word written to the first byte; advance one word to
			// the next byte for the remaining two entries.
			h = h.next()
			*h.bitp &^= mask2
			*h.bitp |= (hb >> 1) & mask2
		}
		return
	}

	// If the object spans an arena boundary, build the bitmap in the
	// (zeroed) object itself and copy it into the real bitmap(s) in
	// Phase 4. With doubleCheck on, exercise this path at random.
	outOfPlace := false
	if arenaIndex(x+size-1) != arenaIdx(h.arena) || (doubleCheck && fastrandn(2) == 0) {
		outOfPlace = true
		h.bitp = (*uint8)(unsafe.Pointer(x))
		h.last = nil
	}

	var (
		// Phase 1: mask source state.
		p     *byte   // last ptrmask byte read
		b     uintptr // ptrmask bits already loaded
		nb    uintptr // number of bits in b at bottom of b
		endp  *byte   // final ptrmask byte to read (then repeat)
		endnb uintptr // number of valid bits in *endp
		pbits uintptr // alternate source of bits, replicated pattern

		// Phase 2: bitmap writing state.
		w     uintptr // words processed
		nw    uintptr // number of words to process
		hbitp *byte   // next heap bitmap byte to write
		hb    uintptr // bits being prepared for *hbitp
	)

	hbitp = h.bitp

	// Handle GC program: expand it into the bitmap directly; the
	// streaming machinery below only handles plain pointer masks.
	if typ.kind&kindGCProg != 0 {
		heapBitsSetTypeGCProg(h, typ.ptrdata, typ.size, dataSize, size, addb(typ.gcdata, 4))
		if doubleCheck {
			// For the verification pass below, expand the program into
			// a plain 1-bit mask in the shared debug buffer.
			lock(&debugPtrmask.lock)
			if debugPtrmask.data == nil {
				debugPtrmask.data = (*byte)(persistentalloc(1<<20, 1, &memstats.other_sys))
			}
			ptrmask = debugPtrmask.data
			runGCProg(addb(typ.gcdata, 4), nil, ptrmask, 1)
		}
		goto Phase4
	}

	// Phase 1: prepare the mask source. If the object is an array of
	// typ (typ.size < dataSize), the per-element mask must repeat every
	// typ.size/PtrSize words; either replicate it into pbits (small
	// masks) or arrange to re-read ptrmask with a partial final byte.
	p = ptrmask
	if typ.size < dataSize {
		// Filling in bits for an array of typ.
		// Set up for repetition of ptrmask during main loop.
		const maxBits = goarch.PtrSize*8 - 7
		if typ.ptrdata/goarch.PtrSize <= maxBits {
			// Entire ptrmask fits in uintptr with room for a byte fragment.
			// Load into pbits and never read from ptrmask again.
			nb = typ.ptrdata / goarch.PtrSize
			for i := uintptr(0); i < nb; i += 8 {
				b |= uintptr(*p) << i
				p = add1(p)
			}
			nb = typ.size / goarch.PtrSize

			// Replicate the pattern so it fills the register as many
			// whole times as possible, keeping refills rare.
			pbits = b
			endnb = nb
			if nb+nb <= maxBits {
				for endnb <= goarch.PtrSize*8 {
					pbits |= pbits << endnb
					endnb += endnb
				}
				// Truncate to a multiple of the original pattern width.
				endnb = uintptr(maxBits/byte(nb)) * nb
				pbits &= 1<<endnb - 1
				b = pbits
				nb = endnb
			}

			// p == nil signals "refill from pbits" in the main loop.
			p = nil
			endp = nil
		} else {
			// Ptrmask is larger. Read it multiple times.
			n := (typ.ptrdata/goarch.PtrSize+7)/8 - 1
			endp = addb(ptrmask, n)
			endnb = typ.size/goarch.PtrSize - n*8
		}
	}
	if p != nil {
		b = uintptr(*p)
		p = add1(p)
		nb = 8
	}

	if typ.size == dataSize {
		// Single entry: can stop once ptrdata words are described.
		nw = typ.ptrdata / goarch.PtrSize
	} else {
		// Repeated instances of typ in an array: describe all but the
		// trailing scalar part of the final element.
		nw = ((dataSize/typ.size-1)*typ.size + typ.ptrdata) / goarch.PtrSize
	}
	if nw == 0 {
		// No pointers! Caller was supposed to check.
		println("runtime: invalid type ", typ.string())
		throw("heapBitsSetType: called with non-pointer type")
		return
	}

	// Phase 2 prologue: the object may start in the middle of a bitmap
	// byte. Since size >= 4 words here, allocation alignment means the
	// start shift is 0 or 2.
	switch {
	default:
		throw("heapBitsSetType: unexpected shift")

	case h.shift == 0:
		// Ptrmask and heap bitmap are aligned: prepare the first full
		// byte directly (written by the main loop / Phase 3).
		hb = b & bitPointerAll
		hb |= bitScanAll
		if w += 4; w >= nw {
			goto Phase3
		}
		*hbitp = uint8(hb)
		hbitp = add1(hbitp)
		b >>= 4
		nb -= 4

	case h.shift == 2:
		// Ptrmask and heap bitmap are misaligned: write the first two
		// entries into the top half of the current byte, preserving
		// its bottom half (which describes a different object).
		hb = (b & (bitPointer | bitPointer<<heapBitsShift)) << (2 * heapBitsShift)
		hb |= bitScan << (2 * heapBitsShift)
		if nw > 1 {
			hb |= bitScan << (3 * heapBitsShift)
		}
		b >>= 2
		nb -= 2
		*hbitp &^= uint8((bitPointer | bitScan | ((bitPointer | bitScan) << heapBitsShift)) << (2 * heapBitsShift))
		*hbitp |= uint8(hb)
		hbitp = add1(hbitp)
		if w += 2; w >= nw {
			// The object ends after only two words; account for the
			// rest of the byte and go straight to the tail phase.
			hb = 0
			w += 4
			goto Phase3
		}
	}

	// Phase 2 main loop: each iteration writes up to two bitmap bytes
	// (8 words) and refills b from whichever source Phase 1 set up
	// (ptrmask stream, replicated pbits, or wrap-around re-read).
	nb -= 4
	for {
		// Emit bitmap byte.
		hb = b & bitPointerAll
		hb |= bitScanAll
		if w += 4; w >= nw {
			break
		}
		*hbitp = uint8(hb)
		hbitp = add1(hbitp)
		b >>= 4

		// Load more bits. b has nb right now.
		if p != endp {
			// Fast path: keep reading the linear mask.
			if nb < 8 {
				b |= uintptr(*p) << nb
				p = add1(p)
			} else {
				nb -= 8
			}
		} else if p == nil {
			// Almost as fast path: refill from the replicated pattern.
			if nb < 8 {
				b |= pbits << nb
				nb += endnb
			}
			nb -= 8
		} else {
			// Slow path: reached the partial final mask byte; fold it
			// in and restart the mask from the beginning.
			b |= uintptr(*p) << nb
			nb += endnb
			if nb < 8 {
				b |= uintptr(*ptrmask) << nb
				p = add1(ptrmask)
			} else {
				nb -= 8
				p = ptrmask
			}
		}

		// Emit bitmap byte.
		hb = b & bitPointerAll
		hb |= bitScanAll
		if w += 4; w >= nw {
			break
		}
		*hbitp = uint8(hb)
		hbitp = add1(hbitp)
		b >>= 4
	}

Phase3:
	// Phase 3: clear the bits for the scalar tail of the allocation.
	if w > nw {
		// Counting the 4 entries in hb not yet flushed to hbitp,
		// there are more entries than possible pointer slots.
		// Discard the excess entries (can't be more than 3).
		mask := uintptr(1)<<(4-(w-nw)) - 1
		hb &= mask | mask<<4 // apply mask to both pointer bits and scan bits
	}

	// Change nw from counting possibly-pointer words to total words in allocation.
	nw = size / goarch.PtrSize

	// Write whole bitmap bytes (zero past the last described word).
	if w <= nw {
		*hbitp = uint8(hb)
		hbitp = add1(hbitp)
		hb = 0 // for possible final half-byte below
		for w += 4; w <= nw; w += 4 {
			*hbitp = 0
			hbitp = add1(hbitp)
		}
	}

	// Write final partial bitmap byte, if any: merge hb into the low
	// half of *hbitp, preserving the top half (a different object).
	if w == nw+2 {
		*hbitp = *hbitp&^(bitPointer|bitScan|(bitPointer|bitScan)<<heapBitsShift) | uint8(hb)
	}

Phase4:
	// Phase 4: the bitmap was built in the object itself; copy it into
	// the real bitmap, possibly spanning two arenas, then zero the
	// scratch area so the object starts out clean.
	if outOfPlace {
		h := heapBitsForAddr(x)
		// cnw counts heap words remaining to copy.
		cnw := size / goarch.PtrSize
		src := (*uint8)(unsafe.Pointer(x))
		if doubleCheck {
			if !(h.shift == 0 || h.shift == 2) {
				print("x=", x, " size=", size, " cnw=", h.shift, "\n")
				throw("bad start shift")
			}
		}
		if h.shift == 2 {
			// Unaligned start: merge the first scratch byte into the
			// top half of the first real bitmap byte.
			*h.bitp = *h.bitp&^((bitPointer|bitScan|(bitPointer|bitScan)<<heapBitsShift)<<(2*heapBitsShift)) | *src
			h = h.next().next()
			cnw -= 2
			src = addb(src, 1)
		}
		// Copy whole bytes, one arena at a time.
		for cnw >= 4 {
			hNext, words := h.forwardOrBoundary(cnw / 4 * 4)
			n := words / 4
			memmove(unsafe.Pointer(h.bitp), unsafe.Pointer(src), n)
			cnw -= words
			h = hNext
			src = addb(src, n)
		}
		if doubleCheck && h.shift != 0 {
			print("cnw=", cnw, " h.shift=", h.shift, "\n")
			throw("bad shift after block copy")
		}
		// Handle the trailing half byte, if any.
		if cnw == 2 {
			*h.bitp = *h.bitp&^(bitPointer|bitScan|(bitPointer|bitScan)<<heapBitsShift) | *src
			src = addb(src, 1)
			h = h.next().next()
		}
		if doubleCheck {
			if uintptr(unsafe.Pointer(src)) > x+size {
				throw("copy exceeded object size")
			}
			if !(cnw == 0 || cnw == 2) {
				print("x=", x, " size=", size, " cnw=", cnw, "\n")
				throw("bad number of remaining words")
			}
			// Set up hbitp so doubleCheck code below can check it.
			hbitp = h.bitp
		}
		// Zero the scratch space holding the bitmap copied into the object.
		memclrNoHeapPointers(unsafe.Pointer(x), uintptr(unsafe.Pointer(src))-x)
	}

	// doubleCheck pass: recompute every word's expected bits from
	// ptrmask and compare against what was actually written.
	if doubleCheck {
		// x+size may not point to the heap, so back up one word and
		// then advance it the way the regular code would have.
		end := heapBitsForAddr(x + size - goarch.PtrSize)
		if outOfPlace {
			// In out-of-place copying, we just advance using next.
			end = end.next()
		} else {
			// Don't use next because that may advance to a different
			// arena and the in-place code just advances shift/bitp.
			end.shift += heapBitsShift
			if end.shift == 4*heapBitsShift {
				end.bitp, end.shift = add1(end.bitp), 0
			}
		}
		if typ.kind&kindGCProg == 0 && (hbitp != end.bitp || (w == nw+2) != (end.shift == 2)) {
			println("ended at wrong bitmap byte for", typ.string(), "x", dataSize/typ.size)
			print("typ.size=", typ.size, " typ.ptrdata=", typ.ptrdata, " dataSize=", dataSize, " size=", size, "\n")
			print("w=", w, " nw=", nw, " b=", hex(b), " nb=", nb, " hb=", hex(hb), "\n")
			h0 := heapBitsForAddr(x)
			print("initial bits h0.bitp=", h0.bitp, " h0.shift=", h0.shift, "\n")
			print("ended at hbitp=", hbitp, " but next starts at bitp=", end.bitp, " shift=", end.shift, "\n")
			throw("bad heapBitsSetType")
		}

		// Double-check that bits to be written were written correctly.
		// Does not check that other bits were not written, unfortunately.
		h := heapBitsForAddr(x)
		nptr := typ.ptrdata / goarch.PtrSize
		ndata := typ.size / goarch.PtrSize
		count := dataSize / typ.size
		totalptr := ((count-1)*typ.size + typ.ptrdata) / goarch.PtrSize
		for i := uintptr(0); i < size/goarch.PtrSize; i++ {
			j := i % ndata
			var have, want uint8
			have = (*h.bitp >> h.shift) & (bitPointer | bitScan)
			if i >= totalptr {
				if typ.kind&kindGCProg != 0 && i < (totalptr+3)/4*4 {
					// heapBitsSetTypeGCProg always fills whole bytes,
					// so stray scan bits up to the next 4-word boundary
					// are expected.
					want = bitScan
				}
			} else {
				if j < nptr && (*addb(ptrmask, j/8)>>(j%8))&1 != 0 {
					want |= bitPointer
				}
				want |= bitScan
			}
			if have != want {
				println("mismatch writing bits for", typ.string(), "x", dataSize/typ.size)
				print("typ.size=", typ.size, " typ.ptrdata=", typ.ptrdata, " dataSize=", dataSize, " size=", size, "\n")
				print("kindGCProg=", typ.kind&kindGCProg != 0, " outOfPlace=", outOfPlace, "\n")
				print("w=", w, " nw=", nw, " b=", hex(b), " nb=", nb, " hb=", hex(hb), "\n")
				h0 := heapBitsForAddr(x)
				print("initial bits h0.bitp=", h0.bitp, " h0.shift=", h0.shift, "\n")
				print("current bits h.bitp=", h.bitp, " h.shift=", h.shift, " *h.bitp=", hex(*h.bitp), "\n")
				print("ptrmask=", ptrmask, " p=", p, " endp=", endp, " endnb=", endnb, " pbits=", hex(pbits), " b=", hex(b), " nb=", nb, "\n")
				println("at word", i, "offset", i*goarch.PtrSize, "have", hex(have), "want", hex(want))
				if typ.kind&kindGCProg != 0 {
					println("GC program:")
					dumpGCProg(addb(typ.gcdata, 4))
				}
				throw("bad heapBitsSetType")
			}
			h = h.next()
		}
		if ptrmask == debugPtrmask.data {
			unlock(&debugPtrmask.lock)
		}
	}
}
1485
// debugPtrmask is a lock-guarded scratch buffer used by
// heapBitsSetType's doubleCheck mode to expand a GC program into a
// plain pointer mask for verification.
var debugPtrmask struct {
	lock mutex
	data *byte
}
1490
1491
1492
1493
1494
1495
1496
1497
1498
1499
1500
// heapBitsSetTypeGCProg implements heapBitsSetType for a type whose
// layout is described by a GC program. h describes the first word of
// the allocation; progSize is the number of bytes the program's bits
// cover; elemSize is typ.size; dataSize the used portion (elemSize *
// element count for arrays); allocSize the full slot size; prog the
// program bytes (past the 4-byte size prefix).
//
// For repeated elements the program is extended with a synthesized
// trailer that pads each element to elemSize and repeats it count-1
// times. Any bitmap bytes between the program's output and the end of
// the allocation are zeroed.
func heapBitsSetTypeGCProg(h heapBits, progSize, elemSize, dataSize, allocSize uintptr, prog *byte) {
	if goarch.PtrSize == 8 && allocSize%(4*goarch.PtrSize) != 0 {
		// Alignment will be wrong.
		throw("heapBitsSetTypeGCProg: small allocation")
	}
	var totalBits uintptr
	if elemSize == dataSize {
		// Single element: run the program directly into the bitmap.
		totalBits = runGCProg(prog, nil, h.bitp, 2)
		if totalBits*goarch.PtrSize != progSize {
			println("runtime: heapBitsSetTypeGCProg: total bits", totalBits, "but progSize", progSize)
			throw("heapBitsSetTypeGCProg: unexpected bit count")
		}
	} else {
		count := dataSize / elemSize

		// Synthesize a trailer appended to the program:
		//   literal(0) - pad element out to elemSize (emit-zeros + repeat)
		//   repeat(elemSize, count-1) - repeat the whole element
		//   end
		// Each varint below is 7 bits per byte, high bit = continuation.
		var trailer [40]byte // 3 varints at most 10 bytes each
		i := 0
		if n := elemSize/goarch.PtrSize - progSize/goarch.PtrSize; n > 0 {
			// literal(0): emit one zero bit...
			trailer[i] = 0x01
			i++
			trailer[i] = 0
			i++
			if n > 1 {
				// ...then repeat that zero bit n-1 more times.
				trailer[i] = 0x81
				i++
				n--
				for ; n >= 0x80; n >>= 7 {
					trailer[i] = byte(n | 0x80)
					i++
				}
				trailer[i] = byte(n)
				i++
			}
		}
		// repeat(elemSize/PtrSize, count-1)
		trailer[i] = 0x80
		i++
		n := elemSize / goarch.PtrSize
		for ; n >= 0x80; n >>= 7 {
			trailer[i] = byte(n | 0x80)
			i++
		}
		trailer[i] = byte(n)
		i++
		n = count - 1
		for ; n >= 0x80; n >>= 7 {
			trailer[i] = byte(n | 0x80)
			i++
		}
		trailer[i] = byte(n)
		i++
		trailer[i] = 0 // end of program
		i++

		runGCProg(prog, &trailer[0], h.bitp, 2)

		// Even though the programs are separate, they must use the same
		// trailing number of scalar words for the final element, so the
		// total is computed from element layout, not program output.
		totalBits = (elemSize*(count-1) + progSize) / goarch.PtrSize
	}
	// Zero the bitmap between the program's output and the end of the
	// allocation slot.
	endProg := unsafe.Pointer(addb(h.bitp, (totalBits+3)/4))
	endAlloc := unsafe.Pointer(addb(h.bitp, allocSize/goarch.PtrSize/wordsPerBitmapByte))
	memclrNoHeapPointers(endProg, uintptr(endAlloc)-uintptr(endProg))
}
1576
1577
1578
1579
// progToPointerMask runs the GC program prog and returns the resulting
// 1-bit pointer mask as a bitvector covering size bytes of memory.
// The mask is allocated with persistentalloc, so it is never freed.
func progToPointerMask(prog *byte, size uintptr) bitvector {
	n := (size/goarch.PtrSize + 7) / 8
	// Allocate one extra byte as an overflow sentinel.
	x := (*[1 << 30]byte)(persistentalloc(n+1, 1, &memstats.buckhash_sys))[:n+1]
	x[len(x)-1] = 0xa1 // overflow check sentinel
	n = runGCProg(prog, nil, &x[0], 1)
	if x[len(x)-1] != 0xa1 {
		throw("progToPointerMask: overflow")
	}
	return bitvector{int32(n), &x[0]}
}
1590
1591
1592
1593
1594
1595
1596
1597
1598
1599
1600
1601
1602
1603
1604
1605
1606
1607
1608
1609
1610
1611
1612
// runGCProg executes the GC program prog (optionally followed by
// trailer, spliced in when prog's terminating 0 is reached) and writes
// the resulting bits to dst. Returns the number of bits written.
//
// The program is a bytecode stream:
//   00000000        - end of program (switch to trailer, if any)
//   0nnnnnnn        - emit n bits copied from the next ceil(n/8) bytes
//   10000000 n c    - repeat the previous n bits c times;
//                     n and c are varints (7 bits/byte, high bit = more)
//   1nnnnnnn c      - repeat the previous n bits c times; c is a varint
//
// size selects the output format:
//   size == 1: packed 1-bit-per-word pointer mask;
//   size == 2: heap bitmap format, one byte per 4 words, with
//              bitScanAll OR'ed into every output byte.
func runGCProg(prog, trailer, dst *byte, size int) uintptr {
	dstStart := dst

	// Bits waiting to be written to memory.
	var bits uintptr
	var nbits uintptr

	p := prog
Run:
	for {
		// Flush accumulated full bytes.
		// The rest of the loop assumes that nbits <= 7.
		for ; nbits >= 8; nbits -= 8 {
			if size == 1 {
				*dst = uint8(bits)
				dst = add1(dst)
				bits >>= 8
			} else {
				// Heap-bitmap format: 4 entries per byte plus scan bits.
				v := bits&bitPointerAll | bitScanAll
				*dst = uint8(v)
				dst = add1(dst)
				bits >>= 4
				v = bits&bitPointerAll | bitScanAll
				*dst = uint8(v)
				dst = add1(dst)
				bits >>= 4
			}
		}

		// Process one instruction.
		inst := uintptr(*p)
		p = add1(p)
		n := inst & 0x7F
		if inst&0x80 == 0 {
			// Literal bits; n == 0 means end of program.
			if n == 0 {
				// Program is over; continue in trailer if present.
				if trailer != nil {
					p = trailer
					trailer = nil
					continue
				}
				break Run
			}
			nbyte := n / 8
			for i := uintptr(0); i < nbyte; i++ {
				bits |= uintptr(*p) << nbits
				p = add1(p)
				if size == 1 {
					*dst = uint8(bits)
					dst = add1(dst)
					bits >>= 8
				} else {
					v := bits&0xf | bitScanAll
					*dst = uint8(v)
					dst = add1(dst)
					bits >>= 4
					v = bits&0xf | bitScanAll
					*dst = uint8(v)
					dst = add1(dst)
					bits >>= 4
				}
			}
			if n %= 8; n > 0 {
				// Partial final literal byte stays buffered in bits.
				bits |= uintptr(*p) << nbits
				p = add1(p)
				nbits += n
			}
			continue Run
		}

		// Repeat. If n == 0, it is encoded in a varint in the next bytes.
		if n == 0 {
			for off := uint(0); ; off += 7 {
				x := uintptr(*p)
				p = add1(p)
				n |= (x & 0x7F) << off
				if x&0x80 == 0 {
					break
				}
			}
		}

		// Count is encoded in a varint in the next bytes.
		c := uintptr(0)
		for off := uint(0); ; off += 7 {
			x := uintptr(*p)
			p = add1(p)
			c |= (x & 0x7F) << off
			if x&0x80 == 0 {
				break
			}
		}
		c *= n // now total number of bits to copy

		// If the pattern of n bits fits in a register (with slack for a
		// byte fragment), replicate it there and stream it out;
		// otherwise fall through to the memory-to-memory copy below.
		src := dst
		const maxBits = goarch.PtrSize*8 - 7
		if n <= maxBits {
			// Start with bits in output buffer.
			pattern := bits
			npattern := nbits

			// If we need more bits, fetch them from the output, writing
			// the pattern bits most-recent-first by walking src backward.
			if size == 1 {
				src = subtract1(src)
				for npattern < n {
					pattern <<= 8
					pattern |= uintptr(*src)
					src = subtract1(src)
					npattern += 8
				}
			} else {
				src = subtract1(src)
				for npattern < n {
					pattern <<= 4
					pattern |= uintptr(*src) & 0xf
					src = subtract1(src)
					npattern += 4
				}
			}

			// We started with the whole bit output buffer, and then
			// we loaded bits from whole bytes. Slice off the fragment
			// beyond the n bits we actually want.
			if npattern > n {
				pattern >>= npattern - n
				npattern = n
			}

			// Replicate the pattern to fill as much of a register as
			// possible (special-casing the trivial 1-bit patterns).
			if npattern == 1 {
				// A single bit being repeated: all-ones or all-zeros.
				if pattern == 1 {
					pattern = 1<<maxBits - 1
					npattern = maxBits
				} else {
					// All zeros: just extend the count; nothing to OR in.
					npattern = c
				}
			} else {
				b := pattern
				nb := npattern
				if nb+nb <= maxBits {
					// Double pattern until the whole uintptr is filled.
					for nb <= goarch.PtrSize*8 {
						b |= b << nb
						nb += nb
					}
					// Trim away incomplete copy of original pattern.
					nb = maxBits / npattern * npattern
					b &= 1<<nb - 1
					pattern = b
					npattern = nb
				}
			}

			// Add pattern to bit buffer and flush, npattern bits at a time.
			for ; c >= npattern; c -= npattern {
				bits |= pattern << nbits
				nbits += npattern
				if size == 1 {
					for nbits >= 8 {
						*dst = uint8(bits)
						dst = add1(dst)
						bits >>= 8
						nbits -= 8
					}
				} else {
					for nbits >= 4 {
						*dst = uint8(bits&0xf | bitScanAll)
						dst = add1(dst)
						bits >>= 4
						nbits -= 4
					}
				}
			}

			// Add final fragment to bit buffer.
			if c > 0 {
				pattern &= 1<<c - 1
				bits |= pattern << nbits
				nbits += c
			}
			continue Run
		}

		// Repeat pattern doesn't fit in a register: copy the previously
		// written n bits from the output back into the output, c bits
		// total, reading from src (n bits back) while writing dst.
		off := n - nbits // n > nbits because n > maxBits and nbits <= 7
		if size == 1 {
			// Pattern bits must be loaded byte-at-a-time.
			src = subtractb(src, (off+7)/8)
			if frag := off & 7; frag != 0 {
				// Unaligned start: fold in the partial first byte.
				bits |= uintptr(*src) >> (8 - frag) << nbits
				src = add1(src)
				nbits += frag
				c -= frag
			}
			// Main loop: copy one byte at a time.
			for i := c / 8; i > 0; i-- {
				bits |= uintptr(*src) << nbits
				src = add1(src)
				*dst = uint8(bits)
				dst = add1(dst)
				bits >>= 8
			}
			// Final src fragment stays buffered in bits.
			if c %= 8; c > 0 {
				bits |= (uintptr(*src) & (1<<c - 1)) << nbits
				nbits += c
			}
		} else {
			// Same as above, but in 4-entry heap-bitmap bytes.
			src = subtractb(src, (off+3)/4)
			if frag := off & 3; frag != 0 {
				bits |= (uintptr(*src) & 0xf) >> (4 - frag) << nbits
				src = add1(src)
				nbits += frag
				c -= frag
			}
			for i := c / 4; i > 0; i-- {
				bits |= (uintptr(*src) & 0xf) << nbits
				src = add1(src)
				*dst = uint8(bits&0xf | bitScanAll)
				dst = add1(dst)
				bits >>= 4
			}
			if c %= 4; c > 0 {
				bits |= (uintptr(*src) & (1<<c - 1)) << nbits
				nbits += c
			}
		}
	}

	// Write any final bits out, zero-padded up to a byte boundary.
	var totalBits uintptr
	if size == 1 {
		totalBits = (uintptr(unsafe.Pointer(dst))-uintptr(unsafe.Pointer(dstStart)))*8 + nbits
		nbits += -nbits & 7 // round up to a full byte
		for ; nbits > 0; nbits -= 8 {
			*dst = uint8(bits)
			dst = add1(dst)
			bits >>= 8
		}
	} else {
		totalBits = (uintptr(unsafe.Pointer(dst))-uintptr(unsafe.Pointer(dstStart)))*4 + nbits
		nbits += -nbits & 3 // round up to a full bitmap entry group
		for ; nbits > 0; nbits -= 4 {
			v := bits&0xf | bitScanAll
			*dst = uint8(v)
			dst = add1(dst)
			bits >>= 4
		}
	}
	return totalBits
}
1889
1890
1891
1892
1893
1894
// materializeGCProg allocates a manually-managed span and runs the GC
// program prog on it, producing a 1-bit pointer bitmap covering ptrdata
// bytes (one bit per pointer-sized word). The caller is responsible for
// releasing the span with dematerializeGCProg.
func materializeGCProg(ptrdata uintptr, prog *byte) *mspan {
	// Each word of ptrdata needs one bit in the bitmap.
	bitmapBytes := divRoundUp(ptrdata, 8*goarch.PtrSize)
	// Compute the number of pages needed for bitmapBytes.
	pages := divRoundUp(bitmapBytes, pageSize)
	s := mheap_.allocManual(pages, spanAllocPtrScalarBits)
	// NOTE(review): the program is read starting 4 bytes past prog —
	// presumably skipping a length/size header that precedes the
	// program proper; confirm against the GC program encoder.
	runGCProg(addb(prog, 4), nil, (*byte)(unsafe.Pointer(s.startAddr)), 1)
	return s
}
// dematerializeGCProg frees a span previously returned by
// materializeGCProg.
func dematerializeGCProg(s *mspan) {
	mheap_.freeManual(s, spanAllocPtrScalarBits)
}
1907
1908 func dumpGCProg(p *byte) {
1909 nptr := 0
1910 for {
1911 x := *p
1912 p = add1(p)
1913 if x == 0 {
1914 print("\t", nptr, " end\n")
1915 break
1916 }
1917 if x&0x80 == 0 {
1918 print("\t", nptr, " lit ", x, ":")
1919 n := int(x+7) / 8
1920 for i := 0; i < n; i++ {
1921 print(" ", hex(*p))
1922 p = add1(p)
1923 }
1924 print("\n")
1925 nptr += int(x)
1926 } else {
1927 nbit := int(x &^ 0x80)
1928 if nbit == 0 {
1929 for nb := uint(0); ; nb += 7 {
1930 x := *p
1931 p = add1(p)
1932 nbit |= int(x&0x7f) << nb
1933 if x&0x80 == 0 {
1934 break
1935 }
1936 }
1937 }
1938 count := 0
1939 for nb := uint(0); ; nb += 7 {
1940 x := *p
1941 p = add1(p)
1942 count |= int(x&0x7f) << nb
1943 if x&0x80 == 0 {
1944 break
1945 }
1946 }
1947 print("\t", nptr, " repeat ", nbit, " × ", count, "\n")
1948 nptr += nbit * count
1949 }
1950 }
1951 }
1952
1953
1954
1955 func getgcmaskcb(frame *stkframe, ctxt unsafe.Pointer) bool {
1956 target := (*stkframe)(ctxt)
1957 if frame.sp <= target.sp && target.sp < frame.varp {
1958 *target = *frame
1959 return false
1960 }
1961 return true
1962 }
1963
1964
1965
1966
1967
1968 func reflect_gcbits(x any) []byte {
1969 ret := getgcmask(x)
1970 typ := (*ptrtype)(unsafe.Pointer(efaceOf(&x)._type)).elem
1971 nptr := typ.ptrdata / goarch.PtrSize
1972 for uintptr(len(ret)) > nptr && ret[len(ret)-1] == 0 {
1973 ret = ret[:len(ret)-1]
1974 }
1975 return ret
1976 }
1977
1978
1979
1980
// getgcmask returns the GC pointer mask for the object ep's data
// pointer refers to: one byte per pointer-sized word, 1 where the word
// holds a pointer. It checks, in order, the data segment, the bss
// segment, the heap, and the current goroutine's stack; for any other
// address it returns nil.
func getgcmask(ep any) (mask []byte) {
	e := *efaceOf(&ep)
	p := e.data
	t := e._type

	for _, datap := range activeModules() {
		// data segment
		if datap.data <= uintptr(p) && uintptr(p) < datap.edata {
			bitmap := datap.gcdatamask.bytedata
			n := (*ptrtype)(unsafe.Pointer(t)).elem.size
			mask = make([]byte, n/goarch.PtrSize)
			for i := uintptr(0); i < n; i += goarch.PtrSize {
				// Bit index of this word within the module's data mask.
				off := (uintptr(p) + i - datap.data) / goarch.PtrSize
				mask[i/goarch.PtrSize] = (*addb(bitmap, off/8) >> (off % 8)) & 1
			}
			return
		}

		// bss segment
		if datap.bss <= uintptr(p) && uintptr(p) < datap.ebss {
			bitmap := datap.gcbssmask.bytedata
			n := (*ptrtype)(unsafe.Pointer(t)).elem.size
			mask = make([]byte, n/goarch.PtrSize)
			for i := uintptr(0); i < n; i += goarch.PtrSize {
				off := (uintptr(p) + i - datap.bss) / goarch.PtrSize
				mask[i/goarch.PtrSize] = (*addb(bitmap, off/8) >> (off % 8)) & 1
			}
			return
		}
	}

	// heap
	if base, s, _ := findObject(uintptr(p), 0, 0); base != 0 {
		// NOTE(review): the mask is built from the object's base, not
		// from p itself, so an interior pointer yields the mask of the
		// whole object.
		hbits := heapBitsForAddr(base)
		n := s.elemsize
		mask = make([]byte, n/goarch.PtrSize)
		for i := uintptr(0); i < n; i += goarch.PtrSize {
			if hbits.isPointer() {
				mask[i/goarch.PtrSize] = 1
			}
			if !hbits.morePointers() {
				// No pointers beyond this word; truncate the mask.
				mask = mask[:i/goarch.PtrSize]
				break
			}
			hbits = hbits.next()
		}
		return
	}

	// stack
	if _g_ := getg(); _g_.m.curg.stack.lo <= uintptr(p) && uintptr(p) < _g_.m.curg.stack.hi {
		var frame stkframe
		frame.sp = uintptr(p)
		_g_ := getg()
		// Walk curg's stack; getgcmaskcb copies the frame containing p
		// into frame and stops the traceback.
		gentraceback(_g_.m.curg.sched.pc, _g_.m.curg.sched.sp, 0, _g_.m.curg, 0, nil, 1000, getgcmaskcb, noescape(unsafe.Pointer(&frame)), 0)
		if frame.fn.valid() {
			locals, _, _ := getStackMap(&frame, nil, false)
			if locals.n == 0 {
				return
			}
			size := uintptr(locals.n) * goarch.PtrSize
			n := (*ptrtype)(unsafe.Pointer(t)).elem.size
			mask = make([]byte, n/goarch.PtrSize)
			for i := uintptr(0); i < n; i += goarch.PtrSize {
				// Bit index within the frame's locals stack map.
				off := (uintptr(p) + i - frame.varp + size) / goarch.PtrSize
				mask[i/goarch.PtrSize] = locals.ptrbit(off)
			}
		}
		return
	}

	// otherwise, not something the GC knows about.
	// possibly read-only data, like malloc'd data?
	return
}
2057
View as plain text