package types

import (
	"math"
	"sort"

	"cmd/compile/internal/base"
	"cmd/internal/src"
	"internal/types/errors"
)

// PtrSize is the size in bytes of a pointer on the target architecture.
var PtrSize int

// RegSize is the size in bytes of a general-purpose register on the
// target architecture.
var RegSize int

// Slices in the runtime are represented by three components:
//
//	type slice struct {
//		ptr unsafe.Pointer
//		len int
//		cap int
//	}
//
// Strings in the runtime are represented by two components:
//
//	type string struct {
//		ptr unsafe.Pointer
//		len int
//	}
//
// These variables are the offsets of fields and sizes of these structs.
var (
	SlicePtrOffset int64
	SliceLenOffset int64
	SliceCapOffset int64

	SliceSize  int64
	StringSize int64
)
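
// For illustration: with PtrSize == 8 (a 64-bit target), the setup code
// assigns SlicePtrOffset = 0, SliceLenOffset = 8, SliceCapOffset = 16,
// SliceSize = 24, and StringSize = 16.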

// SkipSizeForTracing is set to skip size/alignment calculations while
// typecheck tracing output is being generated; calling CalcSize during
// tracing is not safe. See CalcSize.
var SkipSizeForTracing bool

// typePos returns the position associated with t.
// This is where t was declared or where it appeared as a type expression.
func typePos(t *Type) src.XPos {
	if pos := t.Pos(); pos.IsKnown() {
		return pos
	}
	base.Fatalf("bad type: %v", t)
	panic("unreachable")
}

// MaxWidth is the maximum size of a value on the target architecture.
var MaxWidth int64

// CalcSizeDisabled indicates whether it is safe
// to calculate Types' widths and alignments. See CalcSize.
var CalcSizeDisabled bool

// defercalc is the nesting depth of DeferCheckSize calls; while it is
// nonzero, CheckSize defers size calculations onto deferredTypeStack.
var defercalc int

// RoundUp rounds o to a multiple of r, r is a power of 2.
func RoundUp(o int64, r int64) int64 {
	if r < 1 || r > 8 || r&(r-1) != 0 {
		base.Fatalf("Round %d", r)
	}
	return (o + r - 1) &^ (r - 1)
}
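
// For example, RoundUp(13, 8) == 16 and RoundUp(17, 4) == 20; an offset
// already at the requested alignment is unchanged (RoundUp(16, 8) == 16).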

// expandiface computes the method set for interface type t by
// expanding embedded interfaces.
func expandiface(t *Type) {
	seen := make(map[*Sym]*Field)
	var methods []*Field

	addMethod := func(m *Field, explicit bool) {
		switch prev := seen[m.Sym]; {
		case prev == nil:
			seen[m.Sym] = m
		case !explicit && Identical(m.Type, prev.Type):
			return
		default:
			base.ErrorfAt(m.Pos, errors.DuplicateDecl, "duplicate method %s", m.Sym.Name)
		}
		methods = append(methods, m)
	}

	{
		methods := t.Methods()
		sort.SliceStable(methods, func(i, j int) bool {
			mi, mj := methods[i], methods[j]

			// Sort embedded types by type name (if any).
			if mi.Sym == nil && mj.Sym == nil {
				return mi.Type.Sym().Less(mj.Type.Sym())
			}

			// Sort methods before embedded types.
			if mi.Sym == nil || mj.Sym == nil {
				return mi.Sym != nil
			}

			// Sort methods by symbol name.
			return mi.Sym.Less(mj.Sym)
		})
	}

	for _, m := range t.Methods() {
		if m.Sym == nil {
			continue
		}

		CheckSize(m.Type)
		addMethod(m, true)
	}

	for _, m := range t.Methods() {
		if m.Sym != nil || m.Type == nil {
			continue
		}

		// Embedded elements can be non-interface types (e.g. type
		// parameters or unions); only embedded interfaces contribute
		// methods here, so skip anything else.
		if !m.Type.IsInterface() {
			continue
		}

		// Embedded interface: duplicate all of its methods
		// and add them to t's method set.
		for _, t1 := range m.Type.AllMethods() {
			f := NewField(m.Pos, t1.Sym, t1.Type)
			addMethod(f, false)

			// Clear position of the synthesized field, for consistent printing.
			f.Pos = src.NoXPos
		}

		// Clear position of the embedded interface, for consistent printing.
		m.Pos = src.NoXPos
	}

	sort.Sort(MethodsByName(methods))

	if int64(len(methods)) >= MaxWidth/int64(PtrSize) {
		base.ErrorfAt(typePos(t), 0, "interface too large")
	}
	for i, m := range methods {
		m.Offset = int64(i) * int64(PtrSize)
	}

	t.SetAllMethods(methods)
}
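
// For illustration: interface{ io.Reader; Close() error } expands to the
// sorted method set (Close, Read), with Close at Offset 0 and Read at
// Offset PtrSize.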

// calcStructOffset computes the offsets of a sequence of fields,
// starting at the given offset. It returns the resulting offset.
func calcStructOffset(t *Type, fields []*Field, offset int64) int64 {
	for _, f := range fields {
		CalcSize(f.Type)
		offset = RoundUp(offset, int64(f.Type.align))

		if t.IsStruct() {
			f.Offset = offset

			// If type T contains a field F marked as not-in-heap,
			// then T must also be a not-in-heap type. Otherwise,
			// you could heap allocate T and then get a pointer F,
			// which would be a heap pointer to a not-in-heap type.
			if f.Type.NotInHeap() {
				t.SetNotInHeap(true)
			}
		}

		offset += f.Type.width

		maxwidth := MaxWidth
		// On 32-bit systems, reflect tables impose an additional
		// constraint that each field start offset must fit in 31 bits.
		if maxwidth < 1<<32 {
			maxwidth = 1<<31 - 1
		}
		if offset >= maxwidth {
			base.ErrorfAt(typePos(t), 0, "type %L too large", t)
			offset = 8 // small but nonzero
		}
	}

	return offset
}
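
// For example, for struct{ a int8; b int64 } on a 64-bit target: a is
// placed at offset 0, b is rounded up to offset 8 (its alignment), and
// the returned offset is 16.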

func isAtomicStdPkg(p *Pkg) bool {
	if p.Prefix == `""` {
		panic("bad package prefix")
	}
	return p.Prefix == "sync/atomic" || p.Prefix == "internal/runtime/atomic"
}

// CalcSize calculates and stores the size, alignment, eq/hash algorithm,
// and ptrBytes for t.
// If CalcSizeDisabled is set, and the size/alignment
// have not already been calculated, it calls Fatal.
// This is used to prevent data races in the back end.
func CalcSize(t *Type) {
	// Calling CalcSize when typecheck tracing is enabled
	// is not safe, so skip size calculation during tracing.
	if base.EnableTrace && SkipSizeForTracing {
		return
	}
	if PtrSize == 0 {
		// Assume this is a test.
		return
	}

	if t == nil {
		return
	}

	if t.width == -2 {
		t.width = 0
		t.align = 1
		base.Fatalf("invalid recursive type %v", t)
		return
	}

	if t.widthCalculated() {
		return
	}

	if CalcSizeDisabled {
		base.Fatalf("width not calculated: %v", t)
	}

	// defer CheckSize calls until after we're done
	DeferCheckSize()

	lno := base.Pos
	if pos := t.Pos(); pos.IsKnown() {
		base.Pos = pos
	}

	t.width = -2
	t.align = 0 // 0 means use t.width, below
	t.alg = AMEM // default
	// default t.ptrBytes is 0.
	if t.Noalg() {
		t.setAlg(ANOALG)
	}

	et := t.Kind()
	switch et {
	case TFUNC, TCHAN, TMAP, TSTRING:
		break

	// SimType == 0 during bootstrap
	default:
		if SimType[t.Kind()] != 0 {
			et = SimType[t.Kind()]
		}
	}

	var w int64
	switch et {
	default:
		base.Fatalf("CalcSize: unknown type: %v", t)

	case TINT8, TUINT8, TBOOL:
		// bool is int8
		w = 1
		t.intRegs = 1

	case TINT16, TUINT16:
		w = 2
		t.intRegs = 1

	case TINT32, TUINT32:
		w = 4
		t.intRegs = 1

	case TINT64, TUINT64:
		w = 8
		t.align = uint8(RegSize)
		t.intRegs = uint8(8 / RegSize)

	case TFLOAT32:
		w = 4
		t.floatRegs = 1
		t.setAlg(AFLOAT32)

	case TFLOAT64:
		w = 8
		t.align = uint8(RegSize)
		t.floatRegs = 1
		t.setAlg(AFLOAT64)

	case TCOMPLEX64:
		w = 8
		t.align = 4
		t.floatRegs = 2
		t.setAlg(ACPLX64)

	case TCOMPLEX128:
		w = 16
		t.align = uint8(RegSize)
		t.floatRegs = 2
		t.setAlg(ACPLX128)

	case TPTR:
		w = int64(PtrSize)
		t.intRegs = 1
		CheckSize(t.Elem())
		t.ptrBytes = int64(PtrSize)

	case TUNSAFEPTR:
		w = int64(PtrSize)
		t.intRegs = 1
		t.ptrBytes = int64(PtrSize)

	case TINTER: // implemented as 2 pointers
		w = 2 * int64(PtrSize)
		t.align = uint8(PtrSize)
		t.intRegs = 2
		expandiface(t)
		if len(t.allMethods.Slice()) == 0 {
			t.setAlg(ANILINTER)
		} else {
			t.setAlg(AINTER)
		}
		t.ptrBytes = int64(2 * PtrSize)

	case TCHAN: // implemented as pointer
		w = int64(PtrSize)
		t.intRegs = 1
		t.ptrBytes = int64(PtrSize)

		CheckSize(t.Elem())

		// Make a fake TCHANARGS type to trigger the channel element
		// size check below, once deferred sizes have been resolved.
		t1 := NewChanArgs(t)
		CheckSize(t1)

	case TCHANARGS:
		t1 := t.ChanArgs()
		CalcSize(t1) // just in case
		// Make sure the element's size has been calculated before
		// checking it against the limit below.
		CalcSize(t1.Elem())
		if t1.Elem().width >= 1<<16 {
			base.Errorf("channel element type too large (>64kB)")
		}
		w = 1 // anything will do

	case TMAP: // implemented as pointer
		w = int64(PtrSize)
		t.intRegs = 1
		CheckSize(t.Elem())
		CheckSize(t.Key())
		t.setAlg(ANOEQ)
		t.ptrBytes = int64(PtrSize)

	case TFORW:
		base.Fatalf("invalid recursive type %v", t)

	case TANY:
		base.Fatalf("CalcSize any")

	case TSTRING:
		if StringSize == 0 {
			base.Fatalf("early CalcSize string")
		}
		w = StringSize
		t.align = uint8(PtrSize)
		t.intRegs = 2
		t.setAlg(ASTRING)
		t.ptrBytes = int64(PtrSize)

	case TARRAY:
		if t.Elem() == nil {
			break
		}

		CalcSize(t.Elem())
		t.SetNotInHeap(t.Elem().NotInHeap())
		if t.Elem().width != 0 {
			cap := (uint64(MaxWidth) - 1) / uint64(t.Elem().width)
			if uint64(t.NumElem()) > cap {
				base.Errorf("type %L larger than address space", t)
			}
		}
		w = t.NumElem() * t.Elem().width
		t.align = t.Elem().align

		// Only zero- and one-element arrays can be passed in
		// registers; larger arrays are marked with the sentinel
		// math.MaxUint8.
		switch t.NumElem() {
		case 0:
			t.intRegs = 0
			t.floatRegs = 0
		case 1:
			t.intRegs = t.Elem().intRegs
			t.floatRegs = t.Elem().floatRegs
		default:
			t.intRegs = math.MaxUint8
			t.floatRegs = math.MaxUint8
		}
		switch a := t.Elem().alg; a {
		case AMEM, ANOEQ, ANOALG:
			t.setAlg(a)
		default:
			switch t.NumElem() {
			case 0:
				// We checked above that the element type is comparable.
				t.setAlg(AMEM)
			case 1:
				// Single-element array is same as its lone element.
				t.setAlg(a)
			default:
				t.setAlg(ASPECIAL)
			}
		}
		if t.NumElem() > 0 {
			x := PtrDataSize(t.Elem())
			if x > 0 {
				t.ptrBytes = t.Elem().width*(t.NumElem()-1) + x
			}
		}

	case TSLICE:
		if t.Elem() == nil {
			break
		}
		w = SliceSize
		CheckSize(t.Elem())
		t.align = uint8(PtrSize)
		t.intRegs = 3
		t.setAlg(ANOEQ)
		if !t.Elem().NotInHeap() {
			t.ptrBytes = int64(PtrSize)
		}

	case TSTRUCT:
		if t.IsFuncArgStruct() {
			base.Fatalf("CalcSize fn struct %v", t)
		}
		CalcStructSize(t)
		w = t.width

	// make fake type to check later to
	// trigger function argument computation.
	case TFUNC:
		t1 := NewFuncArgs(t)
		CheckSize(t1)
		w = int64(PtrSize) // width of func type is pointer
		t.intRegs = 1
		t.setAlg(ANOEQ)
		t.ptrBytes = int64(PtrSize)

	// A function's arguments are three concatenated structs (receiver,
	// params, results); compute their widths as a side effect.
	case TFUNCARGS:
		t1 := t.FuncArgs()

		w = calcStructOffset(t1, t1.Recvs(), 0)
		w = calcStructOffset(t1, t1.Params(), w)
		w = RoundUp(w, int64(RegSize))
		w = calcStructOffset(t1, t1.Results(), w)
		w = RoundUp(w, int64(RegSize))
		t1.extra.(*Func).Argwid = w
		t.align = 1
	}

	if PtrSize == 4 && w != int64(int32(w)) {
		base.Errorf("type %v too large", t)
	}

	t.width = w
	if t.align == 0 {
		if w == 0 || w > 8 || w&(w-1) != 0 {
			base.Fatalf("invalid alignment for %v", t)
		}
		t.align = uint8(w)
	}

	base.Pos = lno

	ResumeCheckSize()
}
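
// For illustration, on a 64-bit target CalcSize computes: size 4 and
// align 4 for int32; size 16 and align 8 for string and for any; size 24
// and align 8 for []T; and size 8 (one pointer word) for chan T,
// map[K]V, and *T.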

// CalcStructSize calculates the size of t,
// filling in t.width, t.align, t.intRegs, and t.floatRegs,
// even if size calculation is otherwise disabled.
func CalcStructSize(t *Type) {
	var maxAlign uint8 = 1

	// Recognize special types. This logic is duplicated in go/types and
	// cmd/compile/internal/types2.
	if sym := t.Sym(); sym != nil {
		switch {
		case sym.Name == "align64" && isAtomicStdPkg(sym.Pkg):
			maxAlign = 8
		}
	}

	fields := t.Fields()
	size := calcStructOffset(t, fields, 0)

	// For non-zero-sized structs which end in a zero-sized field, we
	// add an extra byte of padding to the type. This padding ensures
	// that taking the address of a zero-sized field can't manufacture a
	// pointer to the next object in the heap. See issue 9401.
	if size > 0 && fields[len(fields)-1].Type.width == 0 {
		size++
	}

	var intRegs, floatRegs uint64
	for _, field := range fields {
		typ := field.Type

		// The alignment of a struct type is the maximum alignment of
		// its field types.
		if align := typ.align; align > maxAlign {
			maxAlign = align
		}

		// Sum the per-field register counts in uint64, so overflow can
		// be detected and clamped below.
		intRegs += uint64(typ.intRegs)
		floatRegs += uint64(typ.floatRegs)
	}

	// Final size includes trailing padding.
	size = RoundUp(size, int64(maxAlign))

	if intRegs > math.MaxUint8 || floatRegs > math.MaxUint8 {
		intRegs = math.MaxUint8
		floatRegs = math.MaxUint8
	}

	t.width = size
	t.align = maxAlign
	t.intRegs = uint8(intRegs)
	t.floatRegs = uint8(floatRegs)

	// Compute the equality/hash algorithm kind.
	t.alg = AMEM // default
	if t.Noalg() {
		t.setAlg(ANOALG)
	}
	if len(fields) == 1 && !fields[0].Sym.IsBlank() {
		// One-field struct is same as that one field alone.
		t.setAlg(fields[0].Type.alg)
	} else {
		for i, f := range fields {
			a := f.Type.alg
			switch a {
			case ANOEQ, ANOALG:
			case AMEM:
				// Blank fields and fields followed by padding need a
				// special compare.
				if f.Sym.IsBlank() || IsPaddedField(t, i) {
					a = ASPECIAL
				}
			default:
				// Fields with non-memory equality need a special compare.
				a = ASPECIAL
			}
			t.setAlg(a)
		}
	}

	// Compute ptrBytes: find the last field containing pointers.
	for i := len(fields) - 1; i >= 0; i-- {
		f := fields[i]
		if size := PtrDataSize(f.Type); size > 0 {
			t.ptrBytes = f.Offset + size
			break
		}
	}
}
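
// For example, on a 64-bit target, struct{ b bool; p *int } gets b at
// offset 0 and p at offset 8 (after 7 bytes of padding), for size 16,
// align 8, and ptrBytes 16.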

// widthCalculated reports whether t's size and alignment
// have already been computed.
func (t *Type) widthCalculated() bool {
	return t.align > 0
}

// When a type's width should be known, we call CheckSize
// to compute it. During a declaration like
//
//	type T *struct { next T }
//
// it is necessary to defer the calculation of the struct width
// until after T has been initialized to be a pointer to that struct.
// Similarly, during import processing structs may be used
// before their definition. In those situations, calling
// CheckSize() stores the incomplete type on deferredTypeStack,
// and the calculation is deferred.
//
// DeferCheckSize() must be called before initiating the calculation
// of the struct width. When the calculation is complete,
// ResumeCheckSize() is called and the deferred widths are calculated.
var deferredTypeStack []*Type

func CheckSize(t *Type) {
	if t == nil {
		return
	}

	// function arg structs should not be checked
	// outside of the enclosing function.
	if t.IsFuncArgStruct() {
		base.Fatalf("CheckSize %v", t)
	}

	if defercalc == 0 {
		CalcSize(t)
		return
	}

	// if type has not yet been pushed on deferredTypeStack, do it now
	if !t.Deferwidth() {
		t.SetDeferwidth(true)
		deferredTypeStack = append(deferredTypeStack, t)
	}
}

// DeferCheckSize marks the start of a region in which CheckSize calls
// are deferred; regions nest, and each is ended by ResumeCheckSize.
func DeferCheckSize() {
	defercalc++
}

// ResumeCheckSize ends a DeferCheckSize region. When the outermost
// region ends, all deferred size calculations are performed.
func ResumeCheckSize() {
	if defercalc == 1 {
		for len(deferredTypeStack) > 0 {
			t := deferredTypeStack[len(deferredTypeStack)-1]
			deferredTypeStack = deferredTypeStack[:len(deferredTypeStack)-1]
			t.SetDeferwidth(false)
			CalcSize(t)
		}
	}

	defercalc--
}
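
// Typical usage, as in CalcSize above:
//
//	DeferCheckSize()
//	CheckSize(t)      // queued on deferredTypeStack instead of running
//	ResumeCheckSize() // pops the stack and calls CalcSize on each entry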

// PtrDataSize returns the length in bytes of the prefix of t
// containing pointer data. Anything after this offset is scalar data.
//
// PtrDataSize is only defined for actual Go types. It's an error to
// use it on compiler-internal types (e.g., TSSA, TRESULTS).
func PtrDataSize(t *Type) int64 {
	CalcSize(t)
	x := t.ptrBytes
	if t.Kind() == TPTR && t.Elem().NotInHeap() {
		// A pointer to a not-in-heap type is not a GC pointer.
		// The element's not-in-heap bit may not have been computed yet
		// when ptrBytes was filled in above, so correct for it here.
		x = 0
	}
	return x
}
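
// For example, on a 64-bit target, struct{ p *int; x int } has
// PtrDataSize 8: only the first 8 bytes of the struct can contain
// pointers, and the trailing int is scalar data.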