// Copyright 2009 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

package walk

import (
	"go/constant"
	"internal/abi"

	"cmd/compile/internal/base"
	"cmd/compile/internal/ir"
	"cmd/compile/internal/reflectdata"
	"cmd/compile/internal/typecheck"
	"cmd/compile/internal/types"
	"cmd/internal/src"
)

// walkAssign walks an OAS (AssignExpr) or OASOP (AssignOpExpr) node.
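//
// As a minimal sketch (x, op, and y are placeholders, not names from this
// file), the OASOP case is first rewritten in terms of plain assignment:
//
//	x op= y   =>   x = x op y
//
// so later passes only ever see the OAS form.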
func walkAssign(init *ir.Nodes, n ir.Node) ir.Node {
	init.Append(ir.TakeInit(n)...)

	var left, right ir.Node
	switch n.Op() {
	case ir.OAS:
		n := n.(*ir.AssignStmt)
		left, right = n.X, n.Y
	case ir.OASOP:
		n := n.(*ir.AssignOpStmt)
		left, right = n.X, n.Y
	}

	// Recognize m[k] = append(m[k], ...) so we can reuse
	// the mapassign call.
	var mapAppend *ir.CallExpr
	if left.Op() == ir.OINDEXMAP && right.Op() == ir.OAPPEND {
		left := left.(*ir.IndexExpr)
		mapAppend = right.(*ir.CallExpr)
		if !ir.SameSafeExpr(left, mapAppend.Args[0]) {
			base.Fatalf("not same expressions: %v != %v", left, mapAppend.Args[0])
		}
	}

	left = walkExpr(left, init)
	left = safeExpr(left, init)
	if mapAppend != nil {
		mapAppend.Args[0] = left
	}

	if n.Op() == ir.OASOP {
		// Rewrite x op= y into x = x op y.
		n = ir.NewAssignStmt(base.Pos, left, typecheck.Expr(ir.NewBinaryExpr(base.Pos, n.(*ir.AssignOpStmt).AsOp, left, right)))
	} else {
		n.(*ir.AssignStmt).X = left
	}
	as := n.(*ir.AssignStmt)

	if oaslit(as, init) {
		return ir.NewBlockStmt(as.Pos(), nil)
	}

	if as.Y == nil {
		// TODO(austin): Check all "implicit zeroing"
		return as
	}

	if !base.Flag.Cfg.Instrumenting && ir.IsZero(as.Y) {
		return as
	}

	switch as.Y.Op() {
	default:
		as.Y = walkExpr(as.Y, init)

	case ir.ORECV:
		// x = <-c; as.X is x and recv.X is the channel c.
		// order.stmt made sure x is addressable.
		recv := as.Y.(*ir.UnaryExpr)
		recv.X = walkExpr(recv.X, init)

		n1 := typecheck.NodAddr(as.X)
		r := recv.X // the channel
		return mkcall1(chanfn("chanrecv1", 2, r.Type()), nil, init, r, n1)

	case ir.OAPPEND:
		// x = append(...)
		call := as.Y.(*ir.CallExpr)
		if call.Type().Elem().NotInHeap() {
			base.Errorf("%v can't be allocated in Go; it is incomplete (or unallocatable)", call.Type().Elem())
		}
		var r ir.Node
		switch {
		case isAppendOfMake(call):
			// x = append(y, make([]T, y)...)
			r = extendSlice(call, init)
		case call.IsDDD:
			r = appendSlice(call, init) // also works for append(slice, string)
		default:
			r = walkAppend(call, init, as)
		}
		as.Y = r
		if r.Op() == ir.OAPPEND {
			r := r.(*ir.CallExpr)
			// Left in place for back end.
			// Do not add a new write barrier.
			// Set up address of type for back end.
			r.Fun = reflectdata.AppendElemRType(base.Pos, r)
			return as
		}
		// Otherwise, lowered for race detector.
		// Treat as ordinary assignment.
	}

	if as.X != nil && as.Y != nil {
		return convas(as, init)
	}
	return as
}

// walkAssignDotType walks an OAS2DOTTYPE node.
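//
// That is, comma-ok type assertions of the form (v, x, and T are
// placeholders):
//
//	v, ok = x.(T)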
func walkAssignDotType(n *ir.AssignListStmt, init *ir.Nodes) ir.Node {
	walkExprListSafe(n.Lhs, init)
	n.Rhs[0] = walkExpr(n.Rhs[0], init)
	return n
}

// walkAssignFunc walks an OAS2FUNC node.
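//
// That is, multi-value function call assignments of the form (a, b, and f
// are placeholders):
//
//	a, b = f()
//
// Unless the call is an intrinsic, it is evaluated on its own first, and
// the results are then copied out of its result slots by ascompatet.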
func walkAssignFunc(init *ir.Nodes, n *ir.AssignListStmt) ir.Node {
	init.Append(ir.TakeInit(n)...)

	r := n.Rhs[0]
	walkExprListSafe(n.Lhs, init)
	r = walkExpr(r, init)

	if ir.IsIntrinsicCall(r.(*ir.CallExpr)) {
		n.Rhs = []ir.Node{r}
		return n
	}
	init.Append(r)

	ll := ascompatet(n.Lhs, r.Type())
	return ir.NewBlockStmt(src.NoXPos, ll)
}

// walkAssignList walks an OAS2 node.
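//
// That is, parallel assignments of the form (names are placeholders):
//
//	a, b = x, y
//
// ascompatee introduces temporaries as needed so that, for example,
// a, b = b, a reads the old value of b before a is overwritten.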
func walkAssignList(init *ir.Nodes, n *ir.AssignListStmt) ir.Node {
	init.Append(ir.TakeInit(n)...)
	return ir.NewBlockStmt(src.NoXPos, ascompatee(ir.OAS, n.Lhs, n.Rhs))
}

// walkAssignMapRead walks an OAS2MAPR node.
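//
// That is, comma-ok map reads of the form (v, m, and k are placeholders):
//
//	v, ok = m[k]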
func walkAssignMapRead(init *ir.Nodes, n *ir.AssignListStmt) ir.Node {
	init.Append(ir.TakeInit(n)...)

	r := n.Rhs[0].(*ir.IndexExpr)
	walkExprListSafe(n.Lhs, init)
	r.X = walkExpr(r.X, init)
	r.Index = walkExpr(r.Index, init)
	t := r.X.Type()

	fast := mapfast(t)
	key := mapKeyArg(fast, r, r.Index, false)

	// Rewrite:
	//	a, b = m[i]
	// into:
	//	var, b = mapaccess2*(t, m, i)
	//	a = *var
	a := n.Lhs[0]

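	// Values no wider than abi.ZeroValSize use the type-specialized
	// mapaccess2 variants; wider values take the mapaccess2_fat path,
	// which is also passed the address of a shared zero region to
	// return when the key is absent.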
	var call *ir.CallExpr
	if w := t.Elem().Size(); w <= abi.ZeroValSize {
		fn := mapfn(mapaccess2[fast], t, false)
		call = mkcall1(fn, fn.Type().ResultsTuple(), init, reflectdata.IndexMapRType(base.Pos, r), r.X, key)
	} else {
		fn := mapfn("mapaccess2_fat", t, true)
		z := reflectdata.ZeroAddr(w)
		call = mkcall1(fn, fn.Type().ResultsTuple(), init, reflectdata.IndexMapRType(base.Pos, r), r.X, key, z)
	}

	// mapaccess2* returns a typed bool, but due to spec changes the
	// boolean result of a comma-ok expression is untyped, so we make it
	// the same type as the variable on the lhs.
	if ok := n.Lhs[1]; !ir.IsBlank(ok) && ok.Type().IsBoolean() {
		call.Type().Field(1).Type = ok.Type()
	}
	n.Rhs = []ir.Node{call}
	n.SetOp(ir.OAS2FUNC)

	// Don't generate a = *var if a is the blank identifier.
	if ir.IsBlank(a) {
		return walkExpr(typecheck.Stmt(n), init)
	}

	var_ := typecheck.TempAt(base.Pos, ir.CurFunc, types.NewPtr(t.Elem()))
	var_.SetTypecheck(1)
	var_.MarkNonNil() // mapaccess always returns a non-nil pointer

	n.Lhs[0] = var_
	init.Append(walkExpr(n, init))

	as := ir.NewAssignStmt(base.Pos, a, ir.NewStarExpr(base.Pos, var_))
	return walkExpr(typecheck.Stmt(as), init)
}

// walkAssignRecv walks an OAS2RECV node.
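//
// That is, comma-ok channel receives (v and c are placeholders), lowered
// roughly as:
//
//	v, ok = <-c   =>   ok = chanrecv2(c, &v)
//
// with nil passed instead of &v when v is the blank identifier.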
func walkAssignRecv(init *ir.Nodes, n *ir.AssignListStmt) ir.Node {
	init.Append(ir.TakeInit(n)...)

	r := n.Rhs[0].(*ir.UnaryExpr) // recv
	walkExprListSafe(n.Lhs, init)
	r.X = walkExpr(r.X, init)
	var n1 ir.Node
	if ir.IsBlank(n.Lhs[0]) {
		n1 = typecheck.NodNil()
	} else {
		n1 = typecheck.NodAddr(n.Lhs[0])
	}
	fn := chanfn("chanrecv2", 2, r.X.Type())
	ok := n.Lhs[1]
	call := mkcall1(fn, types.Types[types.TBOOL], init, r.X, n1)
	return typecheck.Stmt(ir.NewAssignStmt(base.Pos, ok, call))
}

// walkReturn walks an ORETURN node.
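//
// As a rough sketch, in a function whose result parameters are r0 and r1
// (placeholder names), a statement
//
//	return a, b
//
// is lowered so the results are assigned to the result slots first:
//
//	r0, r1 = a, b
//	return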
func walkReturn(n *ir.ReturnStmt) ir.Node {
	fn := ir.CurFunc

	fn.NumReturns++
	if len(n.Results) == 0 {
		return n
	}

	results := fn.Type().Results()
	dsts := make([]ir.Node, len(results))
	for i, v := range results {
		// TODO(mdempsky): typecheck should have already checked the result variables.
		dsts[i] = typecheck.AssignExpr(v.Nname.(*ir.Name))
	}

	n.Results = ascompatee(n.Op(), dsts, n.Results)
	return n
}

// ascompatet prepares the assignment of a function call's multi-valued
// result to a list of expressions, as in:
//
//	expr-list = func()
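//
// By this point the order pass has rewritten the destinations to autotemps,
// so for a two-result call this emits, in effect (placeholder names):
//
//	tmp0 = <result 0>
//	tmp1 = <result 1>
//
// with blank destinations skipped entirely.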
func ascompatet(nl ir.Nodes, nr *types.Type) []ir.Node {
	if len(nl) != nr.NumFields() {
		base.Fatalf("ascompatet: assignment count mismatch: %d = %d", len(nl), nr.NumFields())
	}

	var nn ir.Nodes
	for i, l := range nl {
		if ir.IsBlank(l) {
			continue
		}
		r := nr.Field(i)

		// Order should have created autotemps of the appropriate type for
		// us to store results into.
		if tmp, ok := l.(*ir.Name); !ok || !tmp.AutoTemp() || !types.Identical(tmp.Type(), r.Type) {
			base.FatalfAt(l.Pos(), "assigning %v to %+v", r.Type, l)
		}

		res := ir.NewResultExpr(base.Pos, nil, types.BADWIDTH)
		res.Index = int64(i)
		res.SetType(r.Type)
		res.SetTypecheck(1)

		nn.Append(ir.NewAssignStmt(base.Pos, l, res))
	}
	return nn
}

// ascompatee prepares the parallel assignment of one expression list to
// another, as in:
//
//	expr-list = expr-list
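//
// The subtle part is evaluation order: an assignment earlier in the list
// must not change the meaning of expressions used later in it. For example
// (placeholder names):
//
//	x, y = y, x
//
// must read the old value of y, so ascompatee copies affected operands to
// temporaries first (the "early" statements) and only then performs the
// assignments (the "late" statements).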
func ascompatee(op ir.Op, nl, nr []ir.Node) []ir.Node {
	// cannot happen: should have been rejected during type checking
	if len(nl) != len(nr) {
		base.Fatalf("assignment operands mismatch: %+v / %+v", ir.Nodes(nl), ir.Nodes(nr))
	}

	var assigned ir.NameSet
	var memWrite, deferResultWrite bool

	// affected reports whether expression n could be affected by
	// the assignments applied so far.
	affected := func(n ir.Node) bool {
		if deferResultWrite {
			return true
		}
		return ir.Any(n, func(n ir.Node) bool {
			if n.Op() == ir.ONAME && assigned.Has(n.(*ir.Name)) {
				return true
			}
			if memWrite && readsMemory(n) {
				return true
			}
			return false
		})
	}

	// If a needed expression may be affected by an
	// earlier assignment, make an early copy of that
	// expression and use the copy instead.
	var early ir.Nodes
	save := func(np *ir.Node) {
		if n := *np; affected(n) {
			*np = copyExpr(n, n.Type(), &early)
		}
	}

	var late ir.Nodes
	for i, lorig := range nl {
		l, r := lorig, nr[i]

		// Do not generate 'x = x' during return. See issue 4014.
		if op == ir.ORETURN && ir.SameSafeExpr(l, r) {
			continue
		}

		// Save subexpressions needed on left side.
		// Drill through non-dereferences.
		for {
			// Statements in l's init list must be evaluated
			// before any of its saved sub-operands, since they
			// may affect their values.
			init := ir.TakeInit(l)
			walkStmtList(init)
			early.Append(init...)

			switch ll := l.(type) {
			case *ir.IndexExpr:
				if ll.X.Type().IsArray() {
					save(&ll.Index)
					l = ll.X
					continue
				}
			case *ir.ParenExpr:
				l = ll.X
				continue
			case *ir.SelectorExpr:
				if ll.Op() == ir.ODOT {
					l = ll.X
					continue
				}
			}
			break
		}

		var name *ir.Name
		switch l.Op() {
		default:
			base.Fatalf("unexpected lvalue %v", l.Op())
		case ir.ONAME:
			name = l.(*ir.Name)
		case ir.OINDEX, ir.OINDEXMAP:
			l := l.(*ir.IndexExpr)
			save(&l.X)
			save(&l.Index)
		case ir.ODEREF:
			l := l.(*ir.StarExpr)
			save(&l.X)
		case ir.ODOTPTR:
			l := l.(*ir.SelectorExpr)
			save(&l.X)
		}

		// Save expression on right side.
		save(&r)

		appendWalkStmt(&late, convas(ir.NewAssignStmt(base.Pos, lorig, r), &late))

		// Check for reasons why we may need to compute later expressions
		// before this assignment happens.

		if name == nil {
			// Not a direct assignment to a declared variable.
			// Conservatively assume any memory access might alias.
			memWrite = true
			continue
		}

		if name.Class == ir.PPARAMOUT && ir.CurFunc.HasDefer() {
			// Assignments to a result parameter in a function with defers
			// become visible early if evaluation of any later expression
			// panics (#43835).
			deferResultWrite = true
			continue
		}

		if ir.IsBlank(name) {
			// Assignments to the blank identifier have no effect
			// and cannot be read back, so they can be ignored.
			continue
		}

		if name.Addrtaken() || !name.OnStack() {
			// Global variable, heap escaped, or just addrtaken.
			// Conservatively assume any memory access might alias.
			memWrite = true
			continue
		}

		// Local, non-addrtaken variable.
		// Assignments can only alias with direct uses of this variable.
		assigned.Add(name)
	}

	early.Append(late.Take()...)
	return early
}

// readsMemory reports whether the evaluation of n directly reads from
// memory that might be written to indirectly.
func readsMemory(n ir.Node) bool {
	switch n.Op() {
	case ir.ONAME:
		n := n.(*ir.Name)
		if n.Class == ir.PFUNC {
			return false
		}
		return n.Addrtaken() || !n.OnStack()

	case ir.OADD,
		ir.OAND,
		ir.OANDAND,
		ir.OANDNOT,
		ir.OBITNOT,
		ir.OCONV,
		ir.OCONVIFACE,
		ir.OCONVNOP,
		ir.ODIV,
		ir.ODOT,
		ir.ODOTTYPE,
		ir.OLITERAL,
		ir.OLSH,
		ir.OMOD,
		ir.OMUL,
		ir.ONEG,
		ir.ONIL,
		ir.OOR,
		ir.OOROR,
		ir.OPAREN,
		ir.OPLUS,
		ir.ORSH,
		ir.OSUB,
		ir.OXOR:
		return false
	}

	// Be conservative.
	return true
}

// appendSlice expands append(l1, l2...) to
//
//	init {
//	  s := l1
//	  newLen := s.len + l2.len
//	  // Compare as uint so growslice can panic on overflow.
//	  if uint(newLen) <= uint(s.cap) {
//	    s = s[:newLen]
//	  } else {
//	    s = growslice(s.ptr, newLen, s.cap, l2.len, T)
//	  }
//	  memmove(&s[s.len-l2.len], &l2[0], l2.len*sizeof(T))
//	}
//	s
//
// l2 is allowed to be a string.
func appendSlice(n *ir.CallExpr, init *ir.Nodes) ir.Node {
	walkAppendArgs(n, init)

	l1 := n.Args[0]
	l2 := n.Args[1]
	l2 = cheapExpr(l2, init)
	n.Args[1] = l2

	var nodes ir.Nodes

	// var s []T
	s := typecheck.TempAt(base.Pos, ir.CurFunc, l1.Type())
	nodes.Append(ir.NewAssignStmt(base.Pos, s, l1)) // s = l1

	elemtype := s.Type().Elem()

	// Decompose slice.
	oldPtr := ir.NewUnaryExpr(base.Pos, ir.OSPTR, s)
	oldLen := ir.NewUnaryExpr(base.Pos, ir.OLEN, s)
	oldCap := ir.NewUnaryExpr(base.Pos, ir.OCAP, s)

	// Number of elements we are adding.
	num := ir.NewUnaryExpr(base.Pos, ir.OLEN, l2)

	// newLen := oldLen + num
	newLen := typecheck.TempAt(base.Pos, ir.CurFunc, types.Types[types.TINT])
	nodes.Append(ir.NewAssignStmt(base.Pos, newLen, ir.NewBinaryExpr(base.Pos, ir.OADD, oldLen, num)))

	// if uint(newLen) <= uint(oldCap)
	nif := ir.NewIfStmt(base.Pos, nil, nil, nil)
	nuint := typecheck.Conv(newLen, types.Types[types.TUINT])
	scapuint := typecheck.Conv(oldCap, types.Types[types.TUINT])
	nif.Cond = ir.NewBinaryExpr(base.Pos, ir.OLE, nuint, scapuint)
	nif.Likely = true

	// then { s = s[:newLen] }
	slice := ir.NewSliceExpr(base.Pos, ir.OSLICE, s, nil, newLen, nil)
	slice.SetBounded(true)
	nif.Body = []ir.Node{ir.NewAssignStmt(base.Pos, s, slice)}

	// else { s = growslice(oldPtr, newLen, oldCap, num, T) }
	call := walkGrowslice(s, nif.PtrInit(), oldPtr, newLen, oldCap, num)
	nif.Else = []ir.Node{ir.NewAssignStmt(base.Pos, s, call)}

	nodes.Append(nif)

	// Index to start copying into s.
	//	idx = newLen - len(l2)
	// We use this expression instead of oldLen because it avoids
	// a spill/restore of oldLen.
	// Note: this doesn't work optimally currently because
	// the compiler optimizer undoes this arithmetic.
	idx := ir.NewBinaryExpr(base.Pos, ir.OSUB, newLen, ir.NewUnaryExpr(base.Pos, ir.OLEN, l2))

	var ncopy ir.Node
	if elemtype.HasPointers() {
		// copy(s[idx:], l2)
		slice := ir.NewSliceExpr(base.Pos, ir.OSLICE, s, idx, nil, nil)
		slice.SetType(s.Type())
		slice.SetBounded(true)

		ir.CurFunc.SetWBPos(n.Pos())

		// instantiate typedslicecopy(typ *type, dstPtr *any, dstLen int, srcPtr *any, srcLen int) int
		fn := typecheck.LookupRuntime("typedslicecopy", l1.Type().Elem(), l2.Type().Elem())
		ptr1, len1 := backingArrayPtrLen(cheapExpr(slice, &nodes))
		ptr2, len2 := backingArrayPtrLen(l2)
		ncopy = mkcall1(fn, types.Types[types.TINT], &nodes, reflectdata.AppendElemRType(base.Pos, n), ptr1, len1, ptr2, len2)
	} else if base.Flag.Cfg.Instrumenting && !base.Flag.CompilingRuntime {
		// rely on runtime to instrument:
		//	copy(s[idx:], l2)
		// l2 can be a slice or string.
		slice := ir.NewSliceExpr(base.Pos, ir.OSLICE, s, idx, nil, nil)
		slice.SetType(s.Type())
		slice.SetBounded(true)

		ptr1, len1 := backingArrayPtrLen(cheapExpr(slice, &nodes))
		ptr2, len2 := backingArrayPtrLen(l2)

		fn := typecheck.LookupRuntime("slicecopy", ptr1.Type().Elem(), ptr2.Type().Elem())
		ncopy = mkcall1(fn, types.Types[types.TINT], &nodes, ptr1, len1, ptr2, len2, ir.NewInt(base.Pos, elemtype.Size()))
	} else {
		// memmove(&s[idx], &l2[0], len(l2)*sizeof(T))
		ix := ir.NewIndexExpr(base.Pos, s, idx)
		ix.SetBounded(true)
		addr := typecheck.NodAddr(ix)

		sptr := ir.NewUnaryExpr(base.Pos, ir.OSPTR, l2)

		nwid := cheapExpr(typecheck.Conv(ir.NewUnaryExpr(base.Pos, ir.OLEN, l2), types.Types[types.TUINTPTR]), &nodes)
		nwid = ir.NewBinaryExpr(base.Pos, ir.OMUL, nwid, ir.NewInt(base.Pos, elemtype.Size()))

		// instantiate func memmove(to *any, frm *any, length uintptr)
		fn := typecheck.LookupRuntime("memmove", elemtype, elemtype)
		ncopy = mkcall1(fn, nil, &nodes, addr, sptr, nwid)
	}
	ln := append(nodes, ncopy)

	typecheck.Stmts(ln)
	walkStmtList(ln)
	init.Append(ln...)
	return s
}

// isAppendOfMake reports whether n is of the form append(x, make([]T, y)...).
// isAppendOfMake assumes n has already been typechecked.
func isAppendOfMake(n ir.Node) bool {
	if base.Flag.N != 0 || base.Flag.Cfg.Instrumenting {
		return false
	}

	if n.Typecheck() == 0 {
		base.Fatalf("missing typecheck: %+v", n)
	}

	if n.Op() != ir.OAPPEND {
		return false
	}
	call := n.(*ir.CallExpr)
	if !call.IsDDD || len(call.Args) != 2 || call.Args[1].Op() != ir.OMAKESLICE {
		return false
	}

	mk := call.Args[1].(*ir.MakeExpr)
	if mk.Cap != nil {
		return false
	}

	// The make length y must either be an integer constant or a variable
	// whose largest possible positive value fits into a uint.
	//
	// typecheck has already ensured that constant arguments to make are
	// not negative and fit into an int.
	//
	// Overflow when converting a variable y to int is caught at runtime
	// by the explicit l2 >= 0 check generated in extendSlice.
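	//
	// For example (illustrative only): with a 4-byte uint, a variable y of
	// type int64 is rejected here (8 > 4), while a uint32 y always fits.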
	y := mk.Len
	if !ir.IsConst(y, constant.Int) && y.Type().Size() > types.Types[types.TUINT].Size() {
		return false
	}

	return true
}

// extendSlice rewrites append(l1, make([]T, l2)...) to
//
//	init {
//	  if l2 >= 0 { // Empty if block here for more meaningful node.SetLikely(true)
//	  } else {
//	    panicmakeslicelen()
//	  }
//	  s := l1
//	  if l2 != 0 {
//	    n := len(s) + l2
//	    // Compare n and s as uint so growslice can panic on overflow of len(s) + l2.
//	    // cap is a positive int and n can become negative when len(s) + l2
//	    // overflows int. Interpreting n when negative as uint makes it larger
//	    // than cap(s). growslice will check the int n arg and panic if n is
//	    // negative. This prevents the overflow from being undetected.
//	    if uint(n) <= uint(cap(s)) {
//	      s = s[:n]
//	    } else {
//	      s = growslice(s.ptr, n, s.cap, l2, T)
//	    }
//	  }
//	  // Clear the new portion of the underlying array.
//	  hp := &s[len(s)-l2]
//	  hn := l2 * sizeof(T)
//	  memclr(hp, hn)
//	}
//	s
//
// If T has pointers, the final memclr goes inside the "then" branch above,
// as growslice will have done the clearing for us in the "else" branch.
func extendSlice(n *ir.CallExpr, init *ir.Nodes) ir.Node {
	// isAppendOfMake made sure all possible positive values of l2 fit into a uint.
	// The case of l2 overflow when converting from e.g. uint to int is handled by
	// an explicit check of l2 < 0 at runtime, generated below.
	l2 := typecheck.Conv(n.Args[1].(*ir.MakeExpr).Len, types.Types[types.TINT])
	l2 = typecheck.Expr(l2)
	n.Args[1] = l2 // walkAppendArgs expects l2 in n.Args[1]

	walkAppendArgs(n, init)

	l1 := n.Args[0]
	l2 = n.Args[1] // re-read l2, as it may have been updated by walkAppendArgs

	var nodes []ir.Node

	// if l2 >= 0 (likely happens), do nothing
	nifneg := ir.NewIfStmt(base.Pos, ir.NewBinaryExpr(base.Pos, ir.OGE, l2, ir.NewInt(base.Pos, 0)), nil, nil)
	nifneg.Likely = true

	// else panicmakeslicelen()
	nifneg.Else = []ir.Node{mkcall("panicmakeslicelen", nil, init)}
	nodes = append(nodes, nifneg)

	// s := l1
	s := typecheck.TempAt(base.Pos, ir.CurFunc, l1.Type())
	nodes = append(nodes, ir.NewAssignStmt(base.Pos, s, l1))

	// if l2 != 0 {
	// Avoid doing work if we're not appending anything. More importantly,
	// this keeps hp from being a past-the-end pointer when clearing.
	nifnz := ir.NewIfStmt(base.Pos, ir.NewBinaryExpr(base.Pos, ir.ONE, l2, ir.NewInt(base.Pos, 0)), nil, nil)
	nifnz.Likely = true
	nodes = append(nodes, nifnz)

	elemtype := s.Type().Elem()

	// n := len(s) + l2
	nn := typecheck.TempAt(base.Pos, ir.CurFunc, types.Types[types.TINT])
	nifnz.Body = append(nifnz.Body, ir.NewAssignStmt(base.Pos, nn, ir.NewBinaryExpr(base.Pos, ir.OADD, ir.NewUnaryExpr(base.Pos, ir.OLEN, s), l2)))

	// if uint(n) <= uint(cap(s))
	nuint := typecheck.Conv(nn, types.Types[types.TUINT])
	capuint := typecheck.Conv(ir.NewUnaryExpr(base.Pos, ir.OCAP, s), types.Types[types.TUINT])
	nif := ir.NewIfStmt(base.Pos, ir.NewBinaryExpr(base.Pos, ir.OLE, nuint, capuint), nil, nil)
	nif.Likely = true

	// then { s = s[:n] }
	nt := ir.NewSliceExpr(base.Pos, ir.OSLICE, s, nil, nn, nil)
	nt.SetBounded(true)
	nif.Body = []ir.Node{ir.NewAssignStmt(base.Pos, s, nt)}

	// else { s = growslice(s.ptr, n, s.cap, l2, T) }
	nif.Else = []ir.Node{
		ir.NewAssignStmt(base.Pos, s, walkGrowslice(s, nif.PtrInit(),
			ir.NewUnaryExpr(base.Pos, ir.OSPTR, s),
			nn,
			ir.NewUnaryExpr(base.Pos, ir.OCAP, s),
			l2)),
	}

	nifnz.Body = append(nifnz.Body, nif)

	// hp := &s[len(s)-l2]
	ix := ir.NewIndexExpr(base.Pos, s, ir.NewBinaryExpr(base.Pos, ir.OSUB, ir.NewUnaryExpr(base.Pos, ir.OLEN, s), l2))
	ix.SetBounded(true)
	hp := typecheck.ConvNop(typecheck.NodAddr(ix), types.Types[types.TUNSAFEPTR])

	// hn := l2 * sizeof(elemtype)
	hn := typecheck.Conv(ir.NewBinaryExpr(base.Pos, ir.OMUL, l2, ir.NewInt(base.Pos, elemtype.Size())), types.Types[types.TUINTPTR])

	clrname := "memclrNoHeapPointers"
	hasPointers := elemtype.HasPointers()
	if hasPointers {
		clrname = "memclrHasPointers"
		ir.CurFunc.SetWBPos(n.Pos())
	}

	var clr ir.Nodes
	clrfn := mkcall(clrname, nil, &clr, hp, hn)
	clr.Append(clrfn)
	if hasPointers {
		// growslice already cleared the new entries, so the clear is
		// only needed on the in-place (no-grow) path.
		nif.Body = append(nif.Body, clr...)
	} else {
		nifnz.Body = append(nifnz.Body, clr...)
	}

	typecheck.Stmts(nodes)
	walkStmtList(nodes)
	init.Append(nodes...)
	return s
}