1
2
3
4
5 package ssa
6
7 import (
8 "cmd/compile/internal/abi"
9 "cmd/compile/internal/base"
10 "cmd/compile/internal/ir"
11 "cmd/compile/internal/types"
12 "cmd/internal/src"
13 "fmt"
14 )
15
// postExpandCallsDecompose runs the late decomposition passes after
// expandCalls has rewritten all the LE calls.
func postExpandCallsDecompose(f *Func) {
	decomposeUser(f)    // decompose user-defined aggregate types
	decomposeBuiltin(f) // then decompose builtin types
}
20
// expandCalls rewrites the late-expanded ("LE") call ops into their final
// forms (OpStaticLECall -> OpStaticCall, etc.), decomposing the arguments
// and results of calls — and of the function itself — into the registers
// and/or stack locations dictated by each call's AuxCall ABI information.
func expandCalls(f *Func) {
	sp, _ := f.spSb()

	x := &expandState{
		f:               f,
		debug:           f.pass.debug,
		regSize:         f.Config.RegSize,
		sp:              sp,
		typs:            &f.Config.Types,
		wideSelects:     make(map[*Value]*Value),
		commonArgs:      make(map[selKey]*Value),
		commonSelectors: make(map[selKey]*Value),
		memForCall:      make(map[ID]*Value),
	}

	// Select the ops/types used to split 64-bit integers into halves
	// (for 32-bit targets); which half comes "first" depends on byte order.
	if f.Config.BigEndian {
		x.firstOp = OpInt64Hi
		x.secondOp = OpInt64Lo
		x.firstType = x.typs.Int32
		x.secondType = x.typs.UInt32
	} else {
		x.firstOp = OpInt64Lo
		x.secondOp = OpInt64Hi
		x.firstType = x.typs.UInt32
		x.secondType = x.typs.Int32
	}

	// Work lists gathered in a single scan over the function.
	var selects []*Value    // non-memory OpSelectN values to rewrite
	var calls []*Value      // LE call ops to rewrite
	var args []*Value       // OpArg values to decompose
	var exitBlocks []*Block // blocks whose control is an OpMakeResult

	var m0 *Value // the function's initial memory (OpInitMem)

	// Pass 1: collect work and do the rewrites that are safe to do
	// immediately (OpSelectNAddr, and tagging the memory OpSelectN of
	// each call with the number of out-registers it follows).
	for _, b := range f.Blocks {
		for _, v := range b.Values {
			switch v.Op {
			case OpInitMem:
				m0 = v

			case OpClosureLECall, OpInterLECall, OpStaticLECall, OpTailLECall:
				calls = append(calls, v)

			case OpArg:
				args = append(args, v)

			case OpStore:
				// A store of a wide (non-SSA-able) SelectN is handled
				// specially later; record the (unique) store for it.
				if a := v.Args[1]; a.Op == OpSelectN && !CanSSA(a.Type) {
					if a.Uses > 1 {
						panic(fmt.Errorf("Saw double use of wide SelectN %s operand of Store %s",
							a.LongString(), v.LongString()))
					}
					x.wideSelects[a] = v
				}

			case OpSelectN:
				if v.Type == types.TypeMem {
					// Memory projection of a call; each call may have
					// at most one, recorded in memForCall.
					call := v.Args[0]
					aux := call.Aux.(*AuxCall)
					mem := x.memForCall[call.ID]
					if mem == nil {
						v.AuxInt = int64(aux.abiInfo.OutRegistersUsed())
						x.memForCall[call.ID] = v
					} else {
						panic(fmt.Errorf("Saw two memories for call %v, %v and %v", call, mem, v))
					}
				} else {
					selects = append(selects, v)
				}

			case OpSelectNAddr:
				// Address of a call result: rewrite directly to an
				// offset from SP using the ABI-assigned result offset.
				call := v.Args[0]
				which := v.AuxInt
				aux := call.Aux.(*AuxCall)
				pt := v.Type
				off := x.offsetFrom(x.f.Entry, x.sp, aux.OffsetOfResult(which), pt)
				v.copyOf(off)
			}
		}

		if isBlockMultiValueExit(b) {
			exitBlocks = append(exitBlocks, b)
		}
	}

	// Pass 2: decompose the function's own arguments according to their
	// ABI assignments (registers, or an offset from SP).
	for _, v := range args {
		var rc registerCursor
		a := x.prAssignForArg(v)
		aux := x.f.OwnAux
		regs := a.Registers
		var offset int64
		if len(regs) == 0 {
			offset = a.FrameOffset(aux.abiInfo)
		}
		auxBase := x.offsetFrom(x.f.Entry, x.sp, offset, types.NewPtr(v.Type))
		rc.init(regs, aux.abiInfo, nil, auxBase, 0)
		x.rewriteSelectOrArg(f.Entry.Pos, f.Entry, v, v, m0, v.Type, rc)
	}

	// Pass 3: rewrite the non-memory result selections of calls.
	for _, v := range selects {
		if v.Op == OpInvalid { // may have been invalidated by earlier rewrites
			continue
		}

		call := v.Args[0]
		aux := call.Aux.(*AuxCall)
		mem := x.memForCall[call.ID]
		if mem == nil {
			// Synthesize the call's memory projection if none existed.
			mem = call.Block.NewValue1I(call.Pos, OpSelectN, types.TypeMem, int64(aux.abiInfo.OutRegistersUsed()), call)
			x.memForCall[call.ID] = mem
		}

		i := v.AuxInt
		regs := aux.RegsOfResult(i)

		// Wide (non-SSA-able) select feeding a store: turn it into
		// per-register stores, or a Move from the result's stack slot.
		if store := x.wideSelects[v]; store != nil {
			storeAddr := store.Args[0]
			mem := store.Args[2]
			if len(regs) > 0 {
				// Result is in registers: store each register part.
				var rc registerCursor
				rc.init(regs, aux.abiInfo, nil, storeAddr, 0)
				mem = x.rewriteWideSelectToStores(call.Pos, call.Block, v, mem, v.Type, rc)
				store.copyOf(mem)
			} else {
				// Result is in memory: copy it from its SP-relative slot.
				offset := aux.OffsetOfResult(i)
				auxBase := x.offsetFrom(x.f.Entry, x.sp, offset, types.NewPtr(v.Type))

				move := store.Block.NewValue3A(store.Pos, OpMove, types.TypeMem, v.Type, storeAddr, auxBase, mem)
				move.AuxInt = v.Type.Size()
				store.copyOf(move)
			}
			continue
		}

		var auxBase *Value
		if len(regs) == 0 {
			offset := aux.OffsetOfResult(i)
			auxBase = x.offsetFrom(x.f.Entry, x.sp, offset, types.NewPtr(v.Type))
		}
		var rc registerCursor
		rc.init(regs, aux.abiInfo, nil, auxBase, 0)
		x.rewriteSelectOrArg(call.Pos, call.Block, v, v, mem, v.Type, rc)
	}

	// rewriteCall converts one LE call to its final op and result type;
	// argStart skips any closure/itab operands preceding the real args.
	rewriteCall := func(v *Value, newOp Op, argStart int) {
		x.rewriteCallArgs(v, argStart)
		v.Op = newOp
		rts := abi.RegisterTypes(v.Aux.(*AuxCall).abiInfo.OutParams())
		v.Type = types.NewResults(append(rts, types.TypeMem))
	}

	// Pass 4: rewrite the calls themselves.
	for _, v := range calls {
		switch v.Op {
		case OpStaticLECall:
			rewriteCall(v, OpStaticCall, 0)
		case OpTailLECall:
			rewriteCall(v, OpTailCall, 0)
		case OpClosureLECall:
			rewriteCall(v, OpClosureCall, 2) // Args[0]=code ptr, Args[1]=closure
		case OpInterLECall:
			rewriteCall(v, OpInterCall, 1) // Args[0]=code ptr from itab
		}
	}

	// Pass 5: rewrite the function's results at each multi-value exit.
	for _, b := range exitBlocks {
		v := b.Controls[0]
		x.rewriteFuncResults(v, b, f.OwnAux)
		b.SetControl(v)
	}

}
217
218 func (x *expandState) rewriteFuncResults(v *Value, b *Block, aux *AuxCall) {
219
220
221
222
223
224 m0 := v.MemoryArg()
225 mem := m0
226
227 allResults := []*Value{}
228 var oldArgs []*Value
229 argsWithoutMem := v.Args[:len(v.Args)-1]
230
231 for j, a := range argsWithoutMem {
232 oldArgs = append(oldArgs, a)
233 i := int64(j)
234 auxType := aux.TypeOfResult(i)
235 auxBase := b.NewValue2A(v.Pos, OpLocalAddr, types.NewPtr(auxType), aux.NameOfResult(i), x.sp, mem)
236 auxOffset := int64(0)
237 aRegs := aux.RegsOfResult(int64(j))
238 if a.Op == OpDereference {
239 a.Op = OpLoad
240 }
241 var rc registerCursor
242 var result *[]*Value
243 if len(aRegs) > 0 {
244 result = &allResults
245 } else {
246 if a.Op == OpLoad && a.Args[0].Op == OpLocalAddr && a.Args[0].Aux == aux.NameOfResult(i) {
247 continue
248 }
249 }
250 rc.init(aRegs, aux.abiInfo, result, auxBase, auxOffset)
251 mem = x.decomposeAsNecessary(v.Pos, b, a, mem, rc)
252 }
253 v.resetArgs()
254 v.AddArgs(allResults...)
255 v.AddArg(mem)
256 for _, a := range oldArgs {
257 if a.Uses == 0 {
258 if x.debug > 1 {
259 x.Printf("...marking %v unused\n", a.LongString())
260 }
261 x.invalidateRecursively(a)
262 }
263 }
264 v.Type = types.NewResults(append(abi.RegisterTypes(aux.abiInfo.OutParams()), types.TypeMem))
265 return
266 }
267
268 func (x *expandState) rewriteCallArgs(v *Value, firstArg int) {
269 if x.debug > 1 {
270 x.indent(3)
271 defer x.indent(-3)
272 x.Printf("rewriteCallArgs(%s; %d)\n", v.LongString(), firstArg)
273 }
274
275 aux := v.Aux.(*AuxCall)
276 m0 := v.MemoryArg()
277 mem := m0
278 allResults := []*Value{}
279 oldArgs := []*Value{}
280 argsWithoutMem := v.Args[firstArg : len(v.Args)-1]
281
282 sp := x.sp
283 if v.Op == OpTailLECall {
284
285
286 sp = v.Block.NewValue1(src.NoXPos, OpGetCallerSP, x.typs.Uintptr, mem)
287 }
288
289 for i, a := range argsWithoutMem {
290 oldArgs = append(oldArgs, a)
291 auxI := int64(i)
292 aRegs := aux.RegsOfArg(auxI)
293 aType := aux.TypeOfArg(auxI)
294
295 if a.Op == OpDereference {
296 a.Op = OpLoad
297 }
298 var rc registerCursor
299 var result *[]*Value
300 var aOffset int64
301 if len(aRegs) > 0 {
302 result = &allResults
303 } else {
304 aOffset = aux.OffsetOfArg(auxI)
305 }
306 if v.Op == OpTailLECall && a.Op == OpArg && a.AuxInt == 0 {
307
308
309 n := a.Aux.(*ir.Name)
310 if n.Class == ir.PPARAM && n.FrameOffset()+x.f.Config.ctxt.Arch.FixedFrameSize == aOffset {
311 continue
312 }
313 }
314 if x.debug > 1 {
315 x.Printf("...storeArg %s, %v, %d\n", a.LongString(), aType, aOffset)
316 }
317
318 rc.init(aRegs, aux.abiInfo, result, sp, aOffset)
319 mem = x.decomposeAsNecessary(v.Pos, v.Block, a, mem, rc)
320 }
321 var preArgStore [2]*Value
322 preArgs := append(preArgStore[:0], v.Args[0:firstArg]...)
323 v.resetArgs()
324 v.AddArgs(preArgs...)
325 v.AddArgs(allResults...)
326 v.AddArg(mem)
327 for _, a := range oldArgs {
328 if a.Uses == 0 {
329 x.invalidateRecursively(a)
330 }
331 }
332
333 return
334 }
335
// decomposePair decomposes a two-part value a (e.g. a string or complex
// number) by applying ops o0 and o1 to obtain its parts of types t0 and
// t1, recursively decomposing each part. It returns the updated memory.
func (x *expandState) decomposePair(pos src.XPos, b *Block, a, mem *Value, t0, t1 *types.Type, o0, o1 Op, rc *registerCursor) *Value {
	e := b.NewValue1(pos, o0, t0, a)
	pos = pos.WithNotStmt() // only the first part keeps the statement mark
	mem = x.decomposeAsNecessary(pos, b, e, mem, rc.next(t0))
	e = b.NewValue1(pos, o1, t1, a)
	mem = x.decomposeAsNecessary(pos, b, e, mem, rc.next(t1))
	return mem
}
344
345 func (x *expandState) decomposeOne(pos src.XPos, b *Block, a, mem *Value, t0 *types.Type, o0 Op, rc *registerCursor) *Value {
346 e := b.NewValue1(pos, o0, t0, a)
347 pos = pos.WithNotStmt()
348 mem = x.decomposeAsNecessary(pos, b, e, mem, rc.next(t0))
349 return mem
350 }
351
352
353
354
355
356
357
358
359
// decomposeAsNecessary decomposes a for passing as an argument or result,
// placing its pieces in the registers and/or at the memory location
// described by rc. Aggregates are recursively split into components;
// leaf values are either appended to rc's register list or stored to
// memory. It returns the memory following any stores (m0 if none).
func (x *expandState) decomposeAsNecessary(pos src.XPos, b *Block, a, m0 *Value, rc registerCursor) *Value {
	if x.debug > 1 {
		x.indent(3)
		defer x.indent(-3)
	}
	at := a.Type
	if at.Size() == 0 {
		// Zero-size values contribute nothing.
		return m0
	}
	if a.Op == OpDereference {
		// For this rewrite a dereference is equivalent to a load.
		a.Op = OpLoad
	}

	// A non-SSA-able value headed for memory is copied wholesale with
	// an OpMove from the load's source address.
	if !rc.hasRegs() && !CanSSA(at) {
		dst := x.offsetFrom(b, rc.storeDest, rc.storeOffset, types.NewPtr(at))
		if x.debug > 1 {
			x.Printf("...recur store %s at %s\n", a.LongString(), dst.LongString())
		}
		if a.Op == OpLoad {
			m0 = b.NewValue3A(pos, OpMove, types.TypeMem, at, dst, a.Args[0], m0)
			m0.AuxInt = at.Size()
			return m0
		} else {
			panic(fmt.Errorf("Store of not a load"))
		}
	}

	mem := m0
	switch at.Kind() {
	case types.TARRAY:
		et := at.Elem()
		for i := int64(0); i < at.NumElem(); i++ {
			e := b.NewValue1I(pos, OpArraySelect, et, i, a)
			pos = pos.WithNotStmt()
			mem = x.decomposeAsNecessary(pos, b, e, mem, rc.next(et))
		}
		return mem

	case types.TSTRUCT:
		if at.IsSIMD() {
			break // SIMD values are handled as scalars below, not field-by-field
		}
		for i := 0; i < at.NumFields(); i++ {
			et := at.Field(i).Type
			e := b.NewValue1I(pos, OpStructSelect, et, int64(i), a)
			pos = pos.WithNotStmt()
			if x.debug > 1 {
				x.Printf("...recur decompose %s, %v\n", e.LongString(), et)
			}
			mem = x.decomposeAsNecessary(pos, b, e, mem, rc.next(et))
		}
		return mem

	case types.TSLICE:
		mem = x.decomposeOne(pos, b, a, mem, at.Elem().PtrTo(), OpSlicePtr, &rc)
		pos = pos.WithNotStmt()
		mem = x.decomposeOne(pos, b, a, mem, x.typs.Int, OpSliceLen, &rc)
		return x.decomposeOne(pos, b, a, mem, x.typs.Int, OpSliceCap, &rc)

	case types.TSTRING:
		return x.decomposePair(pos, b, a, mem, x.typs.BytePtr, x.typs.Int, OpStringPtr, OpStringLen, &rc)

	case types.TINTER:
		mem = x.decomposeOne(pos, b, a, mem, x.typs.Uintptr, OpITab, &rc)
		pos = pos.WithNotStmt()
		// For an IMake, decompose the underlying data value directly,
		// skipping through single-field wrappers to the payload.
		if a.Op == OpIMake {
			data := a.Args[1]
			for data.Op == OpStructMake || data.Op == OpArrayMake1 {
				// Find the sole non-zero-size component.
				for _, a := range data.Args {
					if a.Type.Size() > 0 {
						data = a
						break
					}
				}
			}
			return x.decomposeAsNecessary(pos, b, data, mem, rc.next(data.Type))
		}
		return x.decomposeOne(pos, b, a, mem, x.typs.BytePtr, OpIData, &rc)

	case types.TCOMPLEX64:
		return x.decomposePair(pos, b, a, mem, x.typs.Float32, x.typs.Float32, OpComplexReal, OpComplexImag, &rc)

	case types.TCOMPLEX128:
		return x.decomposePair(pos, b, a, mem, x.typs.Float64, x.typs.Float64, OpComplexReal, OpComplexImag, &rc)

	case types.TINT64:
		// Split 64-bit ints on 32-bit targets (endian-dependent halves).
		if at.Size() > x.regSize {
			return x.decomposePair(pos, b, a, mem, x.firstType, x.secondType, x.firstOp, x.secondOp, &rc)
		}
	case types.TUINT64:
		if at.Size() > x.regSize {
			return x.decomposePair(pos, b, a, mem, x.typs.UInt32, x.typs.UInt32, x.firstOp, x.secondOp, &rc)
		}
	}

	// Leaf case: a fits in a register or a single store.
	if rc.hasRegs() {
		if x.debug > 1 {
			x.Printf("...recur addArg %s\n", a.LongString())
		}
		rc.addArg(a)
	} else {
		dst := x.offsetFrom(b, rc.storeDest, rc.storeOffset, types.NewPtr(at))
		if x.debug > 1 {
			x.Printf("...recur store %s at %s\n", a.LongString(), dst.LongString())
		}
		mem = b.NewValue3A(pos, OpStore, types.TypeMem, at, dst, a, mem)
	}

	return mem
}
475
476
477
478
479
480
481
482
483
484
485
486
487
488
// rewriteSelectOrArg rewrites a selection of type at out of container,
// which must be either an OpArg or an OpSelectN of a call. rc describes
// the registers and/or memory offset where the selection lives. If a is
// non-nil it is rewritten in place; if a is nil a new value is created.
// Aggregates are reassembled from recursively rewritten leaves
// (OpStructMake, OpSliceMake, ...); leaves become register args/selects
// or loads. Results are memoized in x.commonSelectors (and decomposed
// args additionally in x.commonArgs). The resulting value is returned.
func (x *expandState) rewriteSelectOrArg(pos src.XPos, b *Block, container, a, m0 *Value, at *types.Type, rc registerCursor) *Value {

	if at == types.TypeMem {
		a.copyOf(m0)
		return a
	}

	// makeOf either creates a fresh value or repurposes a in place with
	// the given op and args (clearing its old Aux/AuxInt state).
	makeOf := func(a *Value, op Op, args []*Value) *Value {
		if a == nil {
			a = b.NewValue0(pos, op, at)
			a.AddArgs(args...)
		} else {
			a.resetArgs()
			a.Aux, a.AuxInt = nil, 0
			a.Pos, a.Op, a.Type = pos, op, at
			a.AddArgs(args...)
		}
		return a
	}

	if at.Size() == 0 {
		// Zero-size selections become OpEmpty.
		return makeOf(a, OpEmpty, nil)
	}

	// CSE: reuse a previously built selector for the same container,
	// offset, and type.
	sk := selKey{from: container, size: 0, offsetOrIndex: rc.storeOffset, typ: at}
	dupe := x.commonSelectors[sk]
	if dupe != nil {
		if a == nil {
			return dupe
		}
		a.copyOf(dupe)
		return a
	}

	var argStore [10]*Value
	args := argStore[:0]

	addArg := func(a0 *Value) {
		if a0 == nil {
			as := "<nil>"
			if a != nil {
				as = a.LongString()
			}
			panic(fmt.Errorf("a0 should not be nil, a=%v, container=%v, at=%v", as, container.LongString(), at))
		}
		args = append(args, a0)
	}

	// Aggregate cases: rebuild the value from its rewritten components.
	switch at.Kind() {
	case types.TARRAY:
		et := at.Elem()
		for i := int64(0); i < at.NumElem(); i++ {
			e := x.rewriteSelectOrArg(pos, b, container, nil, m0, et, rc.next(et))
			addArg(e)
		}
		a = makeOf(a, OpArrayMake1, args)
		x.commonSelectors[sk] = a
		return a

	case types.TSTRUCT:
		if at.IsSIMD() {
			break // SIMD values are treated as scalars below
		}
		for i := 0; i < at.NumFields(); i++ {
			et := at.Field(i).Type
			e := x.rewriteSelectOrArg(pos, b, container, nil, m0, et, rc.next(et))
			if e == nil {
				panic(fmt.Errorf("nil e, et=%v, et.Size()=%d, i=%d", et, et.Size(), i))
			}
			addArg(e)
			pos = pos.WithNotStmt()
		}
		if at.NumFields() > MaxStruct && !types.IsDirectIface(at) {
			panic(fmt.Errorf("Too many fields (%d, %d bytes), container=%s", at.NumFields(), at.Size(), container.LongString()))
		}
		a = makeOf(a, OpStructMake, args)
		x.commonSelectors[sk] = a
		return a

	case types.TSLICE:
		addArg(x.rewriteSelectOrArg(pos, b, container, nil, m0, at.Elem().PtrTo(), rc.next(x.typs.BytePtr)))
		pos = pos.WithNotStmt()
		addArg(x.rewriteSelectOrArg(pos, b, container, nil, m0, x.typs.Int, rc.next(x.typs.Int)))
		addArg(x.rewriteSelectOrArg(pos, b, container, nil, m0, x.typs.Int, rc.next(x.typs.Int)))
		a = makeOf(a, OpSliceMake, args)
		x.commonSelectors[sk] = a
		return a

	case types.TSTRING:
		addArg(x.rewriteSelectOrArg(pos, b, container, nil, m0, x.typs.BytePtr, rc.next(x.typs.BytePtr)))
		pos = pos.WithNotStmt()
		addArg(x.rewriteSelectOrArg(pos, b, container, nil, m0, x.typs.Int, rc.next(x.typs.Int)))
		a = makeOf(a, OpStringMake, args)
		x.commonSelectors[sk] = a
		return a

	case types.TINTER:
		addArg(x.rewriteSelectOrArg(pos, b, container, nil, m0, x.typs.Uintptr, rc.next(x.typs.Uintptr)))
		pos = pos.WithNotStmt()
		addArg(x.rewriteSelectOrArg(pos, b, container, nil, m0, x.typs.BytePtr, rc.next(x.typs.BytePtr)))
		a = makeOf(a, OpIMake, args)
		x.commonSelectors[sk] = a
		return a

	case types.TCOMPLEX64:
		addArg(x.rewriteSelectOrArg(pos, b, container, nil, m0, x.typs.Float32, rc.next(x.typs.Float32)))
		pos = pos.WithNotStmt()
		addArg(x.rewriteSelectOrArg(pos, b, container, nil, m0, x.typs.Float32, rc.next(x.typs.Float32)))
		a = makeOf(a, OpComplexMake, args)
		x.commonSelectors[sk] = a
		return a

	case types.TCOMPLEX128:
		addArg(x.rewriteSelectOrArg(pos, b, container, nil, m0, x.typs.Float64, rc.next(x.typs.Float64)))
		pos = pos.WithNotStmt()
		addArg(x.rewriteSelectOrArg(pos, b, container, nil, m0, x.typs.Float64, rc.next(x.typs.Float64)))
		a = makeOf(a, OpComplexMake, args)
		x.commonSelectors[sk] = a
		return a

	case types.TINT64:
		// 64-bit int wider than a register: rebuild from its two halves.
		if at.Size() > x.regSize {
			addArg(x.rewriteSelectOrArg(pos, b, container, nil, m0, x.firstType, rc.next(x.firstType)))
			pos = pos.WithNotStmt()
			addArg(x.rewriteSelectOrArg(pos, b, container, nil, m0, x.secondType, rc.next(x.secondType)))
			if !x.f.Config.BigEndian {
				// OpInt64Make takes (hi, lo); on little-endian the
				// first selected half is lo, so swap.
				args[0], args[1] = args[1], args[0]
			}
			a = makeOf(a, OpInt64Make, args)
			x.commonSelectors[sk] = a
			return a
		}
	case types.TUINT64:
		if at.Size() > x.regSize {
			addArg(x.rewriteSelectOrArg(pos, b, container, nil, m0, x.typs.UInt32, rc.next(x.typs.UInt32)))
			pos = pos.WithNotStmt()
			addArg(x.rewriteSelectOrArg(pos, b, container, nil, m0, x.typs.UInt32, rc.next(x.typs.UInt32)))
			if !x.f.Config.BigEndian {
				// See TINT64 case above.
				args[0], args[1] = args[1], args[0]
			}
			a = makeOf(a, OpInt64Make, args)
			x.commonSelectors[sk] = a
			return a
		}
	}

	// Leaf cases: materialize the selection from its container.
	if container.Op == OpArg {
		if rc.hasRegs() {
			// Register-resident piece of an incoming parameter.
			op, i := rc.ArgOpAndRegisterFor()
			name := container.Aux.(*ir.Name)
			a = makeOf(a, op, nil)
			a.AuxInt = i
			a.Aux = &AuxNameOffset{name, rc.storeOffset}
		} else {
			// Stack-resident piece: reuse (or create) a narrower OpArg
			// at the appropriate offset, deduplicated via commonArgs.
			key := selKey{container, rc.storeOffset, at.Size(), at}
			w := x.commonArgs[key]
			if w != nil && w.Uses != 0 {
				if a == nil {
					a = w
				} else {
					a.copyOf(w)
				}
			} else {
				if a == nil {
					aux := container.Aux
					auxInt := container.AuxInt + rc.storeOffset
					a = container.Block.NewValue0IA(container.Pos, OpArg, at, auxInt, aux)
				} else {
					// a already has the right shape; just record it
					// as the canonical Arg for this key.
				}
				x.commonArgs[key] = a
			}
		}
	} else if container.Op == OpSelectN {
		call := container.Args[0]
		aux := call.Aux.(*AuxCall)
		which := container.AuxInt

		if at == types.TypeMem {
			// Sanity check: there is exactly one memory for the call.
			if a != m0 || a != x.memForCall[call.ID] {
				panic(fmt.Errorf("Memories %s, %s, and %s should all be equal after %s", a.LongString(), m0.LongString(), x.memForCall[call.ID], call.LongString()))
			}
		} else if rc.hasRegs() {
			// Register result: select the flattened register index,
			// offset by the registers used by earlier results.
			firstReg := uint32(0)
			for i := 0; i < int(which); i++ {
				firstReg += uint32(len(aux.abiInfo.OutParam(i).Registers))
			}
			reg := int64(rc.nextSlice + Abi1RO(firstReg))
			a = makeOf(a, OpSelectN, []*Value{call})
			a.AuxInt = reg
		} else {
			// Memory result: load from its SP-relative result slot.
			off := x.offsetFrom(x.f.Entry, x.sp, rc.storeOffset+aux.OffsetOfResult(which), types.NewPtr(at))
			a = makeOf(a, OpLoad, []*Value{off, m0})
		}

	} else {
		panic(fmt.Errorf("Expected container OpArg or OpSelectN, saw %v instead", container.LongString()))
	}

	x.commonSelectors[sk] = a
	return a
}
699
700
701
702
703
// rewriteWideSelectToStores rewrites a register-resident selection (of
// type at, from an OpSelectN container that is too wide to SSA) into a
// sequence of per-register OpSelectN values, each stored to its place in
// rc's store destination. It returns the updated memory.
func (x *expandState) rewriteWideSelectToStores(pos src.XPos, b *Block, container, m0 *Value, at *types.Type, rc registerCursor) *Value {

	if at.Size() == 0 {
		return m0
	}

	// Aggregate cases: recurse into components, advancing the cursor.
	switch at.Kind() {
	case types.TARRAY:
		et := at.Elem()
		for i := int64(0); i < at.NumElem(); i++ {
			m0 = x.rewriteWideSelectToStores(pos, b, container, m0, et, rc.next(et))
		}
		return m0

	case types.TSTRUCT:
		if at.IsSIMD() {
			break // SIMD values are treated as scalars below
		}
		for i := 0; i < at.NumFields(); i++ {
			et := at.Field(i).Type
			m0 = x.rewriteWideSelectToStores(pos, b, container, m0, et, rc.next(et))
			pos = pos.WithNotStmt()
		}
		return m0

	case types.TSLICE:
		m0 = x.rewriteWideSelectToStores(pos, b, container, m0, at.Elem().PtrTo(), rc.next(x.typs.BytePtr))
		pos = pos.WithNotStmt()
		m0 = x.rewriteWideSelectToStores(pos, b, container, m0, x.typs.Int, rc.next(x.typs.Int))
		m0 = x.rewriteWideSelectToStores(pos, b, container, m0, x.typs.Int, rc.next(x.typs.Int))
		return m0

	case types.TSTRING:
		m0 = x.rewriteWideSelectToStores(pos, b, container, m0, x.typs.BytePtr, rc.next(x.typs.BytePtr))
		pos = pos.WithNotStmt()
		m0 = x.rewriteWideSelectToStores(pos, b, container, m0, x.typs.Int, rc.next(x.typs.Int))
		return m0

	case types.TINTER:
		m0 = x.rewriteWideSelectToStores(pos, b, container, m0, x.typs.Uintptr, rc.next(x.typs.Uintptr))
		pos = pos.WithNotStmt()
		m0 = x.rewriteWideSelectToStores(pos, b, container, m0, x.typs.BytePtr, rc.next(x.typs.BytePtr))
		return m0

	case types.TCOMPLEX64:
		m0 = x.rewriteWideSelectToStores(pos, b, container, m0, x.typs.Float32, rc.next(x.typs.Float32))
		pos = pos.WithNotStmt()
		m0 = x.rewriteWideSelectToStores(pos, b, container, m0, x.typs.Float32, rc.next(x.typs.Float32))
		return m0

	case types.TCOMPLEX128:
		m0 = x.rewriteWideSelectToStores(pos, b, container, m0, x.typs.Float64, rc.next(x.typs.Float64))
		pos = pos.WithNotStmt()
		m0 = x.rewriteWideSelectToStores(pos, b, container, m0, x.typs.Float64, rc.next(x.typs.Float64))
		return m0

	case types.TINT64:
		// Split 64-bit ints on 32-bit targets (endian-dependent halves).
		if at.Size() > x.regSize {
			m0 = x.rewriteWideSelectToStores(pos, b, container, m0, x.firstType, rc.next(x.firstType))
			pos = pos.WithNotStmt()
			m0 = x.rewriteWideSelectToStores(pos, b, container, m0, x.secondType, rc.next(x.secondType))
			return m0
		}
	case types.TUINT64:
		if at.Size() > x.regSize {
			m0 = x.rewriteWideSelectToStores(pos, b, container, m0, x.typs.UInt32, rc.next(x.typs.UInt32))
			pos = pos.WithNotStmt()
			m0 = x.rewriteWideSelectToStores(pos, b, container, m0, x.typs.UInt32, rc.next(x.typs.UInt32))
			return m0
		}
	}

	// Leaf case: select the register value and store it.
	if container.Op == OpSelectN {
		call := container.Args[0]
		aux := call.Aux.(*AuxCall)
		which := container.AuxInt

		if rc.hasRegs() {
			// Flattened register index: skip the registers used by
			// earlier results, then add the cursor's position.
			firstReg := uint32(0)
			for i := 0; i < int(which); i++ {
				firstReg += uint32(len(aux.abiInfo.OutParam(i).Registers))
			}
			reg := int64(rc.nextSlice + Abi1RO(firstReg))
			a := b.NewValue1I(pos, OpSelectN, at, reg, call)
			dst := x.offsetFrom(b, rc.storeDest, rc.storeOffset, types.NewPtr(at))
			m0 = b.NewValue3A(pos, OpStore, types.TypeMem, at, dst, a, m0)
		} else {
			panic(fmt.Errorf("Expected rc to have registers"))
		}
	} else {
		panic(fmt.Errorf("Expected container OpSelectN, saw %v instead", container.LongString()))
	}
	return m0
}
800
801 func isBlockMultiValueExit(b *Block) bool {
802 return (b.Kind == BlockRet || b.Kind == BlockRetJmp) && b.Controls[0] != nil && b.Controls[0].Op == OpMakeResult
803 }
804
// Abi1RO indexes into the sequence of registers assigned to a value
// (a register offset within an ABI assignment).
type Abi1RO uint8

// registerCursor tracks the progress of decomposing one value into its
// ABI-assigned registers and/or memory location.
type registerCursor struct {
	storeDest   *Value         // base address for memory placement (used when regs is empty)
	storeOffset int64          // current byte offset from storeDest
	regs        []abi.RegIndex // registers assigned to the value being decomposed
	nextSlice   Abi1RO         // offset of the next register (within regs) to consume
	config      *abi.ABIConfig
	regValues   *[]*Value // values destined for registers accumulate here
}
816
817 func (c *registerCursor) String() string {
818 dest := "<none>"
819 if c.storeDest != nil {
820 dest = fmt.Sprintf("%s+%d", c.storeDest.String(), c.storeOffset)
821 }
822 regs := "<none>"
823 if c.regValues != nil {
824 regs = ""
825 for i, x := range *c.regValues {
826 if i > 0 {
827 regs = regs + "; "
828 }
829 regs = regs + x.LongString()
830 }
831 }
832
833
834 return fmt.Sprintf("RCSR{storeDest=%v, regsLen=%d, nextSlice=%d, regValues=[%s]}", dest, len(c.regs), c.nextSlice, regs)
835 }
836
837
838
// next effectively post-increments the cursor for a value of type t: the
// receiver is advanced past t (both its memory footprint and, if registers
// remain, the registers t occupies), and a copy of the pre-advance state —
// with the store offset aligned for t — is returned for placing t itself.
func (c *registerCursor) next(t *types.Type) registerCursor {
	c.storeOffset = types.RoundUp(c.storeOffset, t.Alignment()) // align for t before snapshotting
	rc := *c
	c.storeOffset = types.RoundUp(c.storeOffset+t.Size(), t.Alignment()) // advance past t
	if int(c.nextSlice) < len(c.regs) {
		w := c.config.NumParamRegs(t)
		c.nextSlice += Abi1RO(w) // consume the registers t would occupy
	}
	return rc
}
849
850
851 func (c *registerCursor) plus(regWidth Abi1RO) registerCursor {
852 rc := *c
853 rc.nextSlice += regWidth
854 return rc
855 }
856
// init initializes the cursor for a value assigned to regs (possibly
// empty), with memory fallback at storeDest+storeOffset. Values placed
// in registers, if any, will be appended to *result.
func (c *registerCursor) init(regs []abi.RegIndex, info *abi.ABIParamResultInfo, result *[]*Value, storeDest *Value, storeOffset int64) {
	c.regs = regs
	c.nextSlice = 0
	c.storeOffset = storeOffset
	c.storeDest = storeDest
	c.config = info.Config()
	c.regValues = result
}
865
// addArg appends v to the cursor's collection of register-resident values.
func (c *registerCursor) addArg(v *Value) {
	*c.regValues = append(*c.regValues, v)
}
869
870 func (c *registerCursor) hasRegs() bool {
871 return len(c.regs) > 0
872 }
873
// ArgOpAndRegisterFor returns the ssa Op and register index for the
// register the cursor currently points at.
func (c *registerCursor) ArgOpAndRegisterFor() (Op, int64) {
	r := c.regs[c.nextSlice]
	return ArgOpAndRegisterFor(r, c.config)
}
878
879
880
// ArgOpAndRegisterFor converts an ABI register index into the ssa Op
// (OpArgFloatReg or OpArgIntReg) and the op's corresponding register
// number (for its AuxInt).
func ArgOpAndRegisterFor(r abi.RegIndex, abiConfig *abi.ABIConfig) (Op, int64) {
	i := abiConfig.FloatIndexFor(r)
	if i >= 0 { // negative means r is not a float register
		return OpArgFloatReg, i
	}
	return OpArgIntReg, int64(r)
}
888
// selKey identifies one component selected out of a containing value;
// it is the key for deduplicating (CSE-ing) selectors and decomposed args.
type selKey struct {
	from          *Value // the aggregate being selected from
	offsetOrIndex int64  // byte offset (or index) of the component within from
	size          int64
	typ           *types.Type
}
895
// expandState bundles the state shared across the expand-calls pass.
type expandState struct {
	f       *Func
	debug   int // debug level from f.pass.debug; > 1 is verbose
	regSize int64
	sp      *Value
	typs    *Types

	// Ops/types for splitting 64-bit integers into two halves on 32-bit
	// targets; "first"/"second" order is byte-order dependent (see expandCalls).
	firstOp    Op
	secondOp   Op
	firstType  *types.Type
	secondType *types.Type

	wideSelects     map[*Value]*Value // wide (non-SSA-able) SelectN -> the Store that consumes it
	commonSelectors map[selKey]*Value // canonical selector per selKey (CSE)
	commonArgs      map[selKey]*Value // canonical decomposed OpArg per selKey (CSE)
	memForCall      map[ID]*Value     // memory-typed SelectN for each call, by call ID
	indentLevel     int               // amount of indent for debug printing
}
914
915
// offsetFrom returns a pointer of type pt to offset bytes past from,
// reusing from itself when the offset is zero and the types are
// compatible, folding chains of OpOffPtr, and canonicalizing
// SP-relative offsets through the function's cached OffPtr values.
func (x *expandState) offsetFrom(b *Block, from *Value, offset int64, pt *types.Type) *Value {
	ft := from.Type
	if offset == 0 {
		if ft == pt {
			return from
		}
		// A zero-offset pointer of any pointer-ish type will do.
		if (ft.IsPtr() || ft.IsUnsafePtr()) && pt.IsPtr() {
			return from
		}
	}
	// Fold a chain of OpOffPtr into a single offset from its base.
	for from.Op == OpOffPtr {
		offset += from.AuxInt
		from = from.Args[0]
	}
	if from == x.sp {
		// Canonicalize SP-relative offsets for CSE.
		return x.f.ConstOffPtrSP(pt, offset, x.sp)
	}
	return b.NewValue1I(from.Pos.WithNotStmt(), OpOffPtr, pt, offset, from)
}
937
938
// prAssignForArg returns the ABIParamAssignment for the OpArg v,
// panicking if v is not an OpArg.
func (x *expandState) prAssignForArg(v *Value) *abi.ABIParamAssignment {
	if v.Op != OpArg {
		panic(fmt.Errorf("Wanted OpArg, instead saw %s", v.LongString()))
	}
	return ParamAssignmentForArgName(x.f, v.Aux.(*ir.Name))
}
945
946
947 func ParamAssignmentForArgName(f *Func, name *ir.Name) *abi.ABIParamAssignment {
948 abiInfo := f.OwnAux.abiInfo
949 ip := abiInfo.InParams()
950 for i, a := range ip {
951 if a.Name == name {
952 return &ip[i]
953 }
954 }
955 panic(fmt.Errorf("Did not match param %v in prInfo %+v", name, abiInfo.InParams()))
956 }
957
958
// indent adjusts the debug-print indentation by n (negative to outdent).
func (x *expandState) indent(n int) {
	x.indentLevel += n
}
962
963
// Printf prints to standard output, prefixed with the current
// indentation level's worth of spaces (for nested debug tracing).
func (x *expandState) Printf(format string, a ...any) (n int, err error) {
	if x.indentLevel > 0 {
		fmt.Printf("%[1]*s", x.indentLevel, "")
	}
	return fmt.Printf(format, a...)
}
970
// invalidateRecursively invalidates a and, recursively, any of its
// arguments left unused as a result (via Value.invalidateRecursively).
// With debug enabled it logs the value and reports if a statement
// marker was lost in the process.
func (x *expandState) invalidateRecursively(a *Value) {
	var s string
	if x.debug > 0 {
		plus := " "
		if a.Pos.IsStmt() == src.PosIsStmt {
			plus = " +" // mark statement-boundary positions in the log
		}
		s = a.String() + plus + a.Pos.LineNumber() + " " + a.LongString()
		if x.debug > 1 {
			x.Printf("...marking %v unused\n", s)
		}
	}
	lost := a.invalidateRecursively()
	if x.debug&1 != 0 && lost {
		x.Printf("Lost statement marker in %s on former %s\n", base.Ctxt.Pkgpath+"."+x.f.Name, s)
	}
}
988
View as plain text