Commit 62ac107a authored by Keith Randall

cmd/compile: some SSA cleanup

Do some easy TODOs.
Move a bunch of other TODOs into bugs.

Change-Id: Iaba9dad6221a2af11b3cbcc512875f4a85842873
Reviewed-on: https://go-review.googlesource.com/20114
Run-TryBot: Todd Neal <todd@tneal.org>
TryBot-Result: Gobot Gobot <gobot@golang.org>
Reviewed-by: Todd Neal <todd@tneal.org>
parent 3afbb690
@@ -1935,7 +1935,7 @@ func (s *state) expr(n *Node) *ssa.Value {
for !data.Type.IsPtr() {
switch {
case data.Type.IsArray():
data = s.newValue2(ssa.OpArrayIndex, data.Type.Elem(), data, s.constInt(Types[TINT], 0))
data = s.newValue1I(ssa.OpArrayIndex, data.Type.Elem(), 0, data)
case data.Type.IsStruct():
for i := data.Type.NumFields() - 1; i >= 0; i-- {
f := data.Type.FieldType(i)
......
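The hunk above is the front-end half of the main cleanup: the constant array index no longer travels as a separate SSA operand built with s.constInt, it is carried in the op's AuxInt field, so the builder call changes from newValue2 to newValue1I. A minimal sketch of the representation change, using a toy Value type rather than the real cmd/compile/internal/ssa API:

package main

import "fmt"

// Toy model of an SSA value: not the real cmd/compile/internal/ssa API,
// just enough structure to show where a constant index can live.
type Value struct {
	Op     string
	AuxInt int64    // static operand, e.g. a constant array index
	Args   []*Value // dynamic operands
}

func main() {
	arr := &Value{Op: "Arg"} // stands in for the array being indexed

	// Old shape: the index is itself an SSA value, so ArrayIndex takes
	// two arguments and every use has to materialize a constant first.
	zero := &Value{Op: "Const64", AuxInt: 0}
	before := &Value{Op: "ArrayIndex", Args: []*Value{arr, zero}}

	// New shape: in this code path the index is always the constant 0,
	// so it moves into AuxInt and ArrayIndex needs only one argument,
	// which is what the newValue2 to newValue1I change above does.
	after := &Value{Op: "ArrayIndex", AuxInt: 0, Args: []*Value{arr}}

	fmt.Println("before:", len(before.Args), "args")
	fmt.Println("after:", len(after.Args), "arg, index in AuxInt =", after.AuxInt)
}

What follows is the SSA package's TODO list, from which the items finished by this change are being removed.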
This is a list of things that need to be worked on. It will hopefully
be complete soon.
Coverage
--------
Correctness
-----------
- Debugging info (check & fix as much as we can)
@@ -14,24 +11,12 @@ Optimizations (better compiled code)
- More strength reduction: multiply -> shift/add combos (Worth doing?)
- Add a value range propagation pass (for bounds elim & bitwidth reduction)
- Make dead store pass inter-block
- redundant CMP in sequences like this:
SUBQ $8, AX
CMP AX, $0
JEQ ...
- If there are a lot of MOVQ $0, ..., then load
0 into a register and use the register as the source instead.
- Allow arrays of length 1 (or longer, with all constant indexes?) to be SSAable.
- Figure out how to make PARAMOUT variables ssa-able.
They need to get spilled automatically at end-of-function somehow.
- If strings are being passed around without being interpreted (ptr
and len fields being accessed) pass them in xmm registers?
Same for interfaces?
- OpArrayIndex should take its index in AuxInt, not a full value.
- remove FLAGS from REP instruction clobbers
- (x86) Combine loads into other ops
Note that this is challenging for ops that generate flags
because flagalloc wants to move those instructions around for
flag regeneration.
- Non-constant rotate detection.
- Do 0 <= x && x < n with one unsigned compare
- nil-check removal in indexed load/store case:
@@ -44,17 +29,13 @@ Optimizations (better compiled code)
Optimizations (better compiler)
-------------------------------
- Smaller Value.Type (int32 or ptr)? Get rid of types altogether?
- OpStore uses 3 args. Increase the size of Value.argstorage to 3?
- Use a constant cache for OpConstNil, OpConstInterface, OpConstSlice, maybe OpConstString
- Handle signed division overflow and sign extension earlier
- Implement 64 bit const division with high multiply, maybe in the frontend?
- Add bit widths to complex ops
Regalloc
--------
- Make less arch-dependent
- Allow return values to be ssa-able
- Handle 2-address instructions
- Make liveness analysis non-quadratic
......
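One of the entries that stays on the list above is easy to illustrate at the source level. "Do 0 <= x && x < n with one unsigned compare" is the classic trick of folding a signed two-sided range check into a single unsigned comparison: a negative x wraps to a huge unsigned value, so one compare covers both bounds as long as n is non-negative. A self-contained sketch in ordinary Go, not compiler code:

package main

import "fmt"

// inRangeTwoCompares is the naive form: two signed comparisons.
func inRangeTwoCompares(x, n int) bool {
	return 0 <= x && x < n
}

// inRangeOneCompare folds both checks into a single unsigned compare:
// a negative x converts to a huge uint, so uint(x) < uint(n) fails for
// it just as the explicit 0 <= x test would (assuming n >= 0).
func inRangeOneCompare(x, n int) bool {
	return uint(x) < uint(n)
}

func main() {
	for _, x := range []int{-1, 0, 3, 9, 10} {
		fmt.Println(x, inRangeTwoCompares(x, 10), inRangeOneCompare(x, 10))
	}
}

The "multiply -> shift/add combos" item is the same flavor of rewrite: x*5, for example, can be computed as (x<<2)+x.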
@@ -438,7 +438,7 @@ func init() {
argLength: 4,
reg: regInfo{
inputs: []regMask{buildReg("DI"), buildReg("CX"), buildReg("AX")},
clobbers: buildReg("DI CX FLAGS"),
clobbers: buildReg("DI CX"),
},
},
......
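This regInfo edit is the "remove FLAGS from REP instruction clobbers" item: the x86 REP string operations do not modify the condition flags, so listing FLAGS as clobbered only forced the flag allocator to regenerate flags around them. A clobber set is a bitmask over register numbers built up from names; a toy buildReg-style sketch, with an invented register numbering rather than the compiler's actual tables:

package main

import (
	"fmt"
	"strings"
)

// regMask and buildReg mimic the generator's idea of a clobber set;
// the register numbering here is made up for illustration.
type regMask uint64

var regNames = []string{"AX", "CX", "DX", "BX", "SP", "BP", "SI", "DI", "FLAGS"}

func buildReg(s string) regMask {
	var m regMask
	for _, name := range strings.Fields(s) {
		for i, r := range regNames {
			if r == name {
				m |= 1 << uint(i)
			}
		}
	}
	return m
}

func main() {
	before := buildReg("DI CX FLAGS")
	after := buildReg("DI CX")

	// Dropping FLAGS means the allocator no longer assumes the condition
	// flags die across this instruction.
	fmt.Printf("clobbers with FLAGS:    %b\n", before)
	fmt.Printf("clobbers without FLAGS: %b\n", after)
}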
@@ -400,7 +400,7 @@
// indexing operations
// Note: bounds check has already been done
(ArrayIndex (Load ptr mem) idx) && b == v.Args[0].Block -> (Load (PtrIndex <v.Type.PtrTo()> ptr idx) mem)
(ArrayIndex <t> [0] (Load ptr mem)) -> @v.Args[0].Block (Load <t> ptr mem)
(PtrIndex <t> ptr idx) && config.PtrSize == 4 -> (AddPtr ptr (Mul32 <config.fe.TypeInt()> idx (Const32 <config.fe.TypeInt()> [t.Elem().Size()])))
(PtrIndex <t> ptr idx) && config.PtrSize == 8 -> (AddPtr ptr (Mul64 <config.fe.TypeInt()> idx (Const64 <config.fe.TypeInt()> [t.Elem().Size()])))
......
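In the rewritten rule, [0] matches the op's AuxInt, <t> is the value's type, and the @v.Args[0].Block prefix on the result asks for the replacement Load to be created in the block of the original Load, which is why the old "b == v.Args[0].Block" side condition disappears. The shape of Go code the rule targets is a small fixed-size array that is loaded from memory and then indexed at a constant 0; a hypothetical source-level example (the names wrapped and firstVal are invented, and there is no guarantee about what any particular compiler version emits for it):

package main

import "fmt"

// wrapped holds a one-element array so that indexing it at 0 has the
// Load-then-ArrayIndex shape the rule can turn into a direct element load.
type wrapped struct {
	vals [1]int64
}

func firstVal(w *wrapped) int64 {
	a := w.vals // a Load of the whole [1]int64 array
	return a[0] // ArrayIndex [0] of that Load
}

func main() {
	fmt.Println(firstVal(&wrapped{vals: [1]int64{42}}))
}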
@@ -335,9 +335,9 @@ var genericOps = []opData{
{name: "GetClosurePtr"}, // get closure pointer from dedicated register
// Indexing operations
{name: "ArrayIndex", argLength: 2}, // arg0=array, arg1=index. Returns a[i]
{name: "PtrIndex", argLength: 2}, // arg0=ptr, arg1=index. Computes ptr+sizeof(*v.type)*index, where index is extended to ptrwidth type
{name: "OffPtr", argLength: 1, aux: "Int64"}, // arg0 + auxint (arg0 and result are pointers)
{name: "ArrayIndex", aux: "Int64", argLength: 1}, // arg0=array, auxint=index. Returns a[i]
{name: "PtrIndex", argLength: 2}, // arg0=ptr, arg1=index. Computes ptr+sizeof(*v.type)*index, where index is extended to ptrwidth type
{name: "OffPtr", argLength: 1, aux: "Int64"}, // arg0 + auxint (arg0 and result are pointers)
// Slices
{name: "SliceMake", argLength: 3}, // arg0=ptr, arg1=len, arg2=cap
......
@@ -3689,7 +3689,7 @@ var opcodeTable = [...]opInfo{
{1, 2}, // .CX
{2, 1}, // .AX
},
clobbers: 8589934722, // .CX .DI .FLAGS
clobbers: 130, // .CX .DI
},
},
{
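The regenerated opcode table makes the clobber change concrete: the mask drops from 8589934722 (.CX .DI .FLAGS) to 130 (.CX .DI). Those constants are plain bitmasks over register numbers and differ by exactly one bit; a quick check of the arithmetic, with bit positions read off the two constants themselves rather than taken from the compiler source:

package main

import "fmt"

func main() {
	// Bit positions implied by the two table entries:
	//   130        = 1<<1 + 1<<7          -> .CX .DI
	//   8589934722 = 1<<1 + 1<<7 + 1<<33  -> .CX .DI .FLAGS
	const cx, di, flags = 1, 7, 33

	withFlags := uint64(1)<<cx | uint64(1)<<di | uint64(1)<<flags
	withoutFlags := uint64(1)<<cx | uint64(1)<<di

	fmt.Println(withFlags)                 // 8589934722
	fmt.Println(withoutFlags)              // 130
	fmt.Println(withFlags &^ (1 << flags)) // clearing the FLAGS bit yields 130
}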
@@ -5110,7 +5110,8 @@ var opcodeTable = [...]opInfo{
},
{
name: "ArrayIndex",
argLen: 2,
auxType: auxInt64,
argLen: 1,
generic: true,
},
{
......
@@ -1142,25 +1142,25 @@ func rewriteValuegeneric_OpArg(v *Value, config *Config) bool {
func rewriteValuegeneric_OpArrayIndex(v *Value, config *Config) bool {
b := v.Block
_ = b
// match: (ArrayIndex (Load ptr mem) idx)
// cond: b == v.Args[0].Block
// result: (Load (PtrIndex <v.Type.PtrTo()> ptr idx) mem)
// match: (ArrayIndex <t> [0] (Load ptr mem))
// cond:
// result: @v.Args[0].Block (Load <t> ptr mem)
for {
t := v.Type
if v.AuxInt != 0 {
break
}
if v.Args[0].Op != OpLoad {
break
}
ptr := v.Args[0].Args[0]
mem := v.Args[0].Args[1]
idx := v.Args[1]
if !(b == v.Args[0].Block) {
break
}
v.reset(OpLoad)
v0 := b.NewValue0(v.Line, OpPtrIndex, v.Type.PtrTo())
v0.AddArg(ptr)
v0.AddArg(idx)
b = v.Args[0].Block
v0 := b.NewValue0(v.Line, OpLoad, t)
v.reset(OpCopy)
v.AddArg(v0)
v.AddArg(mem)
v0.AddArg(ptr)
v0.AddArg(mem)
return true
}
return false
......
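The regenerated matcher shows how the @block result works in practice: instead of binding an idx argument and checking that the ArrayIndex lives in the same block as the Load, it now tests v.AuxInt, builds the replacement Load in the Load's own block (b = v.Args[0].Block), and turns v itself into an OpCopy of that new value so existing uses keep working. A toy model of that reset-to-copy step, with stand-in Block and Value types rather than the real ssa package:

package main

import "fmt"

// Block and Value are toy stand-ins for the types in cmd/compile/internal/ssa,
// just enough to show the shape of a cross-block rewrite.
type Block struct{ Name string }

type Value struct {
	Op    string
	Block *Block
	Args  []*Value
}

func main() {
	loadBlock := &Block{Name: "b1"} // block containing the original Load
	useBlock := &Block{Name: "b7"}  // block containing the ArrayIndex

	ptr := &Value{Op: "Ptr", Block: loadBlock}
	mem := &Value{Op: "Mem", Block: loadBlock}
	load := &Value{Op: "Load", Block: loadBlock, Args: []*Value{ptr, mem}}

	v := &Value{Op: "ArrayIndex", Block: useBlock, Args: []*Value{load}}

	// The generated matcher builds the replacement Load in the Load's block
	// (that is what the @v.Args[0].Block prefix requests) and then turns v
	// into a Copy of it, so uses of v in its own block are unaffected.
	v0 := &Value{Op: "Load", Block: loadBlock, Args: []*Value{ptr, mem}}
	v.Op = "Copy"
	v.Args = []*Value{v0}

	fmt.Println(v.Op, "in", v.Block.Name, "refers to", v0.Op, "in", v0.Block.Name)
}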