Commit 19ee2ef9 authored by Daniel Martí

cmd/compile: introduce gc.Node.copy method

When making a shallow copy of a node, various methods were used,
including calling nod(OXXX, nil, nil) and then overwriting it, or
"n1 := *n" and then using &n1.

Add a copy method instead, simplifying all of those and making them
consistent.

Passes toolstash -cmp on std cmd.

Change-Id: I3f3fc88bad708edc712bf6d87214cda4ddc43b01
Reviewed-on: https://go-review.googlesource.com/72710
Run-TryBot: Daniel Martí <mvdan@mvdan.cc>
TryBot-Result: Gobot Gobot <gobot@golang.org>
Reviewed-by: Matthew Dempsky <mdempsky@google.com>
parent 321bd8c9
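
For context, here is a minimal, self-contained sketch of the shallow-copy idioms the commit message refers to. The Node struct, its fields, and main below are illustrative stand-ins only, not the compiler's internal gc.Node; new(Node) approximates the nod(OXXX, nil, nil) idiom. Only the body of the copy method matches the one added by this commit.

package main

import "fmt"

// Node is an illustrative stand-in for the compiler's gc.Node; it only
// needs a couple of fields to show that the copies below are shallow.
type Node struct {
	Op   string
	Left *Node
}

// copy returns a shallow copy of n, as in this commit: the struct value is
// duplicated, but pointer fields still alias the original's children.
func (n *Node) copy() *Node {
	n2 := *n
	return &n2
}

func main() {
	n := &Node{Op: "OADD", Left: &Node{Op: "OLITERAL"}}

	// Old idiom 1: copy into a local value, then take its address.
	n1 := *n
	m1 := &n1

	// Old idiom 2: allocate a blank node and overwrite it. In the compiler
	// this was nod(OXXX, nil, nil); new(Node) stands in for it here.
	m2 := new(Node)
	*m2 = *n

	// New idiom: one method call.
	m3 := n.copy()

	// All three copies are distinct nodes but share n's children.
	fmt.Println(m1 != n, m2 != n, m3 != n)                               // true true true
	fmt.Println(m1.Left == n.Left, m2.Left == n.Left, m3.Left == n.Left) // true true true
}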
@@ -223,8 +223,7 @@ func convlit1(n *Node, t *types.Type, explicit bool, reuse canReuseNode) *Node {
 	if n.Op == OLITERAL && !reuse {
 		// Can't always set n.Type directly on OLITERAL nodes.
 		// See discussion on CL 20813.
-		nn := *n
-		n = &nn
+		n = n.copy()
 		reuse = true
 	}
@@ -1333,8 +1332,7 @@ func defaultlitreuse(n *Node, t *types.Type, reuse canReuseNode) *Node {
 	}
 	if n.Op == OLITERAL && !reuse {
-		nn := *n
-		n = &nn
+		n = n.copy()
 		reuse = true
 	}
...
@@ -466,11 +466,11 @@ func funcargs(nt *Node) {
 		// So the two cases must be distinguished.
 		// We do not record a pointer to the original node (n->orig).
 		// Having multiple names causes too much confusion in later passes.
-		nn := *n.Left
-		nn.Orig = &nn
+		nn := n.Left.copy()
+		nn.Orig = nn
 		nn.Sym = lookupN("~b", gen)
 		gen++
-		n.Left = &nn
+		n.Left = nn
 	}

 	n.Left.Name.Param.Ntype = n.Right
...
@@ -392,7 +392,7 @@ func inlcopy(n *Node) *Node {
 		return n
 	}

-	m := *n
+	m := n.copy()
 	if m.Func != nil {
 		m.Func.Inl.Set(nil)
 	}
@@ -403,7 +403,7 @@ func inlcopy(n *Node) *Node {
 	m.Ninit.Set(inlcopylist(n.Ninit.Slice()))
 	m.Nbody.Set(inlcopylist(n.Nbody.Slice()))
-	return &m
+	return m
 }

 // Inlcalls/nodelist/node walks fn's statements and expressions and substitutes any
@@ -1192,8 +1192,7 @@ func (subst *inlsubst) node(n *Node) *Node {
 		return m

 	case OGOTO, OLABEL:
-		m := nod(OXXX, nil, nil)
-		*m = *n
+		m := n.copy()
 		m.Pos = subst.updatedPos(m.Pos)
 		m.Ninit.Set(nil)
 		p := fmt.Sprintf("%s·%d", n.Left.Sym.Name, inlgen)
@@ -1202,8 +1201,7 @@ func (subst *inlsubst) node(n *Node) *Node {
 		return m
 	}

-	m := nod(OXXX, nil, nil)
-	*m = *n
+	m := n.copy()
 	m.Pos = subst.updatedPos(m.Pos)
 	m.Ninit.Set(nil)
...
@@ -109,10 +109,10 @@ func (o *Order) cheapExpr(n *Node) *Node {
 		if l == n.Left {
 			return n
 		}
-		a := *n
-		a.Orig = &a
+		a := n.copy()
+		a.Orig = a
 		a.Left = l
-		return typecheck(&a, Erv)
+		return typecheck(a, Erv)
 	}

 	return o.copyExpr(n, n.Type, false)
@@ -135,20 +135,20 @@ func (o *Order) safeExpr(n *Node) *Node {
 		if l == n.Left {
 			return n
 		}
-		a := *n
-		a.Orig = &a
+		a := n.copy()
+		a.Orig = a
 		a.Left = l
-		return typecheck(&a, Erv)
+		return typecheck(a, Erv)

 	case ODOTPTR, OIND:
 		l := o.cheapExpr(n.Left)
 		if l == n.Left {
 			return n
 		}
-		a := *n
-		a.Orig = &a
+		a := n.copy()
+		a.Orig = a
 		a.Left = l
-		return typecheck(&a, Erv)
+		return typecheck(a, Erv)

 	case OINDEX, OINDEXMAP:
 		var l *Node
@@ -161,11 +161,11 @@ func (o *Order) safeExpr(n *Node) *Node {
 		if l == n.Left && r == n.Right {
 			return n
 		}
-		a := *n
-		a.Orig = &a
+		a := n.copy()
+		a.Orig = a
 		a.Left = l
 		a.Right = r
-		return typecheck(&a, Erv)
+		return typecheck(a, Erv)

 	default:
 		Fatalf("ordersafeexpr %v", n.Op)
...
@@ -67,17 +67,17 @@ func instrument(fn *Node) {
 		// nodpc is the PC of the caller as extracted by
 		// getcallerpc. We use -widthptr(FP) for x86.
 		// BUG: this will not work on arm.
-		nodpc := *nodfp
+		nodpc := nodfp.copy()
 		nodpc.Type = types.Types[TUINTPTR]
 		nodpc.Xoffset = int64(-Widthptr)
 		savedLineno := lineno
 		lineno = src.NoXPos
-		nd := mkcall("racefuncenter", nil, nil, &nodpc)
+		nd := mkcall("racefuncenter", nil, nil, nodpc)

 		fn.Func.Enter.Prepend(nd)
 		nd = mkcall("racefuncexit", nil, nil)
 		fn.Func.Exit.Append(nd)
-		fn.Func.Dcl = append(fn.Func.Dcl, &nodpc)
+		fn.Func.Dcl = append(fn.Func.Dcl, nodpc)
 		lineno = savedLineno
 	}
...
@@ -328,35 +328,32 @@ func staticcopy(l *Node, r *Node, out *[]*Node) bool {
 		// copy slice
 		a := inittemps[r]
-		n := *l
+		n := l.copy()
 		n.Xoffset = l.Xoffset + int64(array_array)
-		gdata(&n, nod(OADDR, a, nil), Widthptr)
+		gdata(n, nod(OADDR, a, nil), Widthptr)
 		n.Xoffset = l.Xoffset + int64(array_nel)
-		gdata(&n, r.Right, Widthptr)
+		gdata(n, r.Right, Widthptr)
 		n.Xoffset = l.Xoffset + int64(array_cap)
-		gdata(&n, r.Right, Widthptr)
+		gdata(n, r.Right, Widthptr)
 		return true

 	case OARRAYLIT, OSTRUCTLIT:
 		p := initplans[r]

-		n := *l
+		n := l.copy()
 		for i := range p.E {
 			e := &p.E[i]
 			n.Xoffset = l.Xoffset + e.Xoffset
 			n.Type = e.Expr.Type
 			if e.Expr.Op == OLITERAL {
-				gdata(&n, e.Expr, int(n.Type.Width))
+				gdata(n, e.Expr, int(n.Type.Width))
 			} else {
-				ll := nod(OXXX, nil, nil)
-				*ll = n
+				ll := n.copy()
 				ll.Orig = ll // completely separate copy
 				if !staticassign(ll, e.Expr, out) {
 					// Requires computation, but we're
 					// copying someone else's computation.
-					rr := nod(OXXX, nil, nil)
-					*rr = *orig
+					rr := orig.copy()
 					rr.Orig = rr // completely separate copy
 					rr.Type = ll.Type
 					rr.Xoffset += e.Xoffset
@@ -429,13 +426,13 @@ func staticassign(l *Node, r *Node, out *[]*Node) bool {
 			ta := types.NewArray(r.Type.Elem(), bound)
 			a := staticname(ta)
 			inittemps[r] = a
-			n := *l
+			n := l.copy()
 			n.Xoffset = l.Xoffset + int64(array_array)
-			gdata(&n, nod(OADDR, a, nil), Widthptr)
+			gdata(n, nod(OADDR, a, nil), Widthptr)
 			n.Xoffset = l.Xoffset + int64(array_nel)
-			gdata(&n, r.Right, Widthptr)
+			gdata(n, r.Right, Widthptr)
 			n.Xoffset = l.Xoffset + int64(array_cap)
-			gdata(&n, r.Right, Widthptr)
+			gdata(n, r.Right, Widthptr)

 			// Fall through to init underlying array.
 			l = a
@@ -445,17 +442,16 @@ func staticassign(l *Node, r *Node, out *[]*Node) bool {
 		initplan(r)

 		p := initplans[r]
-		n := *l
+		n := l.copy()
 		for i := range p.E {
 			e := &p.E[i]
 			n.Xoffset = l.Xoffset + e.Xoffset
 			n.Type = e.Expr.Type
 			if e.Expr.Op == OLITERAL {
-				gdata(&n, e.Expr, int(n.Type.Width))
+				gdata(n, e.Expr, int(n.Type.Width))
 			} else {
 				setlineno(e.Expr)
-				a := nod(OXXX, nil, nil)
-				*a = n
+				a := n.copy()
 				a.Orig = a // completely separate copy
 				if !staticassign(a, e.Expr, out) {
 					*out = append(*out, nod(OAS, a, e.Expr))
@@ -522,11 +518,10 @@ func staticassign(l *Node, r *Node, out *[]*Node) bool {
 			// Copy val directly into n.
 			n.Type = val.Type
 			setlineno(val)
-			a := nod(OXXX, nil, nil)
-			*a = n
-			a.Orig = a
-			if !staticassign(a, val, out) {
-				*out = append(*out, nod(OAS, a, val))
+			a := n
+			a.Orig = &a
+			if !staticassign(&a, val, out) {
+				*out = append(*out, nod(OAS, &a, val))
 			}
 		} else {
 			// Construct temp to hold val, write pointer to temp into n.
...
@@ -364,6 +364,11 @@ func nodSym(op Op, left *Node, sym *types.Sym) *Node {
 	return n
 }

+func (n *Node) copy() *Node {
+	n2 := *n
+	return &n2
+}
+
 // methcmp sorts methods by name with exported methods first,
 // and then non-exported methods by their package path.
 type methcmp []*types.Field
@@ -439,8 +444,8 @@ func treecopy(n *Node, pos src.XPos) *Node {
 	switch n.Op {
 	default:
-		m := *n
-		m.Orig = &m
+		m := n.copy()
+		m.Orig = m
 		m.Left = treecopy(n.Left, pos)
 		m.Right = treecopy(n.Right, pos)
 		m.List.Set(listtreecopy(n.List.Slice(), pos))
@@ -451,7 +456,7 @@ func treecopy(n *Node, pos src.XPos) *Node {
 			Dump("treecopy", n)
 			Fatalf("treecopy Name")
 		}
-		return &m
+		return m

 	case OPACK:
 		// OPACK nodes are never valid in const value declarations,
@@ -1252,8 +1257,7 @@ func safeexpr(n *Node, init *Nodes) *Node {
 		if l == n.Left {
 			return n
 		}
-		r := nod(OXXX, nil, nil)
-		*r = *n
+		r := n.copy()
 		r.Left = l
 		r = typecheck(r, Erv)
 		r = walkexpr(r, init)
@@ -1264,8 +1268,7 @@ func safeexpr(n *Node, init *Nodes) *Node {
 		if l == n.Left {
 			return n
 		}
-		a := nod(OXXX, nil, nil)
-		*a = *n
+		a := n.copy()
 		a.Left = l
 		a = walkexpr(a, init)
 		return a
@@ -1276,8 +1279,7 @@ func safeexpr(n *Node, init *Nodes) *Node {
 		if l == n.Left && r == n.Right {
 			return n
 		}
-		a := nod(OXXX, nil, nil)
-		*a = *n
+		a := n.copy()
 		a.Left = l
 		a.Right = r
 		a = walkexpr(a, init)
...
@@ -2873,9 +2873,7 @@ func typecheckcomplit(n *Node) *Node {
 	}

 	// Save original node (including n.Right)
-	norig := nod(n.Op, nil, nil)
-	*norig = *n
+	norig := n.copy()

 	setlineno(n.Right)
 	n.Right = typecheck(n.Right, Etype|Ecomplit)
...
@@ -3894,7 +3894,7 @@ func wrapCall(n *Node, init *Nodes) *Node {
 // The result of substArgTypes MUST be assigned back to old, e.g.
 // 	n.Left = substArgTypes(n.Left, t1, t2)
 func substArgTypes(old *Node, types_ ...*types.Type) *Node {
-	n := *old // make shallow copy
+	n := old.copy() // make shallow copy

 	for _, t := range types_ {
 		dowidth(t)
@@ -3903,5 +3903,5 @@ func substArgTypes(old *Node, types_ ...*types.Type) *Node {
 	if len(types_) > 0 {
 		Fatalf("substArgTypes: too many argument types")
 	}
-	return &n
+	return n
 }