Commit 5cab0169 authored by Josh Bleecher Snyder

cmd/compile: rename Node.Int to Node.Int64

gorename -from '"cmd/compile/internal/gc".Node.Int' -to 'Int64'

Change-Id: I2fe3bf9a26ae6b0600d990d0c981e4b8b53020a4
Reviewed-on: https://go-review.googlesource.com/21426
Reviewed-by: Brad Fitzpatrick <bradfitz@golang.org>
parent 00e5a68c
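
The rename is purely mechanical; as a sketch of what it means at a typical call site (the names nr and sc are taken from the hunks below):

	// before this commit: the accessor was named Int, although it already returned an int64
	sc := uint64(nr.Int())

	// after this commit: gorename rewrites every call site to the new name
	sc := uint64(nr.Int64())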
@@ -206,9 +206,9 @@ func dodiv(op gc.Op, nl *gc.Node, nr *gc.Node, res *gc.Node) {
 	check := false
 	if t.IsSigned() {
 		check = true
-		if gc.Isconst(nl, gc.CTINT) && nl.Int() != -(1<<uint64(t.Width*8-1)) {
+		if gc.Isconst(nl, gc.CTINT) && nl.Int64() != -(1<<uint64(t.Width*8-1)) {
 			check = false
-		} else if gc.Isconst(nr, gc.CTINT) && nr.Int() != -1 {
+		} else if gc.Isconst(nr, gc.CTINT) && nr.Int64() != -1 {
 			check = false
 		}
 	}
@@ -397,7 +397,7 @@ func cgen_shift(op gc.Op, bounded bool, nl *gc.Node, nr *gc.Node, res *gc.Node)
 		var n1 gc.Node
 		gc.Regalloc(&n1, nl.Type, res)
 		gc.Cgen(nl, &n1)
-		sc := uint64(nr.Int())
+		sc := uint64(nr.Int64())
 		if sc >= uint64(nl.Type.Width*8) {
 			// large shift gets 2 shifts by width-1
 			var n3 gc.Node
...
@@ -213,7 +213,7 @@ func gmove(f *gc.Node, t *gc.Node) {
 		// 64-bit immediates are really 32-bit sign-extended
 		// unless moving into a register.
 		if gc.Isint[tt] {
-			if i := con.Int(); int64(int32(i)) != i {
+			if i := con.Int64(); int64(int32(i)) != i {
 				goto hard
 			}
 		}
@@ -1310,7 +1310,7 @@ func sudoaddable(as obj.As, n *gc.Node, a *obj.Addr) bool {
 		if !gc.Isconst(n, gc.CTINT) {
 			break
 		}
-		v := n.Int()
+		v := n.Int64()
 		if v >= 32000 || v <= -32000 {
 			break
 		}
...
@@ -237,7 +237,7 @@ func cgen64(n *gc.Node, res *gc.Node) {
 	// shld hi:lo, c
 	// shld lo:t, c
 	case gc.OLROT:
-		v := uint64(r.Int())
+		v := uint64(r.Int64())
 		var bl gc.Node
 		gc.Regalloc(&bl, lo1.Type, nil)
@@ -291,7 +291,7 @@ func cgen64(n *gc.Node, res *gc.Node) {
 		var p4 *obj.Prog
 		var p5 *obj.Prog
 		if r.Op == gc.OLITERAL {
-			v := uint64(r.Int())
+			v := uint64(r.Int64())
 			if v >= 64 {
 				// TODO(kaib): replace with gins(AMOVW, nodintconst(0), &al)
 				// here and below (verify it optimizes to EOR)
@@ -452,7 +452,7 @@ func cgen64(n *gc.Node, res *gc.Node) {
 		var creg gc.Node
 		var p3 *obj.Prog
 		if r.Op == gc.OLITERAL {
-			v := uint64(r.Int())
+			v := uint64(r.Int64())
 			if v >= 64 {
 				if bh.Type.Etype == gc.TINT32 {
 					// MOVW	bh->31, al
...
@@ -178,7 +178,7 @@ func cgen_shift(op gc.Op, bounded bool, nl *gc.Node, nr *gc.Node, res *gc.Node)
 	w := int(nl.Type.Width * 8)
 	if op == gc.OLROT {
-		v := nr.Int()
+		v := nr.Int64()
 		var n1 gc.Node
 		gc.Regalloc(&n1, nl.Type, res)
 		if w == 32 {
@@ -205,7 +205,7 @@ func cgen_shift(op gc.Op, bounded bool, nl *gc.Node, nr *gc.Node, res *gc.Node)
 		var n1 gc.Node
 		gc.Regalloc(&n1, nl.Type, res)
 		gc.Cgen(nl, &n1)
-		sc := uint64(nr.Int())
+		sc := uint64(nr.Int64())
 		if sc == 0 {
 		} else // nothing to do
 		if sc >= uint64(nl.Type.Width*8) {
@@ -475,7 +475,7 @@ func ginscon(as obj.As, c int64, n *gc.Node) {
 }
 
 func ginscmp(op gc.Op, t *gc.Type, n1, n2 *gc.Node, likely int) *obj.Prog {
-	if t.IsInteger() && n1.Op == gc.OLITERAL && n1.Int() == 0 && n2.Op != gc.OLITERAL {
+	if t.IsInteger() && n1.Op == gc.OLITERAL && n1.Int64() == 0 && n2.Op != gc.OLITERAL {
 		op = gc.Brrev(op)
 		n1, n2 = n2, n1
 	}
@@ -484,7 +484,7 @@ func ginscmp(op gc.Op, t *gc.Type, n1, n2 *gc.Node, likely int) *obj.Prog {
 	gc.Regalloc(&g1, n1.Type, &r1)
 	gc.Cgen(n1, &g1)
 	gmove(&g1, &r1)
-	if t.IsInteger() && n2.Op == gc.OLITERAL && n2.Int() == 0 {
+	if t.IsInteger() && n2.Op == gc.OLITERAL && n2.Int64() == 0 {
 		gins(arm.ACMP, &r1, n2)
 	} else {
 		gc.Regalloc(&r2, t, n2)
...
@@ -112,7 +112,7 @@ func split64(n *gc.Node, lo *gc.Node, hi *gc.Node) {
 	case gc.OLITERAL:
 		var n1 gc.Node
 		n.Convconst(&n1, n.Type)
-		i := n1.Int()
+		i := n1.Int64()
 		gc.Nodconst(lo, gc.Types[gc.TUINT32], int64(uint32(i)))
 		i >>= 32
 		if n.Type.Etype == gc.TINT64 {
@@ -1143,7 +1143,7 @@ func sudoaddable(as obj.As, n *gc.Node, a *obj.Addr) bool {
 		if !gc.Isconst(n, gc.CTINT) {
 			break
 		}
-		v := n.Int()
+		v := n.Int64()
 		if v >= 32000 || v <= -32000 {
 			break
 		}
...
@@ -151,9 +151,9 @@ func dodiv(op gc.Op, nl *gc.Node, nr *gc.Node, res *gc.Node) {
 	check := false
 	if t.IsSigned() {
 		check = true
-		if gc.Isconst(nl, gc.CTINT) && nl.Int() != -(1<<uint64(t.Width*8-1)) {
+		if gc.Isconst(nl, gc.CTINT) && nl.Int64() != -(1<<uint64(t.Width*8-1)) {
 			check = false
-		} else if gc.Isconst(nr, gc.CTINT) && nr.Int() != -1 {
+		} else if gc.Isconst(nr, gc.CTINT) && nr.Int64() != -1 {
 			check = false
 		}
 	}
@@ -314,7 +314,7 @@ func cgen_shift(op gc.Op, bounded bool, nl *gc.Node, nr *gc.Node, res *gc.Node)
 		var n1 gc.Node
 		gc.Regalloc(&n1, nl.Type, res)
 		gc.Cgen(nl, &n1)
-		sc := uint64(nr.Int())
+		sc := uint64(nr.Int64())
 		if sc >= uint64(nl.Type.Width)*8 {
 			// large shift gets 2 shifts by width-1
 			var n3 gc.Node
...
@@ -115,7 +115,7 @@ func ginscmp(op gc.Op, t *gc.Type, n1, n2 *gc.Node, likely int) *obj.Prog {
 	gc.Cgen(n1, &g1)
 	gmove(&g1, &r1)
 	if t.IsInteger() && gc.Isconst(n2, gc.CTINT) {
-		ginscon2(optoas(gc.OCMP, t), &r1, n2.Int())
+		ginscon2(optoas(gc.OCMP, t), &r1, n2.Int64())
 	} else {
 		gc.Regalloc(&r2, t, n2)
 		gc.Regalloc(&g2, n1.Type, &r2)
...
@@ -1032,7 +1032,7 @@ func Agenr(n *Node, a *Node, res *Node) {
 		if Isconst(nl, CTSTR) {
 			Fatalf("constant string constant index")
 		}
-		v := uint64(nr.Int())
+		v := uint64(nr.Int64())
 		var n2 Node
 		if nl.Type.IsSlice() || nl.Type.IsString() {
 			if Debug['B'] == 0 && !n.Bounded {
@@ -1184,7 +1184,7 @@ func Agenr(n *Node, a *Node, res *Node) {
 			if Isconst(nl, CTSTR) {
 				Fatalf("constant string constant index") // front end should handle
 			}
-			v := uint64(nr.Int())
+			v := uint64(nr.Int64())
 			if nl.Type.IsSlice() || nl.Type.IsString() {
 				if Debug['B'] == 0 && !n.Bounded {
 					nlen := n3
@@ -1374,7 +1374,7 @@ func Agenr(n *Node, a *Node, res *Node) {
 		if Isconst(nl, CTSTR) {
 			Fatalf("constant string constant index") // front end should handle
 		}
-		v := uint64(nr.Int())
+		v := uint64(nr.Int64())
 		if nl.Type.IsSlice() || nl.Type.IsString() {
 			if Debug['B'] == 0 && !n.Bounded {
 				p1 := Thearch.Ginscmp(OGT, Types[Simtype[TUINT]], &nlen, Nodintconst(int64(v)), +1)
@@ -1708,7 +1708,7 @@ func Igen(n *Node, a *Node, res *Node) {
 			// Compute &a[i] as &a + i*width.
 			a.Type = n.Type
-			a.Xoffset += n.Right.Int() * n.Type.Width
+			a.Xoffset += n.Right.Int64() * n.Type.Width
 			Fixlargeoffset(a)
 			return
 		}
@@ -2214,7 +2214,7 @@ func stkof(n *Node) int64 {
 			return off
 		}
 		if Isconst(n.Right, CTINT) {
-			return off + t.Elem().Width*n.Right.Int()
+			return off + t.Elem().Width*n.Right.Int64()
 		}
 		return +1000 // on stack but not sure exactly where
@@ -2645,7 +2645,7 @@ func cgen_div(op Op, nl *Node, nr *Node, res *Node) {
 	case TUINT64:
 		var m Magic
 		m.W = w
-		m.Ud = uint64(nr.Int())
+		m.Ud = uint64(nr.Int64())
 		Umagic(&m)
 		if m.Bad != 0 {
 			break
@@ -2683,7 +2683,7 @@ func cgen_div(op Op, nl *Node, nr *Node, res *Node) {
 	case TINT64:
 		var m Magic
 		m.W = w
-		m.Sd = nr.Int()
+		m.Sd = nr.Int64()
 		Smagic(&m)
 		if m.Bad != 0 {
 			break
@@ -3243,7 +3243,7 @@ func cgen_slice(n, res *Node, wb bool) {
 				Fatalf("missed slice out of bounds check")
 			}
 			var tmp Node
-			Nodconst(&tmp, indexRegType, n1.Int())
+			Nodconst(&tmp, indexRegType, n1.Int64())
 			n1 = &tmp
 		}
 		p := Thearch.Ginscmp(OGT, indexRegType, n1, n2, -1)
@@ -3327,9 +3327,9 @@ func cgen_slice(n, res *Node, wb bool) {
 	switch j.Op {
 	case OLITERAL:
 		if Isconst(&i, CTINT) {
-			Nodconst(&j, indexRegType, j.Int()-i.Int())
+			Nodconst(&j, indexRegType, j.Int64()-i.Int64())
 			if Debug_slice > 0 {
-				Warn("slice: result len == %d", j.Int())
+				Warn("slice: result len == %d", j.Int64())
 			}
 			break
 		}
@@ -3344,7 +3344,7 @@ func cgen_slice(n, res *Node, wb bool) {
 		fallthrough
 	case OREGISTER:
 		if i.Op == OLITERAL {
-			v := i.Int()
+			v := i.Int64()
 			if v != 0 {
 				ginscon(Thearch.Optoas(OSUB, indexRegType), v, &j)
 			}
@@ -3387,9 +3387,9 @@ func cgen_slice(n, res *Node, wb bool) {
 	switch k.Op {
 	case OLITERAL:
 		if Isconst(&i, CTINT) {
-			Nodconst(&k, indexRegType, k.Int()-i.Int())
+			Nodconst(&k, indexRegType, k.Int64()-i.Int64())
 			if Debug_slice > 0 {
-				Warn("slice: result cap == %d", k.Int())
+				Warn("slice: result cap == %d", k.Int64())
 			}
 			break
 		}
@@ -3410,7 +3410,7 @@ func cgen_slice(n, res *Node, wb bool) {
 				Warn("slice: result cap == 0")
 			}
 		} else if i.Op == OLITERAL {
-			v := i.Int()
+			v := i.Int64()
 			if v != 0 {
 				ginscon(Thearch.Optoas(OSUB, indexRegType), v, &k)
 			}
@@ -3503,7 +3503,7 @@ func cgen_slice(n, res *Node, wb bool) {
 		w = res.Type.Elem().Width // res is []T, elem size is T.width
 	}
 	if Isconst(&i, CTINT) {
-		ginscon(Thearch.Optoas(OADD, xbase.Type), i.Int()*w, &xbase)
+		ginscon(Thearch.Optoas(OADD, xbase.Type), i.Int64()*w, &xbase)
 	} else if Thearch.AddIndex != nil && Thearch.AddIndex(&i, w, &xbase) {
 		// done by back end
 	} else if w == 1 {
...
@@ -16,16 +16,16 @@ func (n *Node) IntLiteral() (x int64, ok bool) {
 	case n == nil:
 		return
 	case Isconst(n, CTINT):
-		return n.Int(), true
+		return n.Int64(), true
 	case Isconst(n, CTBOOL):
 		return int64(obj.Bool2int(n.Bool())), true
 	}
 	return
 }
 
-// Int returns n as an int.
-// n must be an integer constant.
-func (n *Node) Int() int64 {
+// Int64 returns n as an int64.
+// n must be an integer or rune constant.
+func (n *Node) Int64() int64 {
 	if !Isconst(n, CTINT) {
 		Fatalf("Int(%v)", n)
 	}
@@ -1455,7 +1455,7 @@ func nonnegconst(n *Node) int {
 		if n.Val().U.(*Mpint).Cmp(Minintval[TUINT32]) < 0 || n.Val().U.(*Mpint).Cmp(Maxintval[TINT32]) > 0 {
 			break
 		}
-		return int(n.Int())
+		return int(n.Int64())
 	}
 }
@@ -1510,7 +1510,7 @@ func (n *Node) Convconst(con *Node, t *Type) {
 		Fatalf("convconst ctype=%d %v", n.Val().Ctype(), Tconv(t, FmtLong))
 	case CTINT, CTRUNE:
-		i = n.Int()
+		i = n.Int64()
 	case CTBOOL:
 		i = int64(obj.Bool2int(n.Val().U.(bool)))
...
@@ -438,7 +438,7 @@ func Naddr(a *obj.Addr, n *Node) {
 		case CTINT, CTRUNE:
 			a.Sym = nil
 			a.Type = obj.TYPE_CONST
-			a.Offset = n.Int()
+			a.Offset = n.Int64()
 		case CTSTR:
 			datagostring(n.Val().U.(string), a)
...
@@ -433,7 +433,7 @@ func staticassign(l *Node, r *Node, out *[]*Node) bool {
 		initplan(r)
 		if r.Type.IsSlice() {
 			// Init slice.
-			bound := r.Right.Int()
+			bound := r.Right.Int64()
 			ta := typArray(r.Type.Elem(), bound)
 			a := staticname(ta, 1)
 			inittemps[r] = a
@@ -689,7 +689,7 @@ func arraylit(ctxt int, pass int, n *Node, var_ *Node, init *Nodes) {
 func slicelit(ctxt int, n *Node, var_ *Node, init *Nodes) {
 	// make an array type corresponding the number of elements we have
-	t := typArray(n.Type.Elem(), n.Right.Int())
+	t := typArray(n.Type.Elem(), n.Right.Int64())
 	dowidth(t)
 	if ctxt != 0 {
@@ -1171,7 +1171,7 @@ func oaslit(n *Node, init *Nodes) bool {
 func getlit(lit *Node) int {
 	if Smallintconst(lit) {
-		return int(lit.Int())
+		return int(lit.Int64())
 	}
 	return -1
 }
@@ -1234,7 +1234,7 @@ func initplan(n *Node) {
 			if a.Op != OKEY || !Smallintconst(a.Left) {
 				Fatalf("initplan arraylit")
 			}
-			addvalue(p, n.Type.Elem().Width*a.Left.Int(), a.Right)
+			addvalue(p, n.Type.Elem().Width*a.Left.Int64(), a.Right)
 		}
 	case OSTRUCTLIT:
...
@@ -717,7 +717,7 @@ func (s *state) stmt(n *Node) {
 			} else {
 				j = rhs.Right.Right
 			}
-			if i != nil && (i.Op == OLITERAL && i.Val().Ctype() == CTINT && i.Int() == 0) {
+			if i != nil && (i.Op == OLITERAL && i.Val().Ctype() == CTINT && i.Int64() == 0) {
 				// [0:...] is the same as [:...]
 				i = nil
 			}
@@ -1423,7 +1423,7 @@ func (s *state) expr(n *Node) *ssa.Value {
 	case OLITERAL:
 		switch n.Val().Ctype() {
 		case CTINT:
-			i := n.Int()
+			i := n.Int64()
 			switch n.Type.Size() {
 			case 1:
 				return s.constInt8(n.Type, int8(i))
@@ -1825,7 +1825,7 @@ func (s *state) expr(n *Node) *ssa.Value {
 		return s.newValue2(s.ssaShiftOp(n.Op, n.Type, n.Right.Type), a.Type, a, b)
 	case OLROT:
 		a := s.expr(n.Left)
-		i := n.Right.Int()
+		i := n.Right.Int64()
 		if i <= 0 || i >= n.Type.Size()*8 {
 			s.Fatalf("Wrong rotate distance for LROT, expected 1 through %d, saw %d", n.Type.Size()*8-1, i)
 		}
@@ -1943,7 +1943,7 @@ func (s *state) expr(n *Node) *ssa.Value {
 		ptrtyp := Ptrto(Types[TUINT8])
 		ptr := s.newValue1(ssa.OpStringPtr, ptrtyp, a)
 		if Isconst(n.Right, CTINT) {
-			ptr = s.newValue1I(ssa.OpOffPtr, ptrtyp, n.Right.Int(), ptr)
+			ptr = s.newValue1I(ssa.OpOffPtr, ptrtyp, n.Right.Int64(), ptr)
 		} else {
 			ptr = s.newValue2(ssa.OpAddPtr, ptrtyp, ptr, i)
 		}
...
@@ -484,7 +484,7 @@ func aindex(b *Node, t *Type) *Type {
 		case CTINT, CTRUNE:
 			hasbound = true
-			bound = b.Int()
+			bound = b.Int64()
 			if bound < 0 {
 				Yyerror("array bound must be non negative")
 			}
@@ -2031,7 +2031,7 @@ func powtwo(n *Node) int {
 		return -1
 	}
-	v := uint64(n.Int())
+	v := uint64(n.Int64())
 	b := uint64(1)
 	for i := 0; i < 64; i++ {
 		if b == v {
...
@@ -1008,7 +1008,7 @@ OpSwitch:
 			}
 			if !n.Bounded && Isconst(n.Right, CTINT) {
-				x := n.Right.Int()
+				x := n.Right.Int64()
 				if x < 0 {
 					Yyerror("invalid %s index %v (index must be non-negative)", why, n.Right)
 				} else if t.IsArray() && x >= t.NumElem() {
@@ -2212,13 +2212,13 @@ func checksliceindex(l *Node, r *Node, tp *Type) bool {
 	}
 	if r.Op == OLITERAL {
-		if r.Int() < 0 {
+		if r.Int64() < 0 {
 			Yyerror("invalid slice index %v (index must be non-negative)", r)
 			return false
-		} else if tp != nil && tp.NumElem() > 0 && r.Int() > tp.NumElem() {
+		} else if tp != nil && tp.NumElem() > 0 && r.Int64() > tp.NumElem() {
 			Yyerror("invalid slice index %v (out of bounds for %d-element array)", r, tp.NumElem())
 			return false
-		} else if Isconst(l, CTSTR) && r.Int() > int64(len(l.Val().U.(string))) {
+		} else if Isconst(l, CTSTR) && r.Int64() > int64(len(l.Val().U.(string))) {
 			Yyerror("invalid slice index %v (out of bounds for %d-byte string)", r, len(l.Val().U.(string)))
 			return false
 		} else if r.Val().U.(*Mpint).Cmp(Maxintval[TINT]) > 0 {
@@ -2834,7 +2834,7 @@ func indexdup(n *Node, hash map[int64]*Node) {
 		Fatalf("indexdup: not OLITERAL")
 	}
-	v := n.Int()
+	v := n.Int64()
 	if hash[v] != nil {
 		Yyerror("duplicate index in array literal: %d", v)
 		return
...
@@ -365,7 +365,7 @@ func isSmallMakeSlice(n *Node) bool {
 	}
 	t := n.Type
-	return Smallintconst(l) && Smallintconst(r) && (t.Elem().Width == 0 || r.Int() < (1<<16)/t.Elem().Width)
+	return Smallintconst(l) && Smallintconst(r) && (t.Elem().Width == 0 || r.Int64() < (1<<16)/t.Elem().Width)
 }
 
 // walk the whole tree of the body of an
@@ -1177,7 +1177,7 @@ opswitch:
 			// replace "abc"[1] with 'b'.
 			// delayed until now because "abc"[1] is not
 			// an ideal constant.
-			v := n.Right.Int()
+			v := n.Right.Int64()
 			Nodconst(n, n.Type, int64(n.Left.Val().U.(string)[v]))
 			n.Typecheck = 1
@@ -3299,9 +3299,9 @@ func walkrotate(n *Node) *Node {
 	w := int(l.Type.Width * 8)
 	if Smallintconst(l.Right) && Smallintconst(r.Right) {
-		sl := int(l.Right.Int())
+		sl := int(l.Right.Int64())
 		if sl >= 0 {
-			sr := int(r.Right.Int())
+			sr := int(r.Right.Int64())
 			if sr >= 0 && sl+sr == w {
 				// Rewrite left shift half to left rotate.
 				if l.Op == OLSH {
@@ -3312,7 +3312,7 @@ func walkrotate(n *Node) *Node {
 	n.Op = OLROT
 	// Remove rotate 0 and rotate w.
-	s := int(n.Right.Int())
+	s := int(n.Right.Int64())
 	if s == 0 || s == w {
 		n = n.Left
@@ -3352,7 +3352,7 @@ func walkmul(n *Node, init *Nodes) *Node {
 	// x*0 is 0 (and side effects of x).
 	var pow int
 	var w int
-	if nr.Int() == 0 {
+	if nr.Int64() == 0 {
 		cheapexpr(nl, init)
 		Nodconst(n, n.Type, 0)
 		goto ret
@@ -3444,10 +3444,10 @@ func walkdiv(n *Node, init *Nodes) *Node {
 		m.W = w
 		if nl.Type.IsSigned() {
-			m.Sd = nr.Int()
+			m.Sd = nr.Int64()
 			Smagic(&m)
 		} else {
-			m.Ud = uint64(nr.Int())
+			m.Ud = uint64(nr.Int64())
 			Umagic(&m)
 		}
@@ -3639,7 +3639,7 @@ func walkdiv(n *Node, init *Nodes) *Node {
 			// n = nl & (nr-1)
 			n.Op = OAND
-			Nodconst(&nc, nl.Type, nr.Int()-1)
+			Nodconst(&nc, nl.Type, nr.Int64()-1)
 		} else {
 			// n = nl >> pow
 			n.Op = ORSH
@@ -3669,7 +3669,7 @@ func bounded(n *Node, max int64) bool {
 	bits := int32(8 * n.Type.Width)
 	if Smallintconst(n) {
-		v := n.Int()
+		v := n.Int64()
 		return 0 <= v && v < max
 	}
@@ -3677,9 +3677,9 @@ func bounded(n *Node, max int64) bool {
 	case OAND:
 		v := int64(-1)
 		if Smallintconst(n.Left) {
-			v = n.Left.Int()
+			v = n.Left.Int64()
 		} else if Smallintconst(n.Right) {
-			v = n.Right.Int()
+			v = n.Right.Int64()
 		}
 		if 0 <= v && v < max {
@@ -3688,7 +3688,7 @@ func bounded(n *Node, max int64) bool {
 	case OMOD:
 		if !sign && Smallintconst(n.Right) {
-			v := n.Right.Int()
+			v := n.Right.Int64()
 			if 0 <= v && v <= max {
 				return true
 			}
@@ -3696,7 +3696,7 @@ func bounded(n *Node, max int64) bool {
 	case ODIV:
 		if !sign && Smallintconst(n.Right) {
-			v := n.Right.Int()
+			v := n.Right.Int64()
 			for bits > 0 && v >= 2 {
 				bits--
 				v >>= 1
@@ -3705,7 +3705,7 @@ func bounded(n *Node, max int64) bool {
 	case ORSH:
 		if !sign && Smallintconst(n.Right) {
-			v := n.Right.Int()
+			v := n.Right.Int64()
 			if v > int64(bits) {
 				return true
 			}
...
@@ -264,7 +264,7 @@ func cgen_shift(op gc.Op, bounded bool, nl *gc.Node, nr *gc.Node, res *gc.Node)
 		var n1 gc.Node
 		gc.Regalloc(&n1, nl.Type, res)
 		gc.Cgen(nl, &n1)
-		sc := uint64(nr.Int())
+		sc := uint64(nr.Int64())
 		if sc >= uint64(nl.Type.Width*8) {
 			// large shift gets 2 shifts by width-1
 			var n3 gc.Node
...
@@ -138,9 +138,9 @@ func dodiv(op gc.Op, nl *gc.Node, nr *gc.Node, res *gc.Node) {
 	check := false
 	if t.IsSigned() {
 		check = true
-		if gc.Isconst(nl, gc.CTINT) && nl.Int() != -(1<<uint64(t.Width*8-1)) {
+		if gc.Isconst(nl, gc.CTINT) && nl.Int64() != -(1<<uint64(t.Width*8-1)) {
 			check = false
-		} else if gc.Isconst(nr, gc.CTINT) && nr.Int() != -1 {
+		} else if gc.Isconst(nr, gc.CTINT) && nr.Int64() != -1 {
 			check = false
 		}
 	}
@@ -303,7 +303,7 @@ func cgen_shift(op gc.Op, bounded bool, nl *gc.Node, nr *gc.Node, res *gc.Node)
 		var n1 gc.Node
 		gc.Regalloc(&n1, nl.Type, res)
 		gc.Cgen(nl, &n1)
-		sc := uint64(nr.Int())
+		sc := uint64(nr.Int64())
 		if sc >= uint64(nl.Type.Width*8) {
 			// large shift gets 2 shifts by width-1
 			var n3 gc.Node
...
@@ -130,7 +130,7 @@ func ginscmp(op gc.Op, t *gc.Type, n1, n2 *gc.Node, likely int) *obj.Prog {
 	gc.Cgen(n1, &g1)
 	gmove(&g1, &r1)
 	if t.IsInteger() && gc.Isconst(n2, gc.CTINT) {
-		ginscon2(optoas(gc.OCMP, t), &r1, n2.Int())
+		ginscon2(optoas(gc.OCMP, t), &r1, n2.Int64())
 	} else {
 		gc.Regalloc(&r2, t, n2)
 		gc.Regalloc(&g2, n1.Type, &r2)
...
@@ -162,7 +162,7 @@ func cgen64(n *gc.Node, res *gc.Node) {
 	// shld hi:lo, c
 	// shld lo:t, c
 	case gc.OLROT:
-		v := uint64(r.Int())
+		v := uint64(r.Int64())
 		if v >= 32 {
 			// reverse during load to do the first 32 bits of rotate
@@ -189,7 +189,7 @@ func cgen64(n *gc.Node, res *gc.Node) {
 	case gc.OLSH:
 		if r.Op == gc.OLITERAL {
-			v := uint64(r.Int())
+			v := uint64(r.Int64())
 			if v >= 64 {
 				if gc.Is64(r.Type) {
 					splitclean()
@@ -278,7 +278,7 @@ func cgen64(n *gc.Node, res *gc.Node) {
 	case gc.ORSH:
 		if r.Op == gc.OLITERAL {
-			v := uint64(r.Int())
+			v := uint64(r.Int64())
 			if v >= 64 {
 				if gc.Is64(r.Type) {
 					splitclean()
@@ -400,8 +400,8 @@ func cgen64(n *gc.Node, res *gc.Node) {
 		if lo2.Op == gc.OLITERAL {
 			// special cases for constants.
-			lv := uint32(lo2.Int())
-			hv := uint32(hi2.Int())
+			lv := uint32(lo2.Int64())
+			hv := uint32(hi2.Int64())
 			splitclean() // right side
 			split64(res, &lo2, &hi2)
 			switch n.Op {
...
@@ -203,9 +203,9 @@ func dodiv(op gc.Op, nl *gc.Node, nr *gc.Node, res *gc.Node, ax *gc.Node, dx *gc
 	check := false
 	if t.IsSigned() {
 		check = true
-		if gc.Isconst(nl, gc.CTINT) && nl.Int() != -1<<uint64(t.Width*8-1) {
+		if gc.Isconst(nl, gc.CTINT) && nl.Int64() != -1<<uint64(t.Width*8-1) {
 			check = false
-		} else if gc.Isconst(nr, gc.CTINT) && nr.Int() != -1 {
+		} else if gc.Isconst(nr, gc.CTINT) && nr.Int64() != -1 {
 			check = false
 		}
 	}
@@ -378,7 +378,7 @@ func cgen_shift(op gc.Op, bounded bool, nl *gc.Node, nr *gc.Node, res *gc.Node)
 		var n1 gc.Node
 		gc.Regalloc(&n1, nl.Type, res)
 		gmove(&n2, &n1)
-		sc := uint64(nr.Int())
+		sc := uint64(nr.Int64())
 		if sc >= uint64(nl.Type.Width*8) {
 			// large shift gets 2 shifts by width-1
 			gins(a, ncon(uint32(w)-1), &n1)
...
@@ -750,7 +750,7 @@ func split64(n *gc.Node, lo *gc.Node, hi *gc.Node) {
 	case gc.OLITERAL:
 		var n1 gc.Node
 		n.Convconst(&n1, n.Type)
-		i := n1.Int()
+		i := n1.Int64()
 		gc.Nodconst(lo, gc.Types[gc.TUINT32], int64(uint32(i)))
 		i >>= 32
 		if n.Type.Etype == gc.TINT64 {
...