Commit 8e714aab authored by Robert Griesemer's avatar Robert Griesemer

- removed exp/parser (support for old semicolon syntax)

- go/ast: removed StringList (not needed anymore)
- go/ast: changed import path and field list tag to a single string
- updated all dependencies

R=rsc
CC=golang-dev
https://golang.org/cl/217056
parent cf743c1b
...@@ -90,14 +90,14 @@ func openProg(name string, p *Prog) { ...@@ -90,14 +90,14 @@ func openProg(name string, p *Prog) {
ws := 0 ws := 0
for _, spec := range d.Specs { for _, spec := range d.Specs {
s, ok := spec.(*ast.ImportSpec) s, ok := spec.(*ast.ImportSpec)
if !ok || len(s.Path) != 1 || string(s.Path[0].Value) != `"C"` { if !ok || string(s.Path.Value) != `"C"` {
d.Specs[ws] = spec d.Specs[ws] = spec
ws++ ws++
continue continue
} }
sawC = true sawC = true
if s.Name != nil { if s.Name != nil {
error(s.Path[0].Pos(), `cannot rename import "C"`) error(s.Path.Pos(), `cannot rename import "C"`)
} }
if s.Doc != nil { if s.Doc != nil {
p.Preamble += doc.CommentText(s.Doc) + "\n" p.Preamble += doc.CommentText(s.Doc) + "\n"
...@@ -168,7 +168,6 @@ func walk(x interface{}, p *Prog, context string) { ...@@ -168,7 +168,6 @@ func walk(x interface{}, p *Prog, context string) {
case *ast.Ident: case *ast.Ident:
case *ast.Ellipsis: case *ast.Ellipsis:
case *ast.BasicLit: case *ast.BasicLit:
case *ast.StringList:
case *ast.FuncLit: case *ast.FuncLit:
walk(n.Type, p, "type") walk(n.Type, p, "type")
walk(n.Body, p, "stmt") walk(n.Body, p, "stmt")
......
...@@ -479,9 +479,7 @@ func (x *Indexer) visitSpec(spec ast.Spec, isVarDecl bool) { ...@@ -479,9 +479,7 @@ func (x *Indexer) visitSpec(spec ast.Spec, isVarDecl bool) {
case *ast.ImportSpec: case *ast.ImportSpec:
x.visitComment(n.Doc) x.visitComment(n.Doc)
x.visitIdent(ImportDecl, n.Name) x.visitIdent(ImportDecl, n.Name)
for _, s := range n.Path { ast.Walk(x, n.Path)
ast.Walk(x, s)
}
x.visitComment(n.Comment) x.visitComment(n.Comment)
case *ast.ValueSpec: case *ast.ValueSpec:
...@@ -524,9 +522,7 @@ func (x *Indexer) Visit(node interface{}) ast.Visitor { ...@@ -524,9 +522,7 @@ func (x *Indexer) Visit(node interface{}) ast.Visitor {
x.visitIdent(VarDecl, m) x.visitIdent(VarDecl, m)
} }
ast.Walk(x, n.Type) ast.Walk(x, n.Type)
for _, s := range n.Tag { ast.Walk(x, n.Tag)
ast.Walk(x, s)
}
x.visitComment(n.Comment) x.visitComment(n.Comment)
case *ast.DeclStmt: case *ast.DeclStmt:
......
...@@ -29,14 +29,6 @@ The flags are: ...@@ -29,14 +29,6 @@ The flags are:
-tabwidth=8 -tabwidth=8
tab width in spaces. tab width in spaces.
Flags to aid the transition to the new semicolon-free syntax (these flags will be
removed eventually):
-oldparser=true
parse old syntax (required semicolons).
-oldprinter=true
print old syntax (required semicolons).
Debugging flags: Debugging flags:
-trace -trace
......
...@@ -6,7 +6,6 @@ package main ...@@ -6,7 +6,6 @@ package main
import ( import (
"bytes" "bytes"
oldParser "exp/parser"
"flag" "flag"
"fmt" "fmt"
"go/ast" "go/ast"
...@@ -35,9 +34,6 @@ var ( ...@@ -35,9 +34,6 @@ var (
tabWidth = flag.Int("tabwidth", 8, "tab width") tabWidth = flag.Int("tabwidth", 8, "tab width")
tabIndent = flag.Bool("tabindent", true, "indent with tabs independent of -spaces") tabIndent = flag.Bool("tabindent", true, "indent with tabs independent of -spaces")
useSpaces = flag.Bool("spaces", true, "align with spaces instead of tabs") useSpaces = flag.Bool("spaces", true, "align with spaces instead of tabs")
// semicolon transition
useOldParser = flag.Bool("oldparser", false, "parse old syntax (required semicolons)")
) )
...@@ -96,16 +92,12 @@ func processFile(f *os.File) os.Error { ...@@ -96,16 +92,12 @@ func processFile(f *os.File) os.Error {
return err return err
} }
var file *ast.File var scope *ast.Scope
if *useOldParser { if *debug {
file, err = oldParser.ParseFile(f.Name(), src, parserMode) scope = ast.NewScope(nil)
} else {
var scope *ast.Scope
if *debug {
scope = ast.NewScope(nil)
}
file, err = parser.ParseFile(f.Name(), src, scope, parserMode)
} }
file, err := parser.ParseFile(f.Name(), src, scope, parserMode)
if err != nil { if err != nil {
return err return err
} }
......
...@@ -64,7 +64,6 @@ DIRS=\ ...@@ -64,7 +64,6 @@ DIRS=\
exp/eval\ exp/eval\
exp/exception\ exp/exception\
exp/iterable\ exp/iterable\
exp/parser\
expvar\ expvar\
flag\ flag\
fmt\ fmt\
......
...@@ -43,7 +43,7 @@ func runTests(t *testing.T, baseName string, tests []test) { ...@@ -43,7 +43,7 @@ func runTests(t *testing.T, baseName string, tests []test) {
func (a test) run(t *testing.T, name string) { func (a test) run(t *testing.T, name string) {
w := newTestWorld() w := newTestWorld()
for _, j := range a { for _, j := range a {
src := j.code src := j.code + ";" // trailing semicolon to finish statement
if noisy { if noisy {
println("code:", src) println("code:", src)
} }
......
...@@ -627,20 +627,6 @@ func (a *exprCompiler) compile(x ast.Expr, callCtx bool) *expr { ...@@ -627,20 +627,6 @@ func (a *exprCompiler) compile(x ast.Expr, callCtx bool) *expr {
} }
return ei.compileStarExpr(v) return ei.compileStarExpr(v)
case *ast.StringList:
strings := make([]*expr, len(x.Strings))
bad := false
for i, s := range x.Strings {
strings[i] = a.compile(s, false)
if strings[i] == nil {
bad = true
}
}
if bad {
return nil
}
return ei.compileStringList(strings)
case *ast.StructType: case *ast.StructType:
goto notimpl goto notimpl
......
...@@ -51,9 +51,6 @@ var exprTests = []test{ ...@@ -51,9 +51,6 @@ var exprTests = []test{
CErr("\"\\z\"", illegalEscape), CErr("\"\\z\"", illegalEscape),
CErr("\"abc", "string not terminated"), CErr("\"abc", "string not terminated"),
Val("\"abc\" \"def\"", "abcdef"),
CErr("\"abc\" \"\\z\"", illegalEscape),
Val("(i)", 1), Val("(i)", 1),
Val("ai[0]", 1), Val("ai[0]", 1),
......
...@@ -202,8 +202,8 @@ var stmtTests = []test{ ...@@ -202,8 +202,8 @@ var stmtTests = []test{
Run("fn1 := func() int { L: goto L; i = 2 }"), Run("fn1 := func() int { L: goto L; i = 2 }"),
Run("fn1 := func() int { return 1; L: goto L }"), Run("fn1 := func() int { return 1; L: goto L }"),
// Scope checking // Scope checking
Run("fn1 := func() { { L: x:=1 } goto L }"), Run("fn1 := func() { { L: x:=1 }; goto L }"),
CErr("fn1 := func() { { x:=1; L: } goto L }", "into scope"), CErr("fn1 := func() { { x:=1; L: }; goto L }", "into scope"),
CErr("fn1 := func() { goto L; x:=1; L: }", "into scope"), CErr("fn1 := func() { goto L; x:=1; L: }", "into scope"),
Run("fn1 := func() { goto L; { L: x:=1 } }"), Run("fn1 := func() { goto L; { L: x:=1 } }"),
CErr("fn1 := func() { goto L; { x:=1; L: } }", "into scope"), CErr("fn1 := func() { goto L; { x:=1; L: } }", "into scope"),
...@@ -279,10 +279,10 @@ var stmtTests = []test{ ...@@ -279,10 +279,10 @@ var stmtTests = []test{
// Scoping // Scoping
Val2("for i := 2; true; { i2 = i; i := 3; break }", "i", 1, "i2", 2), Val2("for i := 2; true; { i2 = i; i := 3; break }", "i", 1, "i2", 2),
// Labeled break/continue // Labeled break/continue
Val1("L1: for { L2: for { i+=2; break L1; i+=4 } i+=8 }", "i", 1+2), Val1("L1: for { L2: for { i+=2; break L1; i+=4 }; i+=8 }", "i", 1+2),
Val1("L1: for { L2: for { i+=2; break L2; i+=4 } i+=8; break; i+=16 }", "i", 1+2+8), Val1("L1: for { L2: for { i+=2; break L2; i+=4 }; i+=8; break; i+=16 }", "i", 1+2+8),
CErr("L1: { for { break L1 } }", "break.*not defined"), CErr("L1: { for { break L1 } }", "break.*not defined"),
CErr("L1: for {} for { break L1 }", "break.*not defined"), CErr("L1: for {}; for { break L1 }", "break.*not defined"),
CErr("L1:; for { break L1 }", "break.*not defined"), CErr("L1:; for { break L1 }", "break.*not defined"),
Val2("L1: for i = 0; i < 2; i++ { L2: for { i2++; continue L1; i2++ } }", "i", 2, "i2", 4), Val2("L1: for i = 0; i < 2; i++ { L2: for { i2++; continue L1; i2++ } }", "i", 2, "i2", 4),
CErr("L1: { for { continue L1 } }", "continue.*not defined"), CErr("L1: { for { continue L1 } }", "continue.*not defined"),
...@@ -294,7 +294,7 @@ var stmtTests = []test{ ...@@ -294,7 +294,7 @@ var stmtTests = []test{
CErr("fn1 := func() int{ for {break} }", "return"), CErr("fn1 := func() int{ for {break} }", "return"),
Run("fn1 := func() int{ for { for {break} } }"), Run("fn1 := func() int{ for { for {break} } }"),
CErr("fn1 := func() int{ L1: for { for {break L1} } }", "return"), CErr("fn1 := func() int{ L1: for { for {break L1} } }", "return"),
Run("fn1 := func() int{ for true {} return 1 }"), Run("fn1 := func() int{ for true {}; return 1 }"),
// Selectors // Selectors
Val1("var x struct { a int; b int }; x.a = 42; i = x.a", "i", 42), Val1("var x struct { a int; b int }; x.a = 42; i = x.a", "i", 42),
...@@ -305,7 +305,7 @@ var stmtTests = []test{ ...@@ -305,7 +305,7 @@ var stmtTests = []test{
CErr("type T struct { x int }; type U struct { x int }; var y struct { T; U }; y.x = 42", "ambiguous.*\tT\\.x\n\tU\\.x"), CErr("type T struct { x int }; type U struct { x int }; var y struct { T; U }; y.x = 42", "ambiguous.*\tT\\.x\n\tU\\.x"),
CErr("type T struct { *T }; var x T; x.foo", "no field"), CErr("type T struct { *T }; var x T; x.foo", "no field"),
Val1("fib := func(int) int{return 0;}; fib = func(v int) int { if v < 2 { return 1 } return fib(v-1)+fib(v-2) }; i = fib(20)", "i", 10946), Val1("fib := func(int) int{return 0;}; fib = func(v int) int { if v < 2 { return 1 }; return fib(v-1)+fib(v-2) }; i = fib(20)", "i", 10946),
// Make slice // Make slice
Val2("x := make([]int, 2); x[0] = 42; i, i2 = x[0], x[1]", "i", 42, "i2", 0), Val2("x := make([]int, 2); x[0] = 42; i, i2 = x[0], x[1]", "i", 42, "i2", 0),
...@@ -335,7 +335,7 @@ var stmtTests = []test{ ...@@ -335,7 +335,7 @@ var stmtTests = []test{
RErr("x := make(map[int] int); i = x[1]", "key '1' not found"), RErr("x := make(map[int] int); i = x[1]", "key '1' not found"),
// Functions // Functions
Val2("func fib(n int) int { if n <= 2 { return n } return fib(n-1) + fib(n-2) }", "fib(4)", 5, "fib(10)", 89), Val2("func fib(n int) int { if n <= 2 { return n }; return fib(n-1) + fib(n-2) }", "fib(4)", 5, "fib(10)", 89),
Run("func f1(){}"), Run("func f1(){}"),
Run2("func f1(){}", "f1()"), Run2("func f1(){}", "f1()"),
} }
......
...@@ -9,7 +9,7 @@ package eval ...@@ -9,7 +9,7 @@ package eval
import ( import (
"go/ast" "go/ast"
parser "exp/parser" "go/parser"
"go/scanner" "go/scanner"
"go/token" "go/token"
"os" "os"
...@@ -136,13 +136,13 @@ func (e *exprCode) Run() (Value, os.Error) { ...@@ -136,13 +136,13 @@ func (e *exprCode) Run() (Value, os.Error) {
} }
func (w *World) Compile(text string) (Code, os.Error) { func (w *World) Compile(text string) (Code, os.Error) {
stmts, err := parser.ParseStmtList("input", text) stmts, err := parser.ParseStmtList("input", text, nil)
if err == nil { if err == nil {
return w.CompileStmtList(stmts) return w.CompileStmtList(stmts)
} }
// Otherwise try as DeclList. // Otherwise try as DeclList.
decls, err1 := parser.ParseDeclList("input", text) decls, err1 := parser.ParseDeclList("input", text, nil)
if err1 == nil { if err1 == nil {
return w.CompileDeclList(decls) return w.CompileDeclList(decls)
} }
......
# Copyright 2009 The Go Authors. All rights reserved.
# Use of this source code is governed by a BSD-style
# license that can be found in the LICENSE file.

# Pre-Go1 package makefile: pulls in the per-architecture settings,
# then the shared package build rules from Make.pkg.
include ../../../Make.$(GOARCH)

# Import path the package installs under.
TARG=exp/parser

# Go sources compiled into the package (test files are handled by the
# rules in Make.pkg and are not listed here).
GOFILES=\
	interface.go\
	parser.go\

include ../../../Make.pkg
// Copyright 2009 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// This file contains the exported entry points for invoking the parser.
package oldParser
import (
"bytes"
"fmt"
"go/ast"
"go/scanner"
"io"
"io/ioutil"
"os"
pathutil "path"
"strings"
)
// If src != nil, readSource converts src to a []byte if possible;
// otherwise it returns an error. If src == nil, readSource returns
// the result of reading the file specified by filename.
//
// NOTE(review): this file targets a pre-Go1 toolchain; os.Error,
// os.ErrorString, and strings.Bytes were removed in later releases.
func readSource(filename string, src interface{}) ([]byte, os.Error) {
	if src != nil {
		switch s := src.(type) {
		case string:
			// strings.Bytes is the pre-Go1 spelling of []byte(s).
			return strings.Bytes(s), nil
		case []byte:
			return s, nil
		case *bytes.Buffer:
			// is io.Reader, but src is already available in []byte form
			if s != nil {
				return s.Bytes(), nil
			}
			// A nil *bytes.Buffer does not match any return above and so
			// falls out of the switch to the ReadFile call below.
		case io.Reader:
			var buf bytes.Buffer
			_, err := io.Copy(&buf, s)
			if err != nil {
				return nil, err
			}
			return buf.Bytes(), nil
		default:
			return nil, os.ErrorString("invalid source")
		}
	}
	return ioutil.ReadFile(filename)
}
// ParseExpr parses a Go expression and returns the corresponding
// AST node. The filename and src arguments have the same interpretation
// as for ParseFile. If there is an error, the result expression
// may be nil or contain a partial AST.
//
func ParseExpr(filename string, src interface{}) (ast.Expr, os.Error) {
	data, err := readSource(filename, src)
	if err != nil {
		return nil, err
	}
	// parser and its methods (init, parseExpr, GetError) are declared in
	// parser.go of this package (not shown here).
	var p parser
	p.init(filename, data, 0) // mode 0: no optional parser features
	return p.parseExpr(), p.GetError(scanner.Sorted)
}
// ParseStmtList parses a list of Go statements and returns the list
// of corresponding AST nodes. The filename and src arguments have the same
// interpretation as for ParseFile. If there is an error, the node
// list may be nil or contain partial ASTs.
//
func ParseStmtList(filename string, src interface{}) ([]ast.Stmt, os.Error) {
	data, err := readSource(filename, src)
	if err != nil {
		return nil, err
	}
	// parser is declared in parser.go of this package.
	var p parser
	p.init(filename, data, 0) // mode 0: no optional parser features
	return p.parseStmtList(), p.GetError(scanner.Sorted)
}
// ParseDeclList parses a list of Go declarations and returns the list
// of corresponding AST nodes. The filename and src arguments have the same
// interpretation as for ParseFile. If there is an error, the node
// list may be nil or contain partial ASTs.
//
func ParseDeclList(filename string, src interface{}) ([]ast.Decl, os.Error) {
	data, err := readSource(filename, src)
	if err != nil {
		return nil, err
	}
	// parser is declared in parser.go of this package.
	var p parser
	p.init(filename, data, 0) // mode 0: no optional parser features
	return p.parseDeclList(), p.GetError(scanner.Sorted)
}
// ParseFile parses a Go source file and returns a File node.
//
// If src != nil, ParseFile parses the file source from src. src may
// be provided in a variety of formats. At the moment the following types
// are supported: string, []byte, and io.Reader. In this case, filename is
// only used for source position information and error messages.
//
// If src == nil, ParseFile parses the file specified by filename.
//
// The mode parameter controls the amount of source text parsed and other
// optional parser functionality.
//
// If the source couldn't be read, the returned AST is nil and the error
// indicates the specific failure. If the source was read but syntax
// errors were found, the result is a partial AST (with ast.BadX nodes
// representing the fragments of erroneous source code). Multiple errors
// are returned via a scanner.ErrorList which is sorted by file position.
//
func ParseFile(filename string, src interface{}, mode uint) (*ast.File, os.Error) {
	data, err := readSource(filename, src)
	if err != nil {
		return nil, err
	}
	// parser is declared in parser.go of this package.
	var p parser
	p.init(filename, data, mode)
	// NoMultiples: presumably limits reporting to one error per line —
	// confirm against the pre-Go1 go/scanner documentation.
	return p.parseFile(), p.GetError(scanner.NoMultiples)
}
// ParsePkgFile parses the file specified by filename and returns the
// corresponding AST. If the file cannot be read, has syntax errors, or
// does not belong to the package (i.e., pkgname != "" and the package
// name in the file doesn't match pkgname), an error is returned.
//
func ParsePkgFile(pkgname, filename string, mode uint) (*ast.File, os.Error) {
	// Read the file once up front so the package-clause check and the
	// full parse below operate on the same bytes.
	src, err := ioutil.ReadFile(filename)
	if err != nil {
		return nil, err
	}

	if pkgname != "" {
		// Cheap pre-check: parse only the package clause.
		// PackageClauseOnly is declared elsewhere in this package.
		prog, err := ParseFile(filename, src, PackageClauseOnly)
		if err != nil {
			return nil, err
		}
		// Pre-Go1 ast.Ident exposed the name via a Name() method.
		if prog.Name.Name() != pkgname {
			return nil, os.NewError(fmt.Sprintf("multiple packages found: %s, %s", prog.Name.Name(), pkgname))
		}
		if mode == PackageClauseOnly {
			// Caller only wanted the package clause; done.
			return prog, nil
		}
	}

	return ParseFile(filename, src, mode)
}
// ParsePackage parses all files in the directory specified by path and
// returns an AST representing the package found. The set of files may be
// restricted by providing a non-nil filter function; only the files with
// os.Dir entries passing through the filter are considered.
// If ParsePackage does not find exactly one package, it returns an error.
//
func ParsePackage(path string, filter func(*os.Dir) bool, mode uint) (*ast.Package, os.Error) {
	fd, err := os.Open(path, os.O_RDONLY, 0)
	if err != nil {
		return nil, err
	}
	defer fd.Close()

	list, err := fd.Readdir(-1) // -1: read all directory entries
	if err != nil {
		return nil, err
	}

	name := "" // package name; set from the first file successfully parsed
	files := make(map[string]*ast.File)
	for i := 0; i < len(list); i++ {
		entry := &list[i]
		if filter == nil || filter(entry) {
			filename := pathutil.Join(path, entry.Name)
			// Passing the current name makes ParsePkgFile reject files that
			// belong to a different package (first file: name == "" skips
			// the check).
			src, err := ParsePkgFile(name, filename, mode)
			if err != nil {
				return nil, err
			}
			files[filename] = src
			if name == "" {
				name = src.Name.Name()
			}
		}
	}

	if len(files) == 0 {
		return nil, os.NewError(path + ": no package found")
	}
	// Positional literal: name, nil, files — presumably Name, Scope, Files
	// in the pre-Go1 ast.Package; confirm against that era's go/ast.
	return &ast.Package{name, nil, files}, nil
}
This diff is collapsed.
// Copyright 2009 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package oldParser
import (
"os"
"testing"
)
// illegalInputs are src arguments that ParseFile must reject with a
// non-nil error: nil, a type readSource does not accept (float), a nil
// []byte (presumably parsed as empty input), and a string that is not
// valid Go source.
var illegalInputs = []interface{}{
	nil,
	3.14,
	[]byte(nil),
	"foo!",
}
// TestParseIllegalInputs verifies that ParseFile reports an error for
// every entry in illegalInputs.
func TestParseIllegalInputs(t *testing.T) {
	for i := 0; i < len(illegalInputs); i++ {
		src := illegalInputs[i]
		if _, err := ParseFile("", src, 0); err == nil {
			t.Errorf("ParseFile(%v) should have failed", src)
		}
	}
}
// validPrograms are source texts the parser must accept without error
// (written for the old, semicolon-based syntax this package parses).
var validPrograms = []interface{}{
	`package main`,
	`package main import "fmt" func main() { fmt.Println("Hello, World!") }`,
	`package main func main() { if f(T{}) {} }`,
}
// TestParseValidPrograms verifies that ParseFile accepts every entry in
// validPrograms without error.
func TestParseValidPrograms(t *testing.T) {
	for i := 0; i < len(validPrograms); i++ {
		src := validPrograms[i]
		if _, err := ParseFile("", src, 0); err != nil {
			t.Errorf("ParseFile(%q): %v", src, err)
		}
	}
}
// validFiles lists this package's own source files; used by the
// (disabled) TestParse3 below.
var validFiles = []string{
	"parser.go",
	"parser_test.go",
}
// TestParse3 would parse this package's own files, but is deliberately
// disabled (unconditional early return): these files use the new
// semicolon-free syntax, while this parser accepts only the old one.
func TestParse3(t *testing.T) {
	return // disabled since the parser only accepts old syntax
	for _, filename := range validFiles {
		_, err := ParseFile(filename, nil, 0)
		if err != nil {
			t.Errorf("ParseFile(%s): %v", filename, err)
		}
	}
}
// nameFilter reports whether filename is one of the three source files
// that make up this package.
func nameFilter(filename string) bool {
	return filename == "parser.go" ||
		filename == "interface.go" ||
		filename == "parser_test.go"
}
// dirFilter adapts nameFilter to the *os.Dir-based signature expected by
// ParsePackage (os.Dir is the pre-Go1 directory-entry type).
func dirFilter(d *os.Dir) bool { return nameFilter(d.Name) }
// TestParse4 would parse the package in the current directory via
// ParsePackage and check its name and file set; it is disabled
// (unconditional early return) for the same reason as TestParse3.
func TestParse4(t *testing.T) {
	return // disabled since the parser only accepts old syntax
	path := "."
	pkg, err := ParsePackage(path, dirFilter, 0)
	if err != nil {
		t.Fatalf("ParsePackage(%s): %v", path, err)
	}
	if pkg.Name != "oldParser" {
		t.Errorf("incorrect package name: %s", pkg.Name)
	}
	// Every file found must pass nameFilter, i.e. belong to this package.
	for filename, _ := range pkg.Files {
		if !nameFilter(filename) {
			t.Errorf("unexpected package file: %s", filename)
		}
	}
}
...@@ -90,7 +90,7 @@ type Field struct { ...@@ -90,7 +90,7 @@ type Field struct {
Doc *CommentGroup // associated documentation; or nil Doc *CommentGroup // associated documentation; or nil
Names []*Ident // field/method/parameter names; or nil if anonymous field Names []*Ident // field/method/parameter names; or nil if anonymous field
Type Expr // field/method/parameter type Type Expr // field/method/parameter type
Tag []*BasicLit // field tag; or nil Tag *BasicLit // field tag; or nil
Comment *CommentGroup // line comments; or nil Comment *CommentGroup // line comments; or nil
} }
...@@ -136,17 +136,6 @@ type ( ...@@ -136,17 +136,6 @@ type (
Value []byte // literal string; e.g. 42, 0x7f, 3.14, 1e-9, 'a', '\x7f', "foo" or `\m\n\o` Value []byte // literal string; e.g. 42, 0x7f, 3.14, 1e-9, 'a', '\x7f', "foo" or `\m\n\o`
} }
// A StringList node represents a sequence of adjacent string literals.
// A single string literal (common case) is represented by a BasicLit
// node; StringList nodes are used only if there are two or more string
// literals in a sequence.
// TODO(gri) Deprecated. StringLists are only created by exp/parser;
// Remove when exp/parser is removed.
//
StringList struct {
Strings []*BasicLit // list of strings, len(Strings) > 1
}
// A FuncLit node represents a function literal. // A FuncLit node represents a function literal.
FuncLit struct { FuncLit struct {
Type *FuncType // function type Type *FuncType // function type
...@@ -309,7 +298,6 @@ type ( ...@@ -309,7 +298,6 @@ type (
// Pos() implementations for expression/type where the position // Pos() implementations for expression/type where the position
// corresponds to the position of a sub-node. // corresponds to the position of a sub-node.
// //
func (x *StringList) Pos() token.Position { return x.Strings[0].Pos() }
func (x *FuncLit) Pos() token.Position { return x.Type.Pos() } func (x *FuncLit) Pos() token.Position { return x.Type.Pos() }
func (x *CompositeLit) Pos() token.Position { return x.Type.Pos() } func (x *CompositeLit) Pos() token.Position { return x.Type.Pos() }
func (x *SelectorExpr) Pos() token.Position { return x.X.Pos() } func (x *SelectorExpr) Pos() token.Position { return x.X.Pos() }
...@@ -327,7 +315,6 @@ func (x *BadExpr) exprNode() {} ...@@ -327,7 +315,6 @@ func (x *BadExpr) exprNode() {}
func (x *Ident) exprNode() {} func (x *Ident) exprNode() {}
func (x *Ellipsis) exprNode() {} func (x *Ellipsis) exprNode() {}
func (x *BasicLit) exprNode() {} func (x *BasicLit) exprNode() {}
func (x *StringList) exprNode() {}
func (x *FuncLit) exprNode() {} func (x *FuncLit) exprNode() {}
func (x *CompositeLit) exprNode() {} func (x *CompositeLit) exprNode() {}
func (x *ParenExpr) exprNode() {} func (x *ParenExpr) exprNode() {}
...@@ -604,7 +591,7 @@ type ( ...@@ -604,7 +591,7 @@ type (
ImportSpec struct { ImportSpec struct {
Doc *CommentGroup // associated documentation; or nil Doc *CommentGroup // associated documentation; or nil
Name *Ident // local package name (including "."); or nil Name *Ident // local package name (including "."); or nil
Path []*BasicLit // package path Path *BasicLit // package path
Comment *CommentGroup // line comments; or nil Comment *CommentGroup // line comments; or nil
} }
...@@ -634,7 +621,7 @@ func (s *ImportSpec) Pos() token.Position { ...@@ -634,7 +621,7 @@ func (s *ImportSpec) Pos() token.Position {
if s.Name != nil { if s.Name != nil {
return s.Name.Pos() return s.Name.Pos()
} }
return s.Path[0].Pos() return s.Path.Pos()
} }
func (s *ValueSpec) Pos() token.Position { return s.Names[0].Pos() } func (s *ValueSpec) Pos() token.Position { return s.Names[0].Pos() }
......
...@@ -74,20 +74,13 @@ func Walk(v Visitor, node interface{}) { ...@@ -74,20 +74,13 @@ func Walk(v Visitor, node interface{}) {
walkCommentGroup(v, n.Doc) walkCommentGroup(v, n.Doc)
Walk(v, n.Names) Walk(v, n.Names)
Walk(v, n.Type) Walk(v, n.Type)
for _, x := range n.Tag { Walk(v, n.Tag)
Walk(v, x)
}
walkCommentGroup(v, n.Comment) walkCommentGroup(v, n.Comment)
// Expressions // Expressions
case *BadExpr, *Ident, *Ellipsis, *BasicLit: case *BadExpr, *Ident, *Ellipsis, *BasicLit:
// nothing to do // nothing to do
case *StringList:
for _, x := range n.Strings {
Walk(v, x)
}
case *FuncLit: case *FuncLit:
if n != nil { if n != nil {
Walk(v, n.Type) Walk(v, n.Type)
...@@ -249,9 +242,7 @@ func Walk(v Visitor, node interface{}) { ...@@ -249,9 +242,7 @@ func Walk(v Visitor, node interface{}) {
case *ImportSpec: case *ImportSpec:
walkCommentGroup(v, n.Doc) walkCommentGroup(v, n.Doc)
walkIdent(v, n.Name) walkIdent(v, n.Name)
for _, x := range n.Path { Walk(v, n.Path)
Walk(v, x)
}
walkCommentGroup(v, n.Comment) walkCommentGroup(v, n.Comment)
case *ValueSpec: case *ValueSpec:
......
...@@ -526,11 +526,10 @@ func (p *parser) parseFieldDecl() *ast.Field { ...@@ -526,11 +526,10 @@ func (p *parser) parseFieldDecl() *ast.Field {
typ := p.tryType() typ := p.tryType()
// optional tag // optional tag
var tag []*ast.BasicLit var tag *ast.BasicLit
if p.tok == token.STRING { if p.tok == token.STRING {
x := &ast.BasicLit{p.pos, p.tok, p.lit} tag = &ast.BasicLit{p.pos, p.tok, p.lit}
p.next() p.next()
tag = []*ast.BasicLit{x}
} }
// analyze case // analyze case
...@@ -1129,7 +1128,6 @@ func (p *parser) checkExpr(x ast.Expr) ast.Expr { ...@@ -1129,7 +1128,6 @@ func (p *parser) checkExpr(x ast.Expr) ast.Expr {
case *ast.BadExpr: case *ast.BadExpr:
case *ast.Ident: case *ast.Ident:
case *ast.BasicLit: case *ast.BasicLit:
case *ast.StringList:
case *ast.FuncLit: case *ast.FuncLit:
case *ast.CompositeLit: case *ast.CompositeLit:
case *ast.ParenExpr: case *ast.ParenExpr:
...@@ -1827,11 +1825,10 @@ func parseImportSpec(p *parser, doc *ast.CommentGroup) ast.Spec { ...@@ -1827,11 +1825,10 @@ func parseImportSpec(p *parser, doc *ast.CommentGroup) ast.Spec {
p.declIdent(p.fileScope, ident) p.declIdent(p.fileScope, ident)
} }
var path []*ast.BasicLit var path *ast.BasicLit
if p.tok == token.STRING { if p.tok == token.STRING {
x := &ast.BasicLit{p.pos, p.tok, p.lit} path = &ast.BasicLit{p.pos, p.tok, p.lit}
p.next() p.next()
path = []*ast.BasicLit{x}
} else { } else {
p.expect(token.STRING) // use expect() error handling p.expect(token.STRING) // use expect() error handling
} }
......
...@@ -396,7 +396,7 @@ func (p *printer) fieldList(lbrace token.Position, list []*ast.Field, rbrace tok ...@@ -396,7 +396,7 @@ func (p *printer) fieldList(lbrace token.Position, list []*ast.Field, rbrace tok
p.print(sep) p.print(sep)
} }
p.print(sep) p.print(sep)
p.expr(&ast.StringList{f.Tag}, &ml) p.expr(f.Tag, &ml)
extraTabs = 0 extraTabs = 0
} }
if f.Comment != nil { if f.Comment != nil {
...@@ -680,9 +680,6 @@ func (p *printer) expr1(expr ast.Expr, prec1, depth int, ctxt exprContext, multi ...@@ -680,9 +680,6 @@ func (p *printer) expr1(expr ast.Expr, prec1, depth int, ctxt exprContext, multi
case *ast.BasicLit: case *ast.BasicLit:
p.print(x) p.print(x)
case *ast.StringList:
p.stringList(x.Strings, multiLine)
case *ast.FuncLit: case *ast.FuncLit:
p.expr(x.Type, multiLine) p.expr(x.Type, multiLine)
p.funcBody(x.Body, distance(x.Type.Pos(), p.pos), true, multiLine) p.funcBody(x.Body, distance(x.Type.Pos(), p.pos), true, multiLine)
...@@ -1117,9 +1114,9 @@ func (p *printer) spec(spec ast.Spec, n int, context declContext, multiLine *boo ...@@ -1117,9 +1114,9 @@ func (p *printer) spec(spec ast.Spec, n int, context declContext, multiLine *boo
if s.Name != nil { if s.Name != nil {
p.expr(s.Name, multiLine) p.expr(s.Name, multiLine)
p.print(blank) p.print(blank)
p.moveCommentsAfter(s.Path[0].Pos()) p.moveCommentsAfter(s.Path.Pos())
} }
p.expr(&ast.StringList{s.Path}, multiLine) p.expr(s.Path, multiLine)
comment = s.Comment comment = s.Comment
case *ast.ValueSpec: case *ast.ValueSpec:
......
Markdown is supported
0%
or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment