LEFT | RIGHT |
1 ÿ// Copyright 2009 The Go Authors. All rights reserved.ÿ | 1 // Copyright 2009 The Go Authors. All rights reserved. |
2 ÿ// Use of this source code is governed by a BSD-styleÿ | 2 // Use of this source code is governed by a BSD-style |
3 ÿ// license that can be found in the LICENSE file.ÿ | 3 // license that can be found in the LICENSE file. |
4 | 4 |
5 ÿ// A parser for Go source files. Input may be provided in a variety ofÿ | 5 // A parser for Go source files. Input may be provided in a variety of |
6 ÿ// forms (see the various Parse* functions); the output is an abstractÿ | 6 // forms (see the various Parse* functions); the output is an abstract |
7 ÿ// syntax tree (AST) representing the Go source. The parser is invokedÿ | 7 // syntax tree (AST) representing the Go source. The parser is invoked |
8 ÿ// through one of the Parse* functions.ÿ | 8 // through one of the Parse* functions. |
9 ÿ//ÿ | 9 // |
10 package parser | 10 package parser |
11 | 11 |
12 import ( | 12 import ( |
13 "fmt" | 13 "fmt" |
14 "go/ast" | 14 "go/ast" |
15 "go/scanner" | 15 "go/scanner" |
16 "go/token" | 16 "go/token" |
17 ) | 17 ) |
18 | 18 |
19 | 19 |
20 ÿ// The mode parameter to the Parse* functions is a set of flags (or 0).ÿ | 20 // The mode parameter to the Parse* functions is a set of flags (or 0). |
21 ÿ// They control the amount of source code parsed and other optionalÿ | 21 // They control the amount of source code parsed and other optional |
22 ÿ// parser functionality.ÿ | 22 // parser functionality. |
23 ÿ//ÿ | 23 // |
24 const ( | 24 const ( |
25 » PackageClauseOnly uint = 1 << iota ÿ// parsing stops after package claus
eÿ | 25 » PackageClauseOnly uint = 1 << iota // parsing stops after package clause |
26 » ImportsOnly ÿ// parsing stops after import declar
ationsÿ | 26 » ImportsOnly // parsing stops after import declara
tions |
27 » ParseComments ÿ// parse comments and add them to AS
Tÿ | 27 » ParseComments // parse comments and add them to AST |
28 » Trace ÿ// print a trace of parsed productio
nsÿ | 28 » Trace // print a trace of parsed production
s |
29 » DeclarationErrors ÿ// report declaration errorsÿ | 29 » DeclarationErrors // report declaration errors |
30 ) | 30 ) |
31 | 31 |
32 | 32 |
33 ÿ// The parser structure holds the parser's internal state.ÿ | 33 // The parser structure holds the parser's internal state. |
34 type parser struct { | 34 type parser struct { |
35 file *token.File | 35 file *token.File |
36 scanner.ErrorVector | 36 scanner.ErrorVector |
37 scanner scanner.Scanner | 37 scanner scanner.Scanner |
38 | 38 |
39 » ÿ// Tracing/debuggingÿ | 39 » // Tracing/debugging |
40 » mode uint ÿ// parsing modeÿ | 40 » mode uint // parsing mode |
41 » trace bool ÿ// == (mode & Trace != 0)ÿ | 41 » trace bool // == (mode & Trace != 0) |
42 » indent uint ÿ// indentation used for tracing outputÿ | 42 » indent uint // indentation used for tracing output |
43 | 43 |
44 » ÿ// Commentsÿ | 44 » // Comments |
45 comments []*ast.CommentGroup | 45 comments []*ast.CommentGroup |
46 » leadComment *ast.CommentGroup ÿ// last lead commentÿ | 46 » leadComment *ast.CommentGroup // last lead comment |
47 » lineComment *ast.CommentGroup ÿ// last line commentÿ | 47 » lineComment *ast.CommentGroup // last line comment |
48 | 48 |
49 » ÿ// Next tokenÿ | 49 » // Next token |
50 » pos token.Pos ÿ// token positionÿ | 50 » pos token.Pos // token position |
51 » tok token.Token ÿ// one token look-aheadÿ | 51 » tok token.Token // one token look-ahead |
52 » lit string ÿ// token literalÿ | 52 » lit string // token literal |
53 | 53 |
54 » ÿ// Non-syntactic parser controlÿ | 54 » // Non-syntactic parser control |
55 » exprLev int ÿ// < 0: in control clause, >= 0: in expressionÿ | 55 » exprLev int // < 0: in control clause, >= 0: in expression |
56 | 56 |
57 » ÿ// Ordinary identifier scopesÿ | 57 » // Ordinary identifier scopes |
58 » pkgScope *ast.Scope ÿ// pkgScope.Outer == nilÿ | 58 » pkgScope *ast.Scope // pkgScope.Outer == nil |
59 » topScope *ast.Scope ÿ// top-most scope; may be pkgScopeÿ | 59 » topScope *ast.Scope // top-most scope; may be pkgScope |
60 » unresolved []*ast.Ident ÿ// unresolved identifiersÿ | 60 » unresolved []*ast.Ident // unresolved identifiers |
61 » imports []*ast.ImportSpec ÿ// list of importsÿ | 61 » imports []*ast.ImportSpec // list of imports |
62 | 62 |
63 » ÿ// Label scopeÿ | 63 » // Label scope |
64 » ÿ// (maintained by open/close LabelScope)ÿ | 64 » // (maintained by open/close LabelScope) |
65 » labelScope *ast.Scope ÿ// label scope for current functionÿ | 65 » labelScope *ast.Scope // label scope for current function |
66 » targetStack [][]*ast.Ident ÿ// stack of unresolved labelsÿ | 66 » targetStack [][]*ast.Ident // stack of unresolved labels |
67 } | 67 } |
68 | 68 |
69 | 69 |
70 ÿ// scannerMode returns the scanner mode bits given the parser's mode bits.ÿ | 70 // scannerMode returns the scanner mode bits given the parser's mode bits. |
71 func scannerMode(mode uint) uint { | 71 func scannerMode(mode uint) uint { |
72 var m uint = scanner.InsertSemis | 72 var m uint = scanner.InsertSemis |
73 if mode&ParseComments != 0 { | 73 if mode&ParseComments != 0 { |
74 m |= scanner.ScanComments | 74 m |= scanner.ScanComments |
75 } | 75 } |
76 return m | 76 return m |
77 } | 77 } |
78 | 78 |
79 | 79 |
80 func (p *parser) init(fset *token.FileSet, filename string, src []byte, mode uin
t) { | 80 func (p *parser) init(fset *token.FileSet, filename string, src []byte, mode uin
t) { |
81 p.file = fset.AddFile(filename, fset.Base(), len(src)) | 81 p.file = fset.AddFile(filename, fset.Base(), len(src)) |
82 p.scanner.Init(p.file, src, p, scannerMode(mode)) | 82 p.scanner.Init(p.file, src, p, scannerMode(mode)) |
83 | 83 |
84 p.mode = mode | 84 p.mode = mode |
85 » p.trace = mode&Trace != 0 ÿ// for convenience (p.trace is used frequentl
y)ÿ | 85 » p.trace = mode&Trace != 0 // for convenience (p.trace is used frequently
) |
86 | 86 |
87 p.next() | 87 p.next() |
88 | 88 |
89 » ÿ// set up the pkgScope here (as opposed to in parseFile) becauseÿ | 89 » // set up the pkgScope here (as opposed to in parseFile) because |
90 » ÿ// there are other parser entry points (ParseExpr, etc.)ÿ | 90 » // there are other parser entry points (ParseExpr, etc.) |
91 p.openScope() | 91 p.openScope() |
92 p.pkgScope = p.topScope | 92 p.pkgScope = p.topScope |
93 | 93 |
94 » ÿ// for the same reason, set up a label scopeÿ | 94 » // for the same reason, set up a label scope |
95 p.openLabelScope() | 95 p.openLabelScope() |
96 } | 96 } |
97 | 97 |
98 | 98 |
99 ÿ// ----------------------------------------------------------------------------
ÿ | 99 // ---------------------------------------------------------------------------- |
100 ÿ// Scoping supportÿ | 100 // Scoping support |
101 | 101 |
102 func (p *parser) openScope() { | 102 func (p *parser) openScope() { |
103 p.topScope = ast.NewScope(p.topScope) | 103 p.topScope = ast.NewScope(p.topScope) |
104 } | 104 } |
105 | 105 |
106 | 106 |
107 func (p *parser) closeScope() { | 107 func (p *parser) closeScope() { |
108 p.topScope = p.topScope.Outer | 108 p.topScope = p.topScope.Outer |
109 } | 109 } |
110 | 110 |
111 | 111 |
112 func (p *parser) openLabelScope() { | 112 func (p *parser) openLabelScope() { |
113 p.labelScope = ast.NewScope(p.labelScope) | 113 p.labelScope = ast.NewScope(p.labelScope) |
114 p.targetStack = append(p.targetStack, nil) | 114 p.targetStack = append(p.targetStack, nil) |
115 } | 115 } |
116 | 116 |
117 | 117 |
118 func (p *parser) closeLabelScope() { | 118 func (p *parser) closeLabelScope() { |
119 » ÿ// resolve labelsÿ | 119 » // resolve labels |
120 n := len(p.targetStack) - 1 | 120 n := len(p.targetStack) - 1 |
121 scope := p.labelScope | 121 scope := p.labelScope |
122 for _, ident := range p.targetStack[n] { | 122 for _, ident := range p.targetStack[n] { |
123 ident.Obj = scope.Lookup(ident.Name) | 123 ident.Obj = scope.Lookup(ident.Name) |
124 if ident.Obj == nil && p.mode&DeclarationErrors != 0 { | 124 if ident.Obj == nil && p.mode&DeclarationErrors != 0 { |
125 p.error(ident.Pos(), fmt.Sprintf("label %s undefined", i
dent.Name)) | 125 p.error(ident.Pos(), fmt.Sprintf("label %s undefined", i
dent.Name)) |
126 } | 126 } |
127 } | 127 } |
128 » ÿ// pop label scopeÿ | 128 » // pop label scope |
129 p.targetStack = p.targetStack[0:n] | 129 p.targetStack = p.targetStack[0:n] |
130 p.labelScope = p.labelScope.Outer | 130 p.labelScope = p.labelScope.Outer |
131 } | 131 } |
132 | 132 |
133 | 133 |
134 func (p *parser) declare(decl interface{}, scope *ast.Scope, kind ast.ObjKind, i
dents ...*ast.Ident) { | 134 func (p *parser) declare(decl interface{}, scope *ast.Scope, kind ast.ObjKind, i
dents ...*ast.Ident) { |
135 for _, ident := range idents { | 135 for _, ident := range idents { |
136 assert(ident.Obj == nil, "identifier already declared or resolve
d") | 136 assert(ident.Obj == nil, "identifier already declared or resolve
d") |
137 if ident.Name != "_" { | 137 if ident.Name != "_" { |
138 obj := ast.NewObj(kind, ident.Name) | 138 obj := ast.NewObj(kind, ident.Name) |
139 » » » ÿ// remember the corresponding declaration for redeclara
tionÿ | 139 » » » // remember the corresponding declaration for redeclarat
ion |
140 » » » ÿ// errors and global variable resolution/typechecking p
haseÿ | 140 » » » // errors and global variable resolution/typechecking ph
ase |
141 obj.Decl = decl | 141 obj.Decl = decl |
142 » » » alt := scope.Insert(obj) | 142 » » » if alt := scope.Insert(obj); alt != nil && p.mode&Declar
ationErrors != 0 { |
143 » » » if alt != obj && p.mode&DeclarationErrors != 0 { | |
144 prevDecl := "" | 143 prevDecl := "" |
145 if pos := alt.Pos(); pos.IsValid() { | 144 if pos := alt.Pos(); pos.IsValid() { |
146 prevDecl = fmt.Sprintf("\n\tprevious dec
laration at %s", p.file.Position(pos)) | 145 prevDecl = fmt.Sprintf("\n\tprevious dec
laration at %s", p.file.Position(pos)) |
147 } | 146 } |
148 p.error(ident.Pos(), fmt.Sprintf("%s redeclared
in this block%s", ident.Name, prevDecl)) | 147 p.error(ident.Pos(), fmt.Sprintf("%s redeclared
in this block%s", ident.Name, prevDecl)) |
149 } | 148 } |
150 ident.Obj = obj | 149 ident.Obj = obj |
151 } | 150 } |
152 } | 151 } |
153 } | 152 } |
154 | 153 |
155 | 154 |
156 func (p *parser) shortVarDecl(idents []*ast.Ident) { | 155 func (p *parser) shortVarDecl(idents []*ast.Ident) { |
157 » ÿ// Go spec: A short variable declaration may redeclare variablesÿ | 156 » // Go spec: A short variable declaration may redeclare variables |
158 » ÿ// provided they were originally declared in the same block withÿ | 157 » // provided they were originally declared in the same block with |
159 » ÿ// the same type, and at least one of the non-blank variables is new.ÿ | 158 » // the same type, and at least one of the non-blank variables is new. |
160 » n := 0 ÿ// number of new variablesÿ | 159 » n := 0 // number of new variables |
161 for _, ident := range idents { | 160 for _, ident := range idents { |
162 assert(ident.Obj == nil, "identifier already declared or resolve
d") | 161 assert(ident.Obj == nil, "identifier already declared or resolve
d") |
163 if ident.Name != "_" { | 162 if ident.Name != "_" { |
164 obj := ast.NewObj(ast.Var, ident.Name) | 163 obj := ast.NewObj(ast.Var, ident.Name) |
165 » » » ÿ// short var declarations cannot have redeclaration err
orsÿ | 164 » » » // short var declarations cannot have redeclaration erro
rs |
166 » » » ÿ// and are not global => no need to remember the respec
tiveÿ | 165 » » » // and are not global => no need to remember the respect
ive |
167 » » » ÿ// declarationÿ | 166 » » » // declaration |
168 alt := p.topScope.Insert(obj) | 167 alt := p.topScope.Insert(obj) |
169 » » » if alt == obj { | 168 » » » if alt == nil { |
170 » » » » n++ ÿ// new declarationÿ | 169 » » » » n++ // new declaration |
| 170 » » » » alt = obj |
171 } | 171 } |
172 ident.Obj = alt | 172 ident.Obj = alt |
173 } | 173 } |
174 } | 174 } |
175 if n == 0 && p.mode&DeclarationErrors != 0 { | 175 if n == 0 && p.mode&DeclarationErrors != 0 { |
176 p.error(idents[0].Pos(), "no new variables on left side of :=") | 176 p.error(idents[0].Pos(), "no new variables on left side of :=") |
177 } | 177 } |
178 } | 178 } |
179 | 179 |
180 | 180 |
181 ÿ// The unresolved object is a sentinel to mark identifiers that have been added
ÿ | 181 // The unresolved object is a sentinel to mark identifiers that have been added |
182 ÿ// to the list of unresolved identifiers. The sentinel is only used for verifyi
ngÿ | 182 // to the list of unresolved identifiers. The sentinel is only used for verifyin
g |
183 ÿ// internal consistency.ÿ | 183 // internal consistency. |
184 var unresolved = new(ast.Object) | 184 var unresolved = new(ast.Object) |
185 | 185 |
186 | 186 |
187 func (p *parser) resolve(x ast.Expr) { | 187 func (p *parser) resolve(x ast.Expr) { |
188 » ÿ// nothing to do if x is not an identifier or the blank identifierÿ | 188 » // nothing to do if x is not an identifier or the blank identifier |
189 ident, _ := x.(*ast.Ident) | 189 ident, _ := x.(*ast.Ident) |
190 if ident == nil { | 190 if ident == nil { |
191 return | 191 return |
192 } | 192 } |
193 assert(ident.Obj == nil, "identifier already declared or resolved") | 193 assert(ident.Obj == nil, "identifier already declared or resolved") |
194 if ident.Name == "_" { | 194 if ident.Name == "_" { |
195 return | 195 return |
196 } | 196 } |
197 » ÿ// try to resolve the identifierÿ | 197 » // try to resolve the identifier |
198 for s := p.topScope; s != nil; s = s.Outer { | 198 for s := p.topScope; s != nil; s = s.Outer { |
199 if obj := s.Lookup(ident.Name); obj != nil { | 199 if obj := s.Lookup(ident.Name); obj != nil { |
200 ident.Obj = obj | 200 ident.Obj = obj |
201 return | 201 return |
202 } | 202 } |
203 } | 203 } |
204 » ÿ// all local scopes are known, so any unresolved identifierÿ | 204 » // all local scopes are known, so any unresolved identifier |
205 » ÿ// must be found either in the file scope, package scopeÿ | 205 » // must be found either in the file scope, package scope |
206 » ÿ// (perhaps in another file), or universe scope --- collectÿ | 206 » // (perhaps in another file), or universe scope --- collect |
207 » ÿ// them so that they can be resolved laterÿ | 207 » // them so that they can be resolved later |
208 ident.Obj = unresolved | 208 ident.Obj = unresolved |
209 p.unresolved = append(p.unresolved, ident) | 209 p.unresolved = append(p.unresolved, ident) |
210 } | 210 } |
211 | 211 |
212 | 212 |
213 ÿ// ----------------------------------------------------------------------------
ÿ | 213 // ---------------------------------------------------------------------------- |
214 ÿ// Parsing supportÿ | 214 // Parsing support |
215 | 215 |
216 func (p *parser) printTrace(a ...interface{}) { | 216 func (p *parser) printTrace(a ...interface{}) { |
217 const dots = ". . . . . . . . . . . . . . . . . . . . . . . . . . . . .
. . . " + | 217 const dots = ". . . . . . . . . . . . . . . . . . . . . . . . . . . . .
. . . " + |
218 ". . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . .
" | 218 ". . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . .
" |
219 const n = uint(len(dots)) | 219 const n = uint(len(dots)) |
220 pos := p.file.Position(p.pos) | 220 pos := p.file.Position(p.pos) |
221 fmt.Printf("%5d:%3d: ", pos.Line, pos.Column) | 221 fmt.Printf("%5d:%3d: ", pos.Line, pos.Column) |
222 i := 2 * p.indent | 222 i := 2 * p.indent |
223 for ; i > n; i -= n { | 223 for ; i > n; i -= n { |
224 fmt.Print(dots) | 224 fmt.Print(dots) |
225 } | 225 } |
226 fmt.Print(dots[0:i]) | 226 fmt.Print(dots[0:i]) |
227 fmt.Println(a...) | 227 fmt.Println(a...) |
228 } | 228 } |
229 | 229 |
230 | 230 |
231 func trace(p *parser, msg string) *parser { | 231 func trace(p *parser, msg string) *parser { |
232 p.printTrace(msg, "(") | 232 p.printTrace(msg, "(") |
233 p.indent++ | 233 p.indent++ |
234 return p | 234 return p |
235 } | 235 } |
236 | 236 |
237 | 237 |
238 ÿ// Usage pattern: defer un(trace(p, "..."));ÿ | 238 // Usage pattern: defer un(trace(p, "...")); |
239 func un(p *parser) { | 239 func un(p *parser) { |
240 p.indent-- | 240 p.indent-- |
241 p.printTrace(")") | 241 p.printTrace(")") |
242 } | 242 } |
243 | 243 |
244 | 244 |
245 ÿ// Advance to the next token.ÿ | 245 // Advance to the next token. |
246 func (p *parser) next0() { | 246 func (p *parser) next0() { |
247 » ÿ// Because of one-token look-ahead, print the previous tokenÿ | 247 » // Because of one-token look-ahead, print the previous token |
248 » ÿ// when tracing as it provides a more readable output. Theÿ | 248 » // when tracing as it provides a more readable output. The |
249 » ÿ// very first token (!p.pos.IsValid()) is not initializedÿ | 249 » // very first token (!p.pos.IsValid()) is not initialized |
250 » ÿ// (it is token.ILLEGAL), so don't print it.ÿ | 250 » // (it is token.ILLEGAL), so don't print it. |
251 if p.trace && p.pos.IsValid() { | 251 if p.trace && p.pos.IsValid() { |
252 s := p.tok.String() | 252 s := p.tok.String() |
253 switch { | 253 switch { |
254 case p.tok.IsLiteral(): | 254 case p.tok.IsLiteral(): |
255 p.printTrace(s, p.lit) | 255 p.printTrace(s, p.lit) |
256 case p.tok.IsOperator(), p.tok.IsKeyword(): | 256 case p.tok.IsOperator(), p.tok.IsKeyword(): |
257 p.printTrace("\"" + s + "\"") | 257 p.printTrace("\"" + s + "\"") |
258 default: | 258 default: |
259 p.printTrace(s) | 259 p.printTrace(s) |
260 } | 260 } |
261 } | 261 } |
262 | 262 |
263 p.pos, p.tok, p.lit = p.scanner.Scan() | 263 p.pos, p.tok, p.lit = p.scanner.Scan() |
264 } | 264 } |
265 | 265 |
266 ÿ// Consume a comment and return it and the line on which it ends.ÿ | 266 // Consume a comment and return it and the line on which it ends. |
267 func (p *parser) consumeComment() (comment *ast.Comment, endline int) { | 267 func (p *parser) consumeComment() (comment *ast.Comment, endline int) { |
268 » ÿ// /*-style comments may end on a different line than where they start.
ÿ | 268 » // /*-style comments may end on a different line than where they start. |
269 » ÿ// Scan the comment for '\n' chars and adjust endline accordingly.ÿ | 269 » // Scan the comment for '\n' chars and adjust endline accordingly. |
270 endline = p.file.Line(p.pos) | 270 endline = p.file.Line(p.pos) |
271 if p.lit[1] == '*' { | 271 if p.lit[1] == '*' { |
272 » » ÿ// don't use range here - no need to decode Unicode code points
ÿ | 272 » » // don't use range here - no need to decode Unicode code points |
273 for i := 0; i < len(p.lit); i++ { | 273 for i := 0; i < len(p.lit); i++ { |
274 if p.lit[i] == '\n' { | 274 if p.lit[i] == '\n' { |
275 endline++ | 275 endline++ |
276 } | 276 } |
277 } | 277 } |
278 } | 278 } |
279 | 279 |
280 comment = &ast.Comment{p.pos, p.lit} | 280 comment = &ast.Comment{p.pos, p.lit} |
281 p.next0() | 281 p.next0() |
282 | 282 |
283 return | 283 return |
284 } | 284 } |
285 | 285 |
286 | 286 |
287 ÿ// Consume a group of adjacent comments, add it to the parser'sÿ | 287 // Consume a group of adjacent comments, add it to the parser's |
288 ÿ// comments list, and return it together with the line at whichÿ | 288 // comments list, and return it together with the line at which |
289 ÿ// the last comment in the group ends. An empty line or non-commentÿ | 289 // the last comment in the group ends. An empty line or non-comment |
290 ÿ// token terminates a comment group.ÿ | 290 // token terminates a comment group. |
291 ÿ//ÿ | 291 // |
292 func (p *parser) consumeCommentGroup() (comments *ast.CommentGroup, endline int)
{ | 292 func (p *parser) consumeCommentGroup() (comments *ast.CommentGroup, endline int)
{ |
293 var list []*ast.Comment | 293 var list []*ast.Comment |
294 endline = p.file.Line(p.pos) | 294 endline = p.file.Line(p.pos) |
295 for p.tok == token.COMMENT && endline+1 >= p.file.Line(p.pos) { | 295 for p.tok == token.COMMENT && endline+1 >= p.file.Line(p.pos) { |
296 var comment *ast.Comment | 296 var comment *ast.Comment |
297 comment, endline = p.consumeComment() | 297 comment, endline = p.consumeComment() |
298 list = append(list, comment) | 298 list = append(list, comment) |
299 } | 299 } |
300 | 300 |
301 » ÿ// add comment group to the comments listÿ | 301 » // add comment group to the comments list |
302 comments = &ast.CommentGroup{list} | 302 comments = &ast.CommentGroup{list} |
303 p.comments = append(p.comments, comments) | 303 p.comments = append(p.comments, comments) |
304 | 304 |
305 return | 305 return |
306 } | 306 } |
307 | 307 |
308 | 308 |
309 ÿ// Advance to the next non-comment token. In the process, collectÿ | 309 // Advance to the next non-comment token. In the process, collect |
310 ÿ// any comment groups encountered, and remember the last lead andÿ | 310 // any comment groups encountered, and remember the last lead and |
311 ÿ// line comments.ÿ | 311 // line comments. |
312 ÿ//ÿ | 312 // |
313 ÿ// A lead comment is a comment group that starts and ends in aÿ | 313 // A lead comment is a comment group that starts and ends in a |
314 ÿ// line without any other tokens and that is followed by a non-commentÿ | 314 // line without any other tokens and that is followed by a non-comment |
315 ÿ// token on the line immediately after the comment group.ÿ | 315 // token on the line immediately after the comment group. |
316 ÿ//ÿ | 316 // |
317 ÿ// A line comment is a comment group that follows a non-commentÿ | 317 // A line comment is a comment group that follows a non-comment |
318 ÿ// token on the same line, and that has no tokens after it on the lineÿ | 318 // token on the same line, and that has no tokens after it on the line |
319 ÿ// where it ends.ÿ | 319 // where it ends. |
320 ÿ//ÿ | 320 // |
321 ÿ// Lead and line comments may be considered documentation that isÿ | 321 // Lead and line comments may be considered documentation that is |
322 ÿ// stored in the AST.ÿ | 322 // stored in the AST. |
323 ÿ//ÿ | 323 // |
324 func (p *parser) next() { | 324 func (p *parser) next() { |
325 p.leadComment = nil | 325 p.leadComment = nil |
326 p.lineComment = nil | 326 p.lineComment = nil |
327 » line := p.file.Line(p.pos) ÿ// current lineÿ | 327 » line := p.file.Line(p.pos) // current line |
328 p.next0() | 328 p.next0() |
329 | 329 |
330 if p.tok == token.COMMENT { | 330 if p.tok == token.COMMENT { |
331 var comment *ast.CommentGroup | 331 var comment *ast.CommentGroup |
332 var endline int | 332 var endline int |
333 | 333 |
334 if p.file.Line(p.pos) == line { | 334 if p.file.Line(p.pos) == line { |
335 » » » ÿ// The comment is on same line as previous token; itÿ | 335 » » » // The comment is on same line as previous token; it |
336 » » » ÿ// cannot be a lead comment but may be a line comment.ÿ | 336 » » » // cannot be a lead comment but may be a line comment. |
337 comment, endline = p.consumeCommentGroup() | 337 comment, endline = p.consumeCommentGroup() |
338 if p.file.Line(p.pos) != endline { | 338 if p.file.Line(p.pos) != endline { |
339 » » » » ÿ// The next token is on a different line, thusÿ | 339 » » » » // The next token is on a different line, thus |
340 » » » » ÿ// the last comment group is a line comment.ÿ | 340 » » » » // the last comment group is a line comment. |
341 p.lineComment = comment | 341 p.lineComment = comment |
342 } | 342 } |
343 } | 343 } |
344 | 344 |
345 » » ÿ// consume successor comments, if anyÿ | 345 » » // consume successor comments, if any |
346 endline = -1 | 346 endline = -1 |
347 for p.tok == token.COMMENT { | 347 for p.tok == token.COMMENT { |
348 comment, endline = p.consumeCommentGroup() | 348 comment, endline = p.consumeCommentGroup() |
349 } | 349 } |
350 | 350 |
351 if endline+1 == p.file.Line(p.pos) { | 351 if endline+1 == p.file.Line(p.pos) { |
352 » » » ÿ// The next token is following on the line immediately
after theÿ | 352 » » » // The next token is following on the line immediately a
fter the |
353 » » » ÿ// comment group, thus the last comment group is a lead
comment.ÿ | 353 » » » // comment group, thus the last comment group is a lead
comment. |
354 p.leadComment = comment | 354 p.leadComment = comment |
355 } | 355 } |
356 } | 356 } |
357 } | 357 } |
358 | 358 |
359 | 359 |
360 func (p *parser) error(pos token.Pos, msg string) { | 360 func (p *parser) error(pos token.Pos, msg string) { |
361 p.Error(p.file.Position(pos), msg) | 361 p.Error(p.file.Position(pos), msg) |
362 } | 362 } |
363 | 363 |
364 | 364 |
365 func (p *parser) errorExpected(pos token.Pos, msg string) { | 365 func (p *parser) errorExpected(pos token.Pos, msg string) { |
366 msg = "expected " + msg | 366 msg = "expected " + msg |
367 if pos == p.pos { | 367 if pos == p.pos { |
368 » » ÿ// the error happened at the current position;ÿ | 368 » » // the error happened at the current position; |
369 » » ÿ// make the error message more specificÿ | 369 » » // make the error message more specific |
370 if p.tok == token.SEMICOLON && p.lit[0] == '\n' { | 370 if p.tok == token.SEMICOLON && p.lit[0] == '\n' { |
371 msg += ", found newline" | 371 msg += ", found newline" |
372 } else { | 372 } else { |
373 msg += ", found '" + p.tok.String() + "'" | 373 msg += ", found '" + p.tok.String() + "'" |
374 if p.tok.IsLiteral() { | 374 if p.tok.IsLiteral() { |
375 msg += " " + p.lit | 375 msg += " " + p.lit |
376 } | 376 } |
377 } | 377 } |
378 } | 378 } |
379 p.error(pos, msg) | 379 p.error(pos, msg) |
380 } | 380 } |
381 | 381 |
382 | 382 |
383 func (p *parser) expect(tok token.Token) token.Pos { | 383 func (p *parser) expect(tok token.Token) token.Pos { |
384 pos := p.pos | 384 pos := p.pos |
385 if p.tok != tok { | 385 if p.tok != tok { |
386 p.errorExpected(pos, "'"+tok.String()+"'") | 386 p.errorExpected(pos, "'"+tok.String()+"'") |
387 } | 387 } |
388 » p.next() ÿ// make progressÿ | 388 » p.next() // make progress |
389 return pos | 389 return pos |
390 } | 390 } |
391 | 391 |
392 | 392 |
393 func (p *parser) expectSemi() { | 393 func (p *parser) expectSemi() { |
394 if p.tok != token.RPAREN && p.tok != token.RBRACE { | 394 if p.tok != token.RPAREN && p.tok != token.RBRACE { |
395 p.expect(token.SEMICOLON) | 395 p.expect(token.SEMICOLON) |
396 } | 396 } |
397 } | 397 } |
398 | 398 |
399 | 399 |
400 func assert(cond bool, msg string) { | 400 func assert(cond bool, msg string) { |
401 if !cond { | 401 if !cond { |
402 panic("go/parser internal error: " + msg) | 402 panic("go/parser internal error: " + msg) |
403 } | 403 } |
404 } | 404 } |
405 | 405 |
406 | 406 |
407 ÿ// ----------------------------------------------------------------------------
ÿ | 407 // ---------------------------------------------------------------------------- |
408 ÿ// Identifiersÿ | 408 // Identifiers |
409 | 409 |
410 func (p *parser) parseIdent() *ast.Ident { | 410 func (p *parser) parseIdent() *ast.Ident { |
411 pos := p.pos | 411 pos := p.pos |
412 name := "_" | 412 name := "_" |
413 if p.tok == token.IDENT { | 413 if p.tok == token.IDENT { |
414 name = p.lit | 414 name = p.lit |
415 p.next() | 415 p.next() |
416 } else { | 416 } else { |
417 » » p.expect(token.IDENT) ÿ// use expect() error handlingÿ | 417 » » p.expect(token.IDENT) // use expect() error handling |
418 } | 418 } |
419 return &ast.Ident{pos, name, nil} | 419 return &ast.Ident{pos, name, nil} |
420 } | 420 } |
421 | 421 |
422 | 422 |
423 func (p *parser) parseIdentList() (list []*ast.Ident) { | 423 func (p *parser) parseIdentList() (list []*ast.Ident) { |
424 if p.trace { | 424 if p.trace { |
425 defer un(trace(p, "IdentList")) | 425 defer un(trace(p, "IdentList")) |
426 } | 426 } |
427 | 427 |
428 list = append(list, p.parseIdent()) | 428 list = append(list, p.parseIdent()) |
429 for p.tok == token.COMMA { | 429 for p.tok == token.COMMA { |
430 p.next() | 430 p.next() |
431 list = append(list, p.parseIdent()) | 431 list = append(list, p.parseIdent()) |
432 } | 432 } |
433 | 433 |
434 return | 434 return |
435 } | 435 } |
436 | 436 |
437 | 437 |
438 ÿ// ----------------------------------------------------------------------------
ÿ | 438 // ---------------------------------------------------------------------------- |
439 ÿ// Common productionsÿ | 439 // Common productions |
440 | 440 |
441 ÿ// If lhs is set, result list elements which are identifiers are not resolved.ÿ | 441 // If lhs is set, result list elements which are identifiers are not resolved. |
442 func (p *parser) parseExprList(lhs bool) (list []ast.Expr) { | 442 func (p *parser) parseExprList(lhs bool) (list []ast.Expr) { |
443 if p.trace { | 443 if p.trace { |
444 defer un(trace(p, "ExpressionList")) | 444 defer un(trace(p, "ExpressionList")) |
445 } | 445 } |
446 | 446 |
447 list = append(list, p.parseExpr(lhs)) | 447 list = append(list, p.parseExpr(lhs)) |
448 for p.tok == token.COMMA { | 448 for p.tok == token.COMMA { |
449 p.next() | 449 p.next() |
450 list = append(list, p.parseExpr(lhs)) | 450 list = append(list, p.parseExpr(lhs)) |
451 } | 451 } |
452 | 452 |
453 return | 453 return |
454 } | 454 } |
455 | 455 |
456 | 456 |
457 func (p *parser) parseLhsList() []ast.Expr { | 457 func (p *parser) parseLhsList() []ast.Expr { |
458 list := p.parseExprList(true) | 458 list := p.parseExprList(true) |
459 switch p.tok { | 459 switch p.tok { |
460 case token.DEFINE: | 460 case token.DEFINE: |
461 » » ÿ// lhs of a short variable declarationÿ | 461 » » // lhs of a short variable declaration |
462 p.shortVarDecl(p.makeIdentList(list)) | 462 p.shortVarDecl(p.makeIdentList(list)) |
463 case token.COLON: | 463 case token.COLON: |
464 » » ÿ// lhs of a label declaration or a communication clause of a se
lectÿ | 464 » » // lhs of a label declaration or a communication clause of a sel
ect |
465 » » ÿ// statement (parseLhsList is not called when parsing the case
clauseÿ | 465 » » // statement (parseLhsList is not called when parsing the case c
lause |
466 » » ÿ// of a switch statement):ÿ | 466 » » // of a switch statement): |
467 » » ÿ// - labels are declared by the caller of parseLhsListÿ | 467 » » // - labels are declared by the caller of parseLhsList |
468 » » ÿ// - for communication clauses, if there is a stand-alone ident
ifierÿ | 468 » » // - for communication clauses, if there is a stand-alone identi
fier |
469 » » ÿ// followed by a colon, we have a syntax error; there is no n
eedÿ | 469 » » // followed by a colon, we have a syntax error; there is no ne
ed |
470 » » ÿ// to resolve the identifier in that caseÿ | 470 » » // to resolve the identifier in that case |
471 default: | 471 default: |
472 » » ÿ// identifiers must be declared elsewhereÿ | 472 » » // identifiers must be declared elsewhere |
473 for _, x := range list { | 473 for _, x := range list { |
474 p.resolve(x) | 474 p.resolve(x) |
475 } | 475 } |
476 } | 476 } |
477 return list | 477 return list |
478 } | 478 } |
479 | 479 |
480 | 480 |
481 func (p *parser) parseRhsList() []ast.Expr { | 481 func (p *parser) parseRhsList() []ast.Expr { |
482 return p.parseExprList(false) | 482 return p.parseExprList(false) |
483 } | 483 } |
484 | 484 |
485 | 485 |
486 ÿ// ----------------------------------------------------------------------------
ÿ | 486 // ---------------------------------------------------------------------------- |
487 ÿ// Typesÿ | 487 // Types |
488 | 488 |
489 func (p *parser) parseType() ast.Expr { | 489 func (p *parser) parseType() ast.Expr { |
490 if p.trace { | 490 if p.trace { |
491 defer un(trace(p, "Type")) | 491 defer un(trace(p, "Type")) |
492 } | 492 } |
493 | 493 |
494 typ := p.tryType() | 494 typ := p.tryType() |
495 | 495 |
496 if typ == nil { | 496 if typ == nil { |
497 pos := p.pos | 497 pos := p.pos |
498 p.errorExpected(pos, "type") | 498 p.errorExpected(pos, "type") |
499 » » p.next() ÿ// make progressÿ | 499 » » p.next() // make progress |
500 return &ast.BadExpr{pos, p.pos} | 500 return &ast.BadExpr{pos, p.pos} |
501 } | 501 } |
502 | 502 |
503 return typ | 503 return typ |
504 } | 504 } |
505 | 505 |
506 | 506 |
507 ÿ// If the result is an identifier, it is not resolved.ÿ | 507 // If the result is an identifier, it is not resolved. |
508 func (p *parser) parseTypeName() ast.Expr { | 508 func (p *parser) parseTypeName() ast.Expr { |
509 if p.trace { | 509 if p.trace { |
510 defer un(trace(p, "TypeName")) | 510 defer un(trace(p, "TypeName")) |
511 } | 511 } |
512 | 512 |
513 ident := p.parseIdent() | 513 ident := p.parseIdent() |
514 » ÿ// don't resolve ident yet - it may be a parameter or field nameÿ | 514 » // don't resolve ident yet - it may be a parameter or field name |
515 | 515 |
516 if p.tok == token.PERIOD { | 516 if p.tok == token.PERIOD { |
517 » » ÿ// ident is a package nameÿ | 517 » » // ident is a package name |
518 p.next() | 518 p.next() |
519 p.resolve(ident) | 519 p.resolve(ident) |
520 sel := p.parseIdent() | 520 sel := p.parseIdent() |
521 return &ast.SelectorExpr{ident, sel} | 521 return &ast.SelectorExpr{ident, sel} |
522 } | 522 } |
523 | 523 |
524 return ident | 524 return ident |
525 } | 525 } |
526 | 526 |
527 | 527 |
(...skipping 32 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
560 } | 560 } |
561 | 561 |
562 | 562 |
// parseFieldDecl parses a single struct field declaration: either
// "IdentifierList Type" or an anonymous (embedded) field, optionally
// followed by a tag. Field names are declared in scope.
func (p *parser) parseFieldDecl(scope *ast.Scope) *ast.Field {
	if p.trace {
		defer un(trace(p, "FieldDecl"))
	}

	doc := p.leadComment

	// fields; typ is nil when the list is just a type (anonymous field)
	list, typ := p.parseVarList(false)

	// optional tag
	var tag *ast.BasicLit
	if p.tok == token.STRING {
		tag = &ast.BasicLit{p.pos, p.tok, p.lit}
		p.next()
	}

	// analyze case
	var idents []*ast.Ident
	if typ != nil {
		// IdentifierList Type
		idents = p.makeIdentList(list)
	} else {
		// ["*"] TypeName (AnonymousField)
		typ = list[0] // we always have at least one element
		p.resolve(typ)
		// more than one element, or something that is not a
		// (possibly dereferenced) type name, cannot be an anonymous field
		if n := len(list); n > 1 || !isTypeName(deref(typ)) {
			pos := typ.Pos()
			p.errorExpected(pos, "anonymous field")
			typ = &ast.BadExpr{pos, list[n-1].End()}
		}
	}

	p.expectSemi() // call before accessing p.linecomment

	field := &ast.Field{doc, idents, typ, tag, p.lineComment}
	p.declare(field, scope, ast.Var, idents...)

	return field
}
603 | 603 |
604 | 604 |
605 func (p *parser) parseStructType() *ast.StructType { | 605 func (p *parser) parseStructType() *ast.StructType { |
606 if p.trace { | 606 if p.trace { |
607 defer un(trace(p, "StructType")) | 607 defer un(trace(p, "StructType")) |
608 } | 608 } |
609 | 609 |
610 pos := p.expect(token.STRUCT) | 610 pos := p.expect(token.STRUCT) |
611 lbrace := p.expect(token.LBRACE) | 611 lbrace := p.expect(token.LBRACE) |
612 » scope := ast.NewScope(nil) ÿ// struct scopeÿ | 612 » scope := ast.NewScope(nil) // struct scope |
613 var list []*ast.Field | 613 var list []*ast.Field |
614 for p.tok == token.IDENT || p.tok == token.MUL || p.tok == token.LPAREN
{ | 614 for p.tok == token.IDENT || p.tok == token.MUL || p.tok == token.LPAREN
{ |
615 » » ÿ// a field declaration cannot start with a '(' but we acceptÿ | 615 » » // a field declaration cannot start with a '(' but we accept |
616 » » ÿ// it here for more robust parsing and better error messagesÿ | 616 » » // it here for more robust parsing and better error messages |
617 » » ÿ// (parseFieldDecl will check and complain if necessary)ÿ | 617 » » // (parseFieldDecl will check and complain if necessary) |
618 list = append(list, p.parseFieldDecl(scope)) | 618 list = append(list, p.parseFieldDecl(scope)) |
619 } | 619 } |
620 rbrace := p.expect(token.RBRACE) | 620 rbrace := p.expect(token.RBRACE) |
621 | 621 |
622 » ÿ// TODO(gri): store struct scope in ASTÿ | 622 » // TODO(gri): store struct scope in AST |
623 return &ast.StructType{pos, &ast.FieldList{lbrace, list, rbrace}, false} | 623 return &ast.StructType{pos, &ast.FieldList{lbrace, list, rbrace}, false} |
624 } | 624 } |
625 | 625 |
626 | 626 |
627 func (p *parser) parsePointerType() *ast.StarExpr { | 627 func (p *parser) parsePointerType() *ast.StarExpr { |
628 if p.trace { | 628 if p.trace { |
629 defer un(trace(p, "PointerType")) | 629 defer un(trace(p, "PointerType")) |
630 } | 630 } |
631 | 631 |
632 star := p.expect(token.MUL) | 632 star := p.expect(token.MUL) |
633 base := p.parseType() | 633 base := p.parseType() |
634 | 634 |
635 return &ast.StarExpr{star, base} | 635 return &ast.StarExpr{star, base} |
636 } | 636 } |
637 | 637 |
638 | 638 |
// tryVarType attempts to parse a variable (parameter/field) type and
// returns nil if none is present. When isParam is set, a leading "..."
// (variadic parameter) is accepted; it must be the last parameter type.
func (p *parser) tryVarType(isParam bool) ast.Expr {
	if isParam && p.tok == token.ELLIPSIS {
		pos := p.pos
		p.next()
		typ := p.tryIdentOrType(isParam) // don't use parseType so we can provide better error message
		if typ == nil {
			p.error(pos, "'...' parameter is missing type")
			typ = &ast.BadExpr{pos, p.pos}
		}
		if p.tok != token.RPAREN {
			p.error(pos, "can use '...' with last parameter type only")
		}
		return &ast.Ellipsis{pos, typ}
	}
	return p.tryIdentOrType(false)
}
655 | 655 |
656 | 656 |
657 func (p *parser) parseVarType(isParam bool) ast.Expr { | 657 func (p *parser) parseVarType(isParam bool) ast.Expr { |
658 typ := p.tryVarType(isParam) | 658 typ := p.tryVarType(isParam) |
659 if typ == nil { | 659 if typ == nil { |
660 pos := p.pos | 660 pos := p.pos |
661 p.errorExpected(pos, "type") | 661 p.errorExpected(pos, "type") |
662 » » p.next() ÿ// make progressÿ | 662 » » p.next() // make progress |
663 typ = &ast.BadExpr{pos, p.pos} | 663 typ = &ast.BadExpr{pos, p.pos} |
664 } | 664 } |
665 return typ | 665 return typ |
666 } | 666 } |
667 | 667 |
668 | 668 |
// parseVarList parses a comma-separated list of types (or identifiers
// that look like type names), optionally followed by a trailing type.
// If typ != nil, list holds identifiers and typ is their common type;
// otherwise list is a list of types (e.g. anonymous parameters).
func (p *parser) parseVarList(isParam bool) (list []ast.Expr, typ ast.Expr) {
	if p.trace {
		defer un(trace(p, "VarList"))
	}

	// a list of identifiers looks like a list of type names
	for {
		// parseVarType accepts any type (including parenthesized ones)
		// even though the syntax does not permit them here: we
		// accept them all for more robust parsing and complain
		// afterwards
		list = append(list, p.parseVarType(isParam))
		if p.tok != token.COMMA {
			break
		}
		p.next()
	}

	// if we had a list of identifiers, it must be followed by a type
	// (the list elements are then left unresolved — they are names)
	typ = p.tryVarType(isParam)
	if typ != nil {
		p.resolve(typ)
	}

	return
}
695 | 695 |
696 | 696 |
// parseParameterList parses the fields of a parameter list (without the
// surrounding parentheses). Named parameters are declared in scope;
// anonymous parameters have their types resolved instead. When
// ellipsisOk is set, a trailing "...T" parameter is accepted.
func (p *parser) parseParameterList(scope *ast.Scope, ellipsisOk bool) (params []*ast.Field) {
	if p.trace {
		defer un(trace(p, "ParameterList"))
	}

	// typ != nil means the first group was "IdentifierList Type";
	// typ == nil means list is a sequence of anonymous types
	list, typ := p.parseVarList(ellipsisOk)
	if typ != nil {
		// IdentifierList Type
		idents := p.makeIdentList(list)
		field := &ast.Field{nil, idents, typ, nil, nil}
		params = append(params, field)
		// Go spec: The scope of an identifier denoting a function
		// parameter or result variable is the function body.
		p.declare(field, scope, ast.Var, idents...)
		if p.tok == token.COMMA {
			p.next()
		}

		// remaining groups are each "IdentifierList Type"
		for p.tok != token.RPAREN && p.tok != token.EOF {
			idents := p.parseIdentList()
			typ := p.parseVarType(ellipsisOk)
			field := &ast.Field{nil, idents, typ, nil, nil}
			params = append(params, field)
			// Go spec: The scope of an identifier denoting a function
			// parameter or result variable is the function body.
			p.declare(field, scope, ast.Var, idents...)
			if p.tok != token.COMMA {
				break
			}
			p.next()
		}

	} else {
		// Type { "," Type } (anonymous parameters)
		params = make([]*ast.Field, len(list))
		for i, x := range list {
			p.resolve(x)
			params[i] = &ast.Field{Type: x}
		}
	}

	return
}
740 | 740 |
(...skipping 45 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
786 return | 786 return |
787 } | 787 } |
788 | 788 |
789 | 789 |
790 func (p *parser) parseFuncType() (*ast.FuncType, *ast.Scope) { | 790 func (p *parser) parseFuncType() (*ast.FuncType, *ast.Scope) { |
791 if p.trace { | 791 if p.trace { |
792 defer un(trace(p, "FuncType")) | 792 defer un(trace(p, "FuncType")) |
793 } | 793 } |
794 | 794 |
795 pos := p.expect(token.FUNC) | 795 pos := p.expect(token.FUNC) |
796 » scope := ast.NewScope(p.topScope) ÿ// function scopeÿ | 796 » scope := ast.NewScope(p.topScope) // function scope |
797 params, results := p.parseSignature(scope) | 797 params, results := p.parseSignature(scope) |
798 | 798 |
799 return &ast.FuncType{pos, params, results}, scope | 799 return &ast.FuncType{pos, params, results}, scope |
800 } | 800 } |
801 | 801 |
802 | 802 |
// parseMethodSpec parses one interface element: either a method
// signature (Name Signature) or an embedded interface type name.
// Method names are declared in scope (the interface scope).
func (p *parser) parseMethodSpec(scope *ast.Scope) *ast.Field {
	if p.trace {
		defer un(trace(p, "MethodSpec"))
	}

	doc := p.leadComment
	var idents []*ast.Ident
	var typ ast.Expr
	x := p.parseTypeName()
	if ident, isIdent := x.(*ast.Ident); isIdent && p.tok == token.LPAREN {
		// method
		idents = []*ast.Ident{ident}
		// NOTE: this inner scope deliberately shadows the parameter —
		// the method's parameters live in their own scope
		scope := ast.NewScope(nil) // method scope
		params, results := p.parseSignature(scope)
		typ = &ast.FuncType{token.NoPos, params, results}
	} else {
		// embedded interface
		typ = x
	}
	p.expectSemi() // call before accessing p.linecomment

	spec := &ast.Field{doc, idents, typ, nil, p.lineComment}
	p.declare(spec, scope, ast.Fun, idents...)

	return spec
}
829 | 829 |
830 | 830 |
831 func (p *parser) parseInterfaceType() *ast.InterfaceType { | 831 func (p *parser) parseInterfaceType() *ast.InterfaceType { |
832 if p.trace { | 832 if p.trace { |
833 defer un(trace(p, "InterfaceType")) | 833 defer un(trace(p, "InterfaceType")) |
834 } | 834 } |
835 | 835 |
836 pos := p.expect(token.INTERFACE) | 836 pos := p.expect(token.INTERFACE) |
837 lbrace := p.expect(token.LBRACE) | 837 lbrace := p.expect(token.LBRACE) |
838 » scope := ast.NewScope(nil) ÿ// interface scopeÿ | 838 » scope := ast.NewScope(nil) // interface scope |
839 var list []*ast.Field | 839 var list []*ast.Field |
840 for p.tok == token.IDENT { | 840 for p.tok == token.IDENT { |
841 list = append(list, p.parseMethodSpec(scope)) | 841 list = append(list, p.parseMethodSpec(scope)) |
842 } | 842 } |
843 rbrace := p.expect(token.RBRACE) | 843 rbrace := p.expect(token.RBRACE) |
844 | 844 |
845 » ÿ// TODO(gri): store interface scope in ASTÿ | 845 » // TODO(gri): store interface scope in AST |
846 return &ast.InterfaceType{pos, &ast.FieldList{lbrace, list, rbrace}, fal
se} | 846 return &ast.InterfaceType{pos, &ast.FieldList{lbrace, list, rbrace}, fal
se} |
847 } | 847 } |
848 | 848 |
849 | 849 |
850 func (p *parser) parseMapType() *ast.MapType { | 850 func (p *parser) parseMapType() *ast.MapType { |
851 if p.trace { | 851 if p.trace { |
852 defer un(trace(p, "MapType")) | 852 defer un(trace(p, "MapType")) |
853 } | 853 } |
854 | 854 |
855 pos := p.expect(token.MAP) | 855 pos := p.expect(token.MAP) |
(...skipping 23 matching lines...) Expand all Loading... |
879 p.expect(token.ARROW) | 879 p.expect(token.ARROW) |
880 p.expect(token.CHAN) | 880 p.expect(token.CHAN) |
881 dir = ast.RECV | 881 dir = ast.RECV |
882 } | 882 } |
883 value := p.parseType() | 883 value := p.parseType() |
884 | 884 |
885 return &ast.ChanType{pos, dir, value} | 885 return &ast.ChanType{pos, dir, value} |
886 } | 886 } |
887 | 887 |
888 | 888 |
// tryIdentOrType attempts to parse a type at the current token and
// returns nil if the token cannot start a type. When ellipsisOk is set,
// an array length of "..." is accepted (composite literals).
// If the result is an identifier, it is not resolved.
func (p *parser) tryIdentOrType(ellipsisOk bool) ast.Expr {
	switch p.tok {
	case token.IDENT:
		return p.parseTypeName()
	case token.LBRACK:
		return p.parseArrayType(ellipsisOk)
	case token.STRUCT:
		return p.parseStructType()
	case token.MUL:
		return p.parsePointerType()
	case token.FUNC:
		typ, _ := p.parseFuncType() // function scope is discarded here
		return typ
	case token.INTERFACE:
		return p.parseInterfaceType()
	case token.MAP:
		return p.parseMapType()
	case token.CHAN, token.ARROW:
		return p.parseChanType()
	case token.LPAREN:
		// parenthesized type
		lparen := p.pos
		p.next()
		typ := p.parseType()
		rparen := p.expect(token.RPAREN)
		return &ast.ParenExpr{lparen, typ, rparen}
	}

	// no type found
	return nil
}
920 | 920 |
921 | 921 |
922 func (p *parser) tryType() ast.Expr { | 922 func (p *parser) tryType() ast.Expr { |
923 typ := p.tryIdentOrType(false) | 923 typ := p.tryIdentOrType(false) |
924 if typ != nil { | 924 if typ != nil { |
925 p.resolve(typ) | 925 p.resolve(typ) |
926 } | 926 } |
927 return typ | 927 return typ |
928 } | 928 } |
929 | 929 |
930 | 930 |
931 ÿ// ----------------------------------------------------------------------------
ÿ | 931 // ---------------------------------------------------------------------------- |
932 ÿ// Blocksÿ | 932 // Blocks |
933 | 933 |
934 func (p *parser) parseStmtList() (list []ast.Stmt) { | 934 func (p *parser) parseStmtList() (list []ast.Stmt) { |
935 if p.trace { | 935 if p.trace { |
936 defer un(trace(p, "StatementList")) | 936 defer un(trace(p, "StatementList")) |
937 } | 937 } |
938 | 938 |
939 for p.tok != token.CASE && p.tok != token.DEFAULT && p.tok != token.RBRA
CE && p.tok != token.EOF { | 939 for p.tok != token.CASE && p.tok != token.DEFAULT && p.tok != token.RBRA
CE && p.tok != token.EOF { |
940 list = append(list, p.parseStmt()) | 940 list = append(list, p.parseStmt()) |
941 } | 941 } |
942 | 942 |
943 return | 943 return |
944 } | 944 } |
945 | 945 |
946 | 946 |
// parseBody parses a function body using the given (already populated)
// function scope rather than opening a fresh one, so that parameters
// declared by the signature are visible inside the body.
func (p *parser) parseBody(scope *ast.Scope) *ast.BlockStmt {
	if p.trace {
		defer un(trace(p, "Body"))
	}

	lbrace := p.expect(token.LBRACE)
	// install the caller-supplied scope directly instead of p.openScope()
	p.topScope = scope // open function scope
	p.openLabelScope()
	list := p.parseStmtList()
	p.closeLabelScope()
	p.closeScope()
	rbrace := p.expect(token.RBRACE)

	return &ast.BlockStmt{lbrace, list, rbrace}
}
962 | 962 |
963 | 963 |
964 func (p *parser) parseBlockStmt() *ast.BlockStmt { | 964 func (p *parser) parseBlockStmt() *ast.BlockStmt { |
965 if p.trace { | 965 if p.trace { |
966 defer un(trace(p, "BlockStmt")) | 966 defer un(trace(p, "BlockStmt")) |
967 } | 967 } |
968 | 968 |
969 lbrace := p.expect(token.LBRACE) | 969 lbrace := p.expect(token.LBRACE) |
970 p.openScope() | 970 p.openScope() |
971 list := p.parseStmtList() | 971 list := p.parseStmtList() |
972 p.closeScope() | 972 p.closeScope() |
973 rbrace := p.expect(token.RBRACE) | 973 rbrace := p.expect(token.RBRACE) |
974 | 974 |
975 return &ast.BlockStmt{lbrace, list, rbrace} | 975 return &ast.BlockStmt{lbrace, list, rbrace} |
976 } | 976 } |
977 | 977 |
978 | 978 |
979 ÿ// ----------------------------------------------------------------------------
ÿ | 979 // ---------------------------------------------------------------------------- |
980 ÿ// Expressionsÿ | 980 // Expressions |
981 | 981 |
982 func (p *parser) parseFuncTypeOrLit() ast.Expr { | 982 func (p *parser) parseFuncTypeOrLit() ast.Expr { |
983 if p.trace { | 983 if p.trace { |
984 defer un(trace(p, "FuncTypeOrLit")) | 984 defer un(trace(p, "FuncTypeOrLit")) |
985 } | 985 } |
986 | 986 |
987 typ, scope := p.parseFuncType() | 987 typ, scope := p.parseFuncType() |
988 if p.tok != token.LBRACE { | 988 if p.tok != token.LBRACE { |
989 » » ÿ// function type onlyÿ | 989 » » // function type only |
990 return typ | 990 return typ |
991 } | 991 } |
992 | 992 |
993 p.exprLev++ | 993 p.exprLev++ |
994 body := p.parseBody(scope) | 994 body := p.parseBody(scope) |
995 p.exprLev-- | 995 p.exprLev-- |
996 | 996 |
997 return &ast.FuncLit{typ, body} | 997 return &ast.FuncLit{typ, body} |
998 } | 998 } |
999 | 999 |
1000 | 1000 |
1001 ÿ// parseOperand may return an expression or a raw type (incl. arrayÿ | 1001 // parseOperand may return an expression or a raw type (incl. array |
1002 ÿ// types of the form [...]T. Callers must verify the result.ÿ | 1002 // types of the form [...]T. Callers must verify the result. |
1003 ÿ// If lhs is set and the result is an identifier, it is not resolved.ÿ | 1003 // If lhs is set and the result is an identifier, it is not resolved. |
1004 ÿ//ÿ | 1004 // |
1005 func (p *parser) parseOperand(lhs bool) ast.Expr { | 1005 func (p *parser) parseOperand(lhs bool) ast.Expr { |
1006 if p.trace { | 1006 if p.trace { |
1007 defer un(trace(p, "Operand")) | 1007 defer un(trace(p, "Operand")) |
1008 } | 1008 } |
1009 | 1009 |
1010 switch p.tok { | 1010 switch p.tok { |
1011 case token.IDENT: | 1011 case token.IDENT: |
1012 x := p.parseIdent() | 1012 x := p.parseIdent() |
1013 if !lhs { | 1013 if !lhs { |
1014 p.resolve(x) | 1014 p.resolve(x) |
(...skipping 12 matching lines...) Expand all Loading... |
1027 x := p.parseRhs() | 1027 x := p.parseRhs() |
1028 p.exprLev-- | 1028 p.exprLev-- |
1029 rparen := p.expect(token.RPAREN) | 1029 rparen := p.expect(token.RPAREN) |
1030 return &ast.ParenExpr{lparen, x, rparen} | 1030 return &ast.ParenExpr{lparen, x, rparen} |
1031 | 1031 |
1032 case token.FUNC: | 1032 case token.FUNC: |
1033 return p.parseFuncTypeOrLit() | 1033 return p.parseFuncTypeOrLit() |
1034 | 1034 |
1035 default: | 1035 default: |
1036 if typ := p.tryIdentOrType(true); typ != nil { | 1036 if typ := p.tryIdentOrType(true); typ != nil { |
1037 » » » ÿ// could be type for composite literal or conversionÿ | 1037 » » » // could be type for composite literal or conversion |
1038 _, isIdent := typ.(*ast.Ident) | 1038 _, isIdent := typ.(*ast.Ident) |
1039 assert(!isIdent, "type cannot be identifier") | 1039 assert(!isIdent, "type cannot be identifier") |
1040 return typ | 1040 return typ |
1041 } | 1041 } |
1042 } | 1042 } |
1043 | 1043 |
1044 pos := p.pos | 1044 pos := p.pos |
1045 p.errorExpected(pos, "operand") | 1045 p.errorExpected(pos, "operand") |
1046 » p.next() ÿ// make progressÿ | 1046 » p.next() // make progress |
1047 return &ast.BadExpr{pos, p.pos} | 1047 return &ast.BadExpr{pos, p.pos} |
1048 } | 1048 } |
1049 | 1049 |
1050 | 1050 |
1051 func (p *parser) parseSelector(x ast.Expr) ast.Expr { | 1051 func (p *parser) parseSelector(x ast.Expr) ast.Expr { |
1052 if p.trace { | 1052 if p.trace { |
1053 defer un(trace(p, "Selector")) | 1053 defer un(trace(p, "Selector")) |
1054 } | 1054 } |
1055 | 1055 |
1056 sel := p.parseIdent() | 1056 sel := p.parseIdent() |
1057 | 1057 |
1058 return &ast.SelectorExpr{x, sel} | 1058 return &ast.SelectorExpr{x, sel} |
1059 } | 1059 } |
1060 | 1060 |
1061 | 1061 |
1062 func (p *parser) parseTypeAssertion(x ast.Expr) ast.Expr { | 1062 func (p *parser) parseTypeAssertion(x ast.Expr) ast.Expr { |
1063 if p.trace { | 1063 if p.trace { |
1064 defer un(trace(p, "TypeAssertion")) | 1064 defer un(trace(p, "TypeAssertion")) |
1065 } | 1065 } |
1066 | 1066 |
1067 p.expect(token.LPAREN) | 1067 p.expect(token.LPAREN) |
1068 var typ ast.Expr | 1068 var typ ast.Expr |
1069 if p.tok == token.TYPE { | 1069 if p.tok == token.TYPE { |
1070 » » ÿ// type switch: typ == nilÿ | 1070 » » // type switch: typ == nil |
1071 p.next() | 1071 p.next() |
1072 } else { | 1072 } else { |
1073 typ = p.parseType() | 1073 typ = p.parseType() |
1074 } | 1074 } |
1075 p.expect(token.RPAREN) | 1075 p.expect(token.RPAREN) |
1076 | 1076 |
1077 return &ast.TypeAssertExpr{x, typ} | 1077 return &ast.TypeAssertExpr{x, typ} |
1078 } | 1078 } |
1079 | 1079 |
1080 | 1080 |
(...skipping 55 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
1136 | 1136 |
// parseElement parses a composite-literal element: a literal value, a
// key:value pair (when keyOk is set), or a plain expression. A map key
// is deliberately left unresolved (keys need not be identifiers in
// scope); other identifiers are resolved.
func (p *parser) parseElement(keyOk bool) ast.Expr {
	if p.trace {
		defer un(trace(p, "Element"))
	}

	if p.tok == token.LBRACE {
		return p.parseLiteralValue(nil)
	}

	x := p.parseExpr(keyOk) // don't resolve if map key
	if keyOk {
		if p.tok == token.COLON {
			colon := p.pos
			p.next()
			// x was a key: the element value follows and may not be a key itself
			return &ast.KeyValueExpr{x, colon, p.parseElement(false)}
		}
		p.resolve(x) // not a map key
	}

	return x
}
1154 | 1158 |
1155 | 1159 |
1156 func (p *parser) parseElementList() (list []ast.Expr) { | 1160 func (p *parser) parseElementList() (list []ast.Expr) { |
1157 if p.trace { | 1161 if p.trace { |
1158 defer un(trace(p, "ElementList")) | 1162 defer un(trace(p, "ElementList")) |
1159 } | 1163 } |
1160 | 1164 |
1161 for p.tok != token.RBRACE && p.tok != token.EOF { | 1165 for p.tok != token.RBRACE && p.tok != token.EOF { |
(...skipping 18 matching lines...) Expand all Loading... |
1180 p.exprLev++ | 1184 p.exprLev++ |
1181 if p.tok != token.RBRACE { | 1185 if p.tok != token.RBRACE { |
1182 elts = p.parseElementList() | 1186 elts = p.parseElementList() |
1183 } | 1187 } |
1184 p.exprLev-- | 1188 p.exprLev-- |
1185 rbrace := p.expect(token.RBRACE) | 1189 rbrace := p.expect(token.RBRACE) |
1186 return &ast.CompositeLit{typ, lbrace, elts, rbrace} | 1190 return &ast.CompositeLit{typ, lbrace, elts, rbrace} |
1187 } | 1191 } |
1188 | 1192 |
1189 | 1193 |
// checkExpr checks that x is an expression (and not a type).
// Nodes that are valid expressions pass through unchanged; everything
// else is reported and replaced by an ast.BadExpr spanning x.
func (p *parser) checkExpr(x ast.Expr) ast.Expr {
	// empty cases: these node kinds are proper expressions as-is
	switch t := unparen(x).(type) {
	case *ast.BadExpr:
	case *ast.Ident:
	case *ast.BasicLit:
	case *ast.FuncLit:
	case *ast.CompositeLit:
	case *ast.ParenExpr:
		panic("unreachable") // unparen removed all parentheses
	case *ast.SelectorExpr:
	case *ast.IndexExpr:
	case *ast.SliceExpr:
	case *ast.TypeAssertExpr:
		if t.Type == nil {
			// the form X.(type) is only allowed in type switch expressions
			p.errorExpected(x.Pos(), "expression")
			x = &ast.BadExpr{x.Pos(), x.End()}
		}
	case *ast.CallExpr:
	case *ast.StarExpr:
	case *ast.UnaryExpr:
		if t.Op == token.RANGE {
			// the range operator is only allowed at the top of a for statement
			p.errorExpected(x.Pos(), "expression")
			x = &ast.BadExpr{x.Pos(), x.End()}
		}
	case *ast.BinaryExpr:
	default:
		// all other nodes are not proper expressions
		p.errorExpected(x.Pos(), "expression")
		x = &ast.BadExpr{x.Pos(), x.End()}
	}
	return x
}
1225 | 1229 |
1226 | 1230 |
1227 ÿ// isTypeName returns true iff x is a (qualified) TypeName.ÿ | 1231 // isTypeName returns true iff x is a (qualified) TypeName. |
1228 func isTypeName(x ast.Expr) bool { | 1232 func isTypeName(x ast.Expr) bool { |
1229 switch t := x.(type) { | 1233 switch t := x.(type) { |
1230 case *ast.BadExpr: | 1234 case *ast.BadExpr: |
1231 case *ast.Ident: | 1235 case *ast.Ident: |
1232 case *ast.SelectorExpr: | 1236 case *ast.SelectorExpr: |
1233 _, isIdent := t.X.(*ast.Ident) | 1237 _, isIdent := t.X.(*ast.Ident) |
1234 return isIdent | 1238 return isIdent |
1235 default: | 1239 default: |
1236 » » return false ÿ// all other nodes are not type namesÿ | 1240 » » return false // all other nodes are not type names |
1237 } | 1241 } |
1238 return true | 1242 return true |
1239 } | 1243 } |
1240 | 1244 |
1241 | 1245 |
1242 ÿ// isLiteralType returns true iff x is a legal composite literal type.ÿ | 1246 // isLiteralType returns true iff x is a legal composite literal type. |
1243 func isLiteralType(x ast.Expr) bool { | 1247 func isLiteralType(x ast.Expr) bool { |
1244 switch t := x.(type) { | 1248 switch t := x.(type) { |
1245 case *ast.BadExpr: | 1249 case *ast.BadExpr: |
1246 case *ast.Ident: | 1250 case *ast.Ident: |
1247 case *ast.SelectorExpr: | 1251 case *ast.SelectorExpr: |
1248 _, isIdent := t.X.(*ast.Ident) | 1252 _, isIdent := t.X.(*ast.Ident) |
1249 return isIdent | 1253 return isIdent |
1250 case *ast.ArrayType: | 1254 case *ast.ArrayType: |
1251 case *ast.StructType: | 1255 case *ast.StructType: |
1252 case *ast.MapType: | 1256 case *ast.MapType: |
1253 default: | 1257 default: |
1254 » » return false ÿ// all other nodes are not legal composite literal
typesÿ | 1258 » » return false // all other nodes are not legal composite literal
types |
1255 } | 1259 } |
1256 return true | 1260 return true |
1257 } | 1261 } |
1258 | 1262 |
1259 | 1263 |
1260 ÿ// If x is of the form *T, deref returns T, otherwise it returns x.ÿ | 1264 // If x is of the form *T, deref returns T, otherwise it returns x. |
1261 func deref(x ast.Expr) ast.Expr { | 1265 func deref(x ast.Expr) ast.Expr { |
1262 if p, isPtr := x.(*ast.StarExpr); isPtr { | 1266 if p, isPtr := x.(*ast.StarExpr); isPtr { |
1263 x = p.X | 1267 x = p.X |
1264 } | 1268 } |
1265 return x | 1269 return x |
1266 } | 1270 } |
1267 | 1271 |
1268 | 1272 |
1269 ÿ// If x is of the form (T), unparen returns unparen(T), otherwise it returns x.
ÿ | 1273 // If x is of the form (T), unparen returns unparen(T), otherwise it returns x. |
1270 func unparen(x ast.Expr) ast.Expr { | 1274 func unparen(x ast.Expr) ast.Expr { |
1271 if p, isParen := x.(*ast.ParenExpr); isParen { | 1275 if p, isParen := x.(*ast.ParenExpr); isParen { |
1272 x = unparen(p.X) | 1276 x = unparen(p.X) |
1273 } | 1277 } |
1274 return x | 1278 return x |
1275 } | 1279 } |
1276 | 1280 |
1277 | 1281 |
1278 ÿ// checkExprOrType checks that x is an expression or a typeÿ | 1282 // checkExprOrType checks that x is an expression or a type |
1279 ÿ// (and not a raw type such as [...]T).ÿ | 1283 // (and not a raw type such as [...]T). |
1280 ÿ//ÿ | 1284 // |
1281 func (p *parser) checkExprOrType(x ast.Expr) ast.Expr { | 1285 func (p *parser) checkExprOrType(x ast.Expr) ast.Expr { |
1282 switch t := unparen(x).(type) { | 1286 switch t := unparen(x).(type) { |
1283 case *ast.ParenExpr: | 1287 case *ast.ParenExpr: |
1284 panic("unreachable") | 1288 panic("unreachable") |
1285 case *ast.UnaryExpr: | 1289 case *ast.UnaryExpr: |
1286 if t.Op == token.RANGE { | 1290 if t.Op == token.RANGE { |
1287 » » » ÿ// the range operator is only allowed at the top of a f
or statementÿ | 1291 » » » // the range operator is only allowed at the top of a fo
r statement |
1288 p.errorExpected(x.Pos(), "expression") | 1292 p.errorExpected(x.Pos(), "expression") |
1289 x = &ast.BadExpr{x.Pos(), x.End()} | 1293 x = &ast.BadExpr{x.Pos(), x.End()} |
1290 } | 1294 } |
1291 case *ast.ArrayType: | 1295 case *ast.ArrayType: |
1292 if len, isEllipsis := t.Len.(*ast.Ellipsis); isEllipsis { | 1296 if len, isEllipsis := t.Len.(*ast.Ellipsis); isEllipsis { |
1293 p.error(len.Pos(), "expected array length, found '...'") | 1297 p.error(len.Pos(), "expected array length, found '...'") |
1294 x = &ast.BadExpr{x.Pos(), x.End()} | 1298 x = &ast.BadExpr{x.Pos(), x.End()} |
1295 } | 1299 } |
1296 } | 1300 } |
1297 | 1301 |
1298 » ÿ// all other nodes are expressions or typesÿ | 1302 » // all other nodes are expressions or types |
1299 return x | 1303 return x |
1300 } | 1304 } |
1301 | 1305 |
1302 | 1306 |
1303 ÿ// If lhs is set and the result is an identifier, it is not resolved.ÿ | 1307 // If lhs is set and the result is an identifier, it is not resolved. |
1304 func (p *parser) parsePrimaryExpr(lhs bool) ast.Expr { | 1308 func (p *parser) parsePrimaryExpr(lhs bool) ast.Expr { |
1305 if p.trace { | 1309 if p.trace { |
1306 defer un(trace(p, "PrimaryExpr")) | 1310 defer un(trace(p, "PrimaryExpr")) |
1307 } | 1311 } |
1308 | 1312 |
1309 x := p.parseOperand(lhs) | 1313 x := p.parseOperand(lhs) |
1310 L: | 1314 L: |
1311 for { | 1315 for { |
1312 switch p.tok { | 1316 switch p.tok { |
1313 case token.PERIOD: | 1317 case token.PERIOD: |
1314 p.next() | 1318 p.next() |
1315 if lhs { | 1319 if lhs { |
1316 p.resolve(x) | 1320 p.resolve(x) |
1317 } | 1321 } |
1318 switch p.tok { | 1322 switch p.tok { |
1319 case token.IDENT: | 1323 case token.IDENT: |
1320 x = p.parseSelector(p.checkExpr(x)) | 1324 x = p.parseSelector(p.checkExpr(x)) |
1321 case token.LPAREN: | 1325 case token.LPAREN: |
1322 x = p.parseTypeAssertion(p.checkExpr(x)) | 1326 x = p.parseTypeAssertion(p.checkExpr(x)) |
1323 default: | 1327 default: |
1324 pos := p.pos | 1328 pos := p.pos |
1325 » » » » p.next() ÿ// make progressÿ | 1329 » » » » p.next() // make progress |
1326 p.errorExpected(pos, "selector or type assertion
") | 1330 p.errorExpected(pos, "selector or type assertion
") |
1327 x = &ast.BadExpr{pos, p.pos} | 1331 x = &ast.BadExpr{pos, p.pos} |
1328 } | 1332 } |
1329 case token.LBRACK: | 1333 case token.LBRACK: |
1330 if lhs { | 1334 if lhs { |
1331 p.resolve(x) | 1335 p.resolve(x) |
1332 } | 1336 } |
1333 x = p.parseIndexOrSlice(p.checkExpr(x)) | 1337 x = p.parseIndexOrSlice(p.checkExpr(x)) |
1334 case token.LPAREN: | 1338 case token.LPAREN: |
1335 if lhs { | 1339 if lhs { |
1336 p.resolve(x) | 1340 p.resolve(x) |
1337 } | 1341 } |
1338 x = p.parseCallOrConversion(p.checkExprOrType(x)) | 1342 x = p.parseCallOrConversion(p.checkExprOrType(x)) |
1339 case token.LBRACE: | 1343 case token.LBRACE: |
1340 if isLiteralType(x) && (p.exprLev >= 0 || !isTypeName(x)
) { | 1344 if isLiteralType(x) && (p.exprLev >= 0 || !isTypeName(x)
) { |
1341 if lhs { | 1345 if lhs { |
1342 p.resolve(x) | 1346 p.resolve(x) |
1343 } | 1347 } |
1344 x = p.parseLiteralValue(x) | 1348 x = p.parseLiteralValue(x) |
1345 } else { | 1349 } else { |
1346 break L | 1350 break L |
1347 } | 1351 } |
1348 default: | 1352 default: |
1349 break L | 1353 break L |
1350 } | 1354 } |
1351 » » lhs = false ÿ// no need to try to resolve againÿ | 1355 » » lhs = false // no need to try to resolve again |
1352 } | 1356 } |
1353 | 1357 |
1354 return x | 1358 return x |
1355 } | 1359 } |
1356 | 1360 |
1357 | 1361 |
1358 ÿ// If lhs is set and the result is an identifier, it is not resolved.ÿ | 1362 // If lhs is set and the result is an identifier, it is not resolved. |
1359 func (p *parser) parseUnaryExpr(lhs bool) ast.Expr { | 1363 func (p *parser) parseUnaryExpr(lhs bool) ast.Expr { |
1360 if p.trace { | 1364 if p.trace { |
1361 defer un(trace(p, "UnaryExpr")) | 1365 defer un(trace(p, "UnaryExpr")) |
1362 } | 1366 } |
1363 | 1367 |
1364 switch p.tok { | 1368 switch p.tok { |
1365 case token.ADD, token.SUB, token.NOT, token.XOR, token.AND, token.RANGE: | 1369 case token.ADD, token.SUB, token.NOT, token.XOR, token.AND, token.RANGE: |
1366 pos, op := p.pos, p.tok | 1370 pos, op := p.pos, p.tok |
1367 p.next() | 1371 p.next() |
1368 x := p.parseUnaryExpr(false) | 1372 x := p.parseUnaryExpr(false) |
1369 return &ast.UnaryExpr{pos, op, p.checkExpr(x)} | 1373 return &ast.UnaryExpr{pos, op, p.checkExpr(x)} |
1370 | 1374 |
1371 case token.ARROW: | 1375 case token.ARROW: |
1372 » » ÿ// channel type or receive expressionÿ | 1376 » » // channel type or receive expression |
1373 pos := p.pos | 1377 pos := p.pos |
1374 p.next() | 1378 p.next() |
1375 if p.tok == token.CHAN { | 1379 if p.tok == token.CHAN { |
1376 p.next() | 1380 p.next() |
1377 value := p.parseType() | 1381 value := p.parseType() |
1378 return &ast.ChanType{pos, ast.RECV, value} | 1382 return &ast.ChanType{pos, ast.RECV, value} |
1379 } | 1383 } |
1380 | 1384 |
1381 x := p.parseUnaryExpr(false) | 1385 x := p.parseUnaryExpr(false) |
1382 return &ast.UnaryExpr{pos, token.ARROW, p.checkExpr(x)} | 1386 return &ast.UnaryExpr{pos, token.ARROW, p.checkExpr(x)} |
1383 | 1387 |
1384 case token.MUL: | 1388 case token.MUL: |
1385 » » ÿ// pointer type or unary "*" expressionÿ | 1389 » » // pointer type or unary "*" expression |
1386 pos := p.pos | 1390 pos := p.pos |
1387 p.next() | 1391 p.next() |
1388 x := p.parseUnaryExpr(false) | 1392 x := p.parseUnaryExpr(false) |
1389 return &ast.StarExpr{pos, p.checkExprOrType(x)} | 1393 return &ast.StarExpr{pos, p.checkExprOrType(x)} |
1390 } | 1394 } |
1391 | 1395 |
1392 return p.parsePrimaryExpr(lhs) | 1396 return p.parsePrimaryExpr(lhs) |
1393 } | 1397 } |
1394 | 1398 |
1395 | 1399 |
1396 ÿ// If lhs is set and the result is an identifier, it is not resolved.ÿ | 1400 // If lhs is set and the result is an identifier, it is not resolved. |
1397 func (p *parser) parseBinaryExpr(lhs bool, prec1 int) ast.Expr { | 1401 func (p *parser) parseBinaryExpr(lhs bool, prec1 int) ast.Expr { |
1398 if p.trace { | 1402 if p.trace { |
1399 defer un(trace(p, "BinaryExpr")) | 1403 defer un(trace(p, "BinaryExpr")) |
1400 } | 1404 } |
1401 | 1405 |
1402 x := p.parseUnaryExpr(lhs) | 1406 x := p.parseUnaryExpr(lhs) |
1403 for prec := p.tok.Precedence(); prec >= prec1; prec-- { | 1407 for prec := p.tok.Precedence(); prec >= prec1; prec-- { |
1404 for p.tok.Precedence() == prec { | 1408 for p.tok.Precedence() == prec { |
1405 pos, op := p.pos, p.tok | 1409 pos, op := p.pos, p.tok |
1406 p.next() | 1410 p.next() |
1407 if lhs { | 1411 if lhs { |
1408 p.resolve(x) | 1412 p.resolve(x) |
1409 lhs = false | 1413 lhs = false |
1410 } | 1414 } |
1411 y := p.parseBinaryExpr(false, prec+1) | 1415 y := p.parseBinaryExpr(false, prec+1) |
1412 x = &ast.BinaryExpr{p.checkExpr(x), pos, op, p.checkExpr
(y)} | 1416 x = &ast.BinaryExpr{p.checkExpr(x), pos, op, p.checkExpr
(y)} |
1413 } | 1417 } |
1414 } | 1418 } |
1415 | 1419 |
1416 return x | 1420 return x |
1417 } | 1421 } |
1418 | 1422 |
1419 | 1423 |
1420 ÿ// If lhs is set and the result is an identifier, it is not resolved.ÿ | 1424 // If lhs is set and the result is an identifier, it is not resolved. |
1421 ÿ// TODO(gri): parseExpr may return a type or even a raw type ([..]int) -ÿ | 1425 // TODO(gri): parseExpr may return a type or even a raw type ([..]int) - |
1422 ÿ// should reject when a type/raw type is obviously not allowedÿ | 1426 // should reject when a type/raw type is obviously not allowed |
1423 func (p *parser) parseExpr(lhs bool) ast.Expr { | 1427 func (p *parser) parseExpr(lhs bool) ast.Expr { |
1424 if p.trace { | 1428 if p.trace { |
1425 defer un(trace(p, "Expression")) | 1429 defer un(trace(p, "Expression")) |
1426 } | 1430 } |
1427 | 1431 |
1428 return p.parseBinaryExpr(lhs, token.LowestPrec+1) | 1432 return p.parseBinaryExpr(lhs, token.LowestPrec+1) |
1429 } | 1433 } |
1430 | 1434 |
1431 | 1435 |
// parseRhs parses a right-hand side expression; identifiers in it are
// resolved immediately (lhs == false).
func (p *parser) parseRhs() ast.Expr {
	return p.parseExpr(false)
}
1435 | 1439 |
1436 | 1440 |
1437 ÿ// ----------------------------------------------------------------------------
ÿ | 1441 // ---------------------------------------------------------------------------- |
1438 ÿ// Statementsÿ | 1442 // Statements |
1439 | 1443 |
// parseSimpleStmt parses a simple statement: an assignment, a labeled
// statement (only if labelOk is set), a send statement, an increment or
// decrement, or a plain expression statement.
func (p *parser) parseSimpleStmt(labelOk bool) ast.Stmt {
	if p.trace {
		defer un(trace(p, "SimpleStmt"))
	}

	x := p.parseLhsList()

	switch p.tok {
	case
		token.DEFINE, token.ASSIGN, token.ADD_ASSIGN,
		token.SUB_ASSIGN, token.MUL_ASSIGN, token.QUO_ASSIGN,
		token.REM_ASSIGN, token.AND_ASSIGN, token.OR_ASSIGN,
		token.XOR_ASSIGN, token.SHL_ASSIGN, token.SHR_ASSIGN, token.AND_NOT_ASSIGN:
		// assignment statement
		pos, tok := p.pos, p.tok
		p.next()
		y := p.parseRhsList()
		return &ast.AssignStmt{x, pos, tok, y}
	}

	// every remaining form uses a single expression
	if len(x) > 1 {
		p.errorExpected(x[0].Pos(), "1 expression")
		// continue with first expression
	}

	switch p.tok {
	case token.COLON:
		// labeled statement
		colon := p.pos
		p.next()
		if label, isIdent := x[0].(*ast.Ident); labelOk && isIdent {
			// Go spec: The scope of a label is the body of the function
			// in which it is declared and excludes the body of any nested
			// function.
			stmt := &ast.LabeledStmt{label, colon, p.parseStmt()}
			p.declare(stmt, p.labelScope, ast.Lbl, label)
			return stmt
		}
		p.error(x[0].Pos(), "illegal label declaration")
		return &ast.BadStmt{x[0].Pos(), colon + 1}

	case token.ARROW:
		// send statement
		arrow := p.pos
		p.next() // consume "<-"
		y := p.parseRhs()
		return &ast.SendStmt{x[0], arrow, y}

	case token.INC, token.DEC:
		// increment or decrement
		s := &ast.IncDecStmt{x[0], p.pos, p.tok}
		p.next() // consume "++" or "--"
		return s
	}

	// expression
	return &ast.ExprStmt{x[0]}
}
1498 | 1502 |
1499 | 1503 |
1500 func (p *parser) parseCallExpr() *ast.CallExpr { | 1504 func (p *parser) parseCallExpr() *ast.CallExpr { |
1501 x := p.parseRhs() | 1505 x := p.parseRhs() |
1502 if call, isCall := x.(*ast.CallExpr); isCall { | 1506 if call, isCall := x.(*ast.CallExpr); isCall { |
1503 return call | 1507 return call |
1504 } | 1508 } |
1505 p.errorExpected(x.Pos(), "function/method call") | 1509 p.errorExpected(x.Pos(), "function/method call") |
1506 return nil | 1510 return nil |
1507 } | 1511 } |
1508 | 1512 |
1509 | 1513 |
1510 func (p *parser) parseGoStmt() ast.Stmt { | 1514 func (p *parser) parseGoStmt() ast.Stmt { |
1511 if p.trace { | 1515 if p.trace { |
1512 defer un(trace(p, "GoStmt")) | 1516 defer un(trace(p, "GoStmt")) |
1513 } | 1517 } |
1514 | 1518 |
1515 pos := p.expect(token.GO) | 1519 pos := p.expect(token.GO) |
1516 call := p.parseCallExpr() | 1520 call := p.parseCallExpr() |
1517 p.expectSemi() | 1521 p.expectSemi() |
1518 if call == nil { | 1522 if call == nil { |
1519 » » return &ast.BadStmt{pos, pos + 2} ÿ// len("go")ÿ | 1523 » » return &ast.BadStmt{pos, pos + 2} // len("go") |
1520 } | 1524 } |
1521 | 1525 |
1522 return &ast.GoStmt{pos, call} | 1526 return &ast.GoStmt{pos, call} |
1523 } | 1527 } |
1524 | 1528 |
1525 | 1529 |
1526 func (p *parser) parseDeferStmt() ast.Stmt { | 1530 func (p *parser) parseDeferStmt() ast.Stmt { |
1527 if p.trace { | 1531 if p.trace { |
1528 defer un(trace(p, "DeferStmt")) | 1532 defer un(trace(p, "DeferStmt")) |
1529 } | 1533 } |
1530 | 1534 |
1531 pos := p.expect(token.DEFER) | 1535 pos := p.expect(token.DEFER) |
1532 call := p.parseCallExpr() | 1536 call := p.parseCallExpr() |
1533 p.expectSemi() | 1537 p.expectSemi() |
1534 if call == nil { | 1538 if call == nil { |
1535 » » return &ast.BadStmt{pos, pos + 5} ÿ// len("defer")ÿ | 1539 » » return &ast.BadStmt{pos, pos + 5} // len("defer") |
1536 } | 1540 } |
1537 | 1541 |
1538 return &ast.DeferStmt{pos, call} | 1542 return &ast.DeferStmt{pos, call} |
1539 } | 1543 } |
1540 | 1544 |
1541 | 1545 |
1542 func (p *parser) parseReturnStmt() *ast.ReturnStmt { | 1546 func (p *parser) parseReturnStmt() *ast.ReturnStmt { |
1543 if p.trace { | 1547 if p.trace { |
1544 defer un(trace(p, "ReturnStmt")) | 1548 defer un(trace(p, "ReturnStmt")) |
1545 } | 1549 } |
(...skipping 12 matching lines...) Expand all Loading... |
1558 | 1562 |
1559 func (p *parser) parseBranchStmt(tok token.Token) *ast.BranchStmt { | 1563 func (p *parser) parseBranchStmt(tok token.Token) *ast.BranchStmt { |
1560 if p.trace { | 1564 if p.trace { |
1561 defer un(trace(p, "BranchStmt")) | 1565 defer un(trace(p, "BranchStmt")) |
1562 } | 1566 } |
1563 | 1567 |
1564 pos := p.expect(tok) | 1568 pos := p.expect(tok) |
1565 var label *ast.Ident | 1569 var label *ast.Ident |
1566 if tok != token.FALLTHROUGH && p.tok == token.IDENT { | 1570 if tok != token.FALLTHROUGH && p.tok == token.IDENT { |
1567 label = p.parseIdent() | 1571 label = p.parseIdent() |
1568 » » ÿ// add to list of unresolved targetsÿ | 1572 » » // add to list of unresolved targets |
1569 n := len(p.targetStack) - 1 | 1573 n := len(p.targetStack) - 1 |
1570 p.targetStack[n] = append(p.targetStack[n], label) | 1574 p.targetStack[n] = append(p.targetStack[n], label) |
1571 } | 1575 } |
1572 p.expectSemi() | 1576 p.expectSemi() |
1573 | 1577 |
1574 return &ast.BranchStmt{pos, tok, label} | 1578 return &ast.BranchStmt{pos, tok, label} |
1575 } | 1579 } |
1576 | 1580 |
1577 | 1581 |
1578 func (p *parser) makeExpr(s ast.Stmt) ast.Expr { | 1582 func (p *parser) makeExpr(s ast.Stmt) ast.Expr { |
(...skipping 92 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
1671 return &ast.CaseClause{pos, list, colon, body} | 1675 return &ast.CaseClause{pos, list, colon, body} |
1672 } | 1676 } |
1673 | 1677 |
1674 | 1678 |
// isExprSwitch reports whether switch header statement s indicates an
// expression switch rather than a type switch.
func isExprSwitch(s ast.Stmt) bool {
	if s == nil {
		return true
	}
	stmt, ok := s.(*ast.ExprStmt)
	if !ok {
		return false
	}
	if assert, ok := stmt.X.(*ast.TypeAssertExpr); ok {
		// x.(type) (Type == nil) marks a type switch;
		// x.(T) is a regular type assertion
		return assert.Type != nil
	}
	return true
}
1687 | 1691 |
1688 | 1692 |
1689 func (p *parser) parseSwitchStmt() ast.Stmt { | 1693 func (p *parser) parseSwitchStmt() ast.Stmt { |
1690 if p.trace { | 1694 if p.trace { |
1691 defer un(trace(p, "SwitchStmt")) | 1695 defer un(trace(p, "SwitchStmt")) |
(...skipping 27 matching lines...) Expand all Loading... |
1719 for p.tok == token.CASE || p.tok == token.DEFAULT { | 1723 for p.tok == token.CASE || p.tok == token.DEFAULT { |
1720 list = append(list, p.parseCaseClause(exprSwitch)) | 1724 list = append(list, p.parseCaseClause(exprSwitch)) |
1721 } | 1725 } |
1722 rbrace := p.expect(token.RBRACE) | 1726 rbrace := p.expect(token.RBRACE) |
1723 p.expectSemi() | 1727 p.expectSemi() |
1724 body := &ast.BlockStmt{lbrace, list, rbrace} | 1728 body := &ast.BlockStmt{lbrace, list, rbrace} |
1725 | 1729 |
1726 if exprSwitch { | 1730 if exprSwitch { |
1727 return &ast.SwitchStmt{pos, s1, p.makeExpr(s2), body} | 1731 return &ast.SwitchStmt{pos, s1, p.makeExpr(s2), body} |
1728 } | 1732 } |
1729 » ÿ// type switchÿ | 1733 » // type switch |
1730 » ÿ// TODO(gri): do all the checks!ÿ | 1734 » // TODO(gri): do all the checks! |
1731 return &ast.TypeSwitchStmt{pos, s1, s2, body} | 1735 return &ast.TypeSwitchStmt{pos, s1, s2, body} |
1732 } | 1736 } |
1733 | 1737 |
1734 | 1738 |
1735 func (p *parser) parseCommClause() *ast.CommClause { | 1739 func (p *parser) parseCommClause() *ast.CommClause { |
1736 if p.trace { | 1740 if p.trace { |
1737 defer un(trace(p, "CommClause")) | 1741 defer un(trace(p, "CommClause")) |
1738 } | 1742 } |
1739 | 1743 |
1740 p.openScope() | 1744 p.openScope() |
1741 pos := p.pos | 1745 pos := p.pos |
1742 var comm ast.Stmt | 1746 var comm ast.Stmt |
1743 if p.tok == token.CASE { | 1747 if p.tok == token.CASE { |
1744 p.next() | 1748 p.next() |
1745 lhs := p.parseLhsList() | 1749 lhs := p.parseLhsList() |
1746 if p.tok == token.ARROW { | 1750 if p.tok == token.ARROW { |
1747 » » » ÿ// SendStmtÿ | 1751 » » » // SendStmt |
1748 if len(lhs) > 1 { | 1752 if len(lhs) > 1 { |
1749 p.errorExpected(lhs[0].Pos(), "1 expression") | 1753 p.errorExpected(lhs[0].Pos(), "1 expression") |
1750 » » » » ÿ// continue with first expressionÿ | 1754 » » » » // continue with first expression |
1751 } | 1755 } |
1752 arrow := p.pos | 1756 arrow := p.pos |
1753 p.next() | 1757 p.next() |
1754 rhs := p.parseRhs() | 1758 rhs := p.parseRhs() |
1755 comm = &ast.SendStmt{lhs[0], arrow, rhs} | 1759 comm = &ast.SendStmt{lhs[0], arrow, rhs} |
1756 } else { | 1760 } else { |
1757 » » » ÿ// RecvStmtÿ | 1761 » » » // RecvStmt |
1758 pos := p.pos | 1762 pos := p.pos |
1759 tok := p.tok | 1763 tok := p.tok |
1760 var rhs ast.Expr | 1764 var rhs ast.Expr |
1761 if tok == token.ASSIGN || tok == token.DEFINE { | 1765 if tok == token.ASSIGN || tok == token.DEFINE { |
1762 » » » » ÿ// RecvStmt with assignmentÿ | 1766 » » » » // RecvStmt with assignment |
1763 if len(lhs) > 2 { | 1767 if len(lhs) > 2 { |
1764 p.errorExpected(lhs[0].Pos(), "1 or 2 ex
pressions") | 1768 p.errorExpected(lhs[0].Pos(), "1 or 2 ex
pressions") |
1765 » » » » » ÿ// continue with first two expressionsÿ | 1769 » » » » » // continue with first two expressions |
1766 lhs = lhs[0:2] | 1770 lhs = lhs[0:2] |
1767 } | 1771 } |
1768 p.next() | 1772 p.next() |
1769 rhs = p.parseRhs() | 1773 rhs = p.parseRhs() |
1770 } else { | 1774 } else { |
1771 » » » » ÿ// rhs must be single receive operationÿ | 1775 » » » » // rhs must be single receive operation |
1772 if len(lhs) > 1 { | 1776 if len(lhs) > 1 { |
1773 p.errorExpected(lhs[0].Pos(), "1 express
ion") | 1777 p.errorExpected(lhs[0].Pos(), "1 express
ion") |
1774 » » » » » ÿ// continue with first expressionÿ | 1778 » » » » » // continue with first expression |
1775 } | 1779 } |
1776 rhs = lhs[0] | 1780 rhs = lhs[0] |
1777 » » » » lhs = nil ÿ// there is no lhsÿ | 1781 » » » » lhs = nil // there is no lhs |
1778 } | 1782 } |
1779 if x, isUnary := rhs.(*ast.UnaryExpr); !isUnary || x.Op
!= token.ARROW { | 1783 if x, isUnary := rhs.(*ast.UnaryExpr); !isUnary || x.Op
!= token.ARROW { |
1780 p.errorExpected(rhs.Pos(), "send or receive oper
ation") | 1784 p.errorExpected(rhs.Pos(), "send or receive oper
ation") |
1781 rhs = &ast.BadExpr{rhs.Pos(), rhs.End()} | 1785 rhs = &ast.BadExpr{rhs.Pos(), rhs.End()} |
1782 } | 1786 } |
1783 if lhs != nil { | 1787 if lhs != nil { |
1784 comm = &ast.AssignStmt{lhs, pos, tok, []ast.Expr
{rhs}} | 1788 comm = &ast.AssignStmt{lhs, pos, tok, []ast.Expr
{rhs}} |
1785 } else { | 1789 } else { |
1786 comm = &ast.ExprStmt{rhs} | 1790 comm = &ast.ExprStmt{rhs} |
1787 } | 1791 } |
(...skipping 57 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
1845 s3 = p.parseSimpleStmt(false) | 1849 s3 = p.parseSimpleStmt(false) |
1846 } | 1850 } |
1847 } | 1851 } |
1848 p.exprLev = prevLev | 1852 p.exprLev = prevLev |
1849 } | 1853 } |
1850 | 1854 |
1851 body := p.parseBlockStmt() | 1855 body := p.parseBlockStmt() |
1852 p.expectSemi() | 1856 p.expectSemi() |
1853 | 1857 |
1854 if as, isAssign := s2.(*ast.AssignStmt); isAssign { | 1858 if as, isAssign := s2.(*ast.AssignStmt); isAssign { |
1855 » » ÿ// possibly a for statement with a range clause; check assignme
nt operatorÿ | 1859 » » // possibly a for statement with a range clause; check assignmen
t operator |
1856 if as.Tok != token.ASSIGN && as.Tok != token.DEFINE { | 1860 if as.Tok != token.ASSIGN && as.Tok != token.DEFINE { |
1857 p.errorExpected(as.TokPos, "'=' or ':='") | 1861 p.errorExpected(as.TokPos, "'=' or ':='") |
1858 return &ast.BadStmt{pos, body.End()} | 1862 return &ast.BadStmt{pos, body.End()} |
1859 } | 1863 } |
1860 » » ÿ// check lhsÿ | 1864 » » // check lhs |
1861 var key, value ast.Expr | 1865 var key, value ast.Expr |
1862 switch len(as.Lhs) { | 1866 switch len(as.Lhs) { |
1863 case 2: | 1867 case 2: |
1864 key, value = as.Lhs[0], as.Lhs[1] | 1868 key, value = as.Lhs[0], as.Lhs[1] |
1865 case 1: | 1869 case 1: |
1866 key = as.Lhs[0] | 1870 key = as.Lhs[0] |
1867 default: | 1871 default: |
1868 p.errorExpected(as.Lhs[0].Pos(), "1 or 2 expressions") | 1872 p.errorExpected(as.Lhs[0].Pos(), "1 or 2 expressions") |
1869 return &ast.BadStmt{pos, body.End()} | 1873 return &ast.BadStmt{pos, body.End()} |
1870 } | 1874 } |
1871 » » ÿ// check rhsÿ | 1875 » » // check rhs |
1872 if len(as.Rhs) != 1 { | 1876 if len(as.Rhs) != 1 { |
1873 p.errorExpected(as.Rhs[0].Pos(), "1 expression") | 1877 p.errorExpected(as.Rhs[0].Pos(), "1 expression") |
1874 return &ast.BadStmt{pos, body.End()} | 1878 return &ast.BadStmt{pos, body.End()} |
1875 } | 1879 } |
1876 if rhs, isUnary := as.Rhs[0].(*ast.UnaryExpr); isUnary && rhs.Op
== token.RANGE { | 1880 if rhs, isUnary := as.Rhs[0].(*ast.UnaryExpr); isUnary && rhs.Op
== token.RANGE { |
1877 » » » ÿ// rhs is range expressionÿ | 1881 » » » // rhs is range expression |
1878 » » » ÿ// (any short variable declaration was handled by parse
SimpleStat above)ÿ | 1882 » » » // (any short variable declaration was handled by parseS
impleStat above) |
1879 return &ast.RangeStmt{pos, key, value, as.TokPos, as.Tok
, rhs.X, body} | 1883 return &ast.RangeStmt{pos, key, value, as.TokPos, as.Tok
, rhs.X, body} |
1880 } | 1884 } |
1881 p.errorExpected(s2.Pos(), "range clause") | 1885 p.errorExpected(s2.Pos(), "range clause") |
1882 return &ast.BadStmt{pos, body.End()} | 1886 return &ast.BadStmt{pos, body.End()} |
1883 } | 1887 } |
1884 | 1888 |
1885 » ÿ// regular for statementÿ | 1889 » // regular for statement |
1886 return &ast.ForStmt{pos, s1, p.makeExpr(s2), s3, body} | 1890 return &ast.ForStmt{pos, s1, p.makeExpr(s2), s3, body} |
1887 } | 1891 } |
1888 | 1892 |
1889 | 1893 |
// parseStmt parses a single statement by dispatching on the current
// token. On error, a BadStmt spanning the consumed token is returned so
// that parsing can continue.
func (p *parser) parseStmt() (s ast.Stmt) {
	if p.trace {
		defer un(trace(p, "Statement"))
	}

	switch p.tok {
	case token.CONST, token.TYPE, token.VAR:
		s = &ast.DeclStmt{p.parseDecl()}
	case
		// tokens that may start a top-level expression
		token.IDENT, token.INT, token.FLOAT, token.CHAR, token.STRING, token.FUNC, token.LPAREN, // operand
		token.LBRACK, token.STRUCT, // composite type
		token.MUL, token.AND, token.ARROW, token.ADD, token.SUB, token.XOR: // unary operators
		s = p.parseSimpleStmt(true)
		// because of the required look-ahead, labeled statements are
		// parsed by parseSimpleStmt - don't expect a semicolon after
		// them
		if _, isLabeledStmt := s.(*ast.LabeledStmt); !isLabeledStmt {
			p.expectSemi()
		}
	case token.GO:
		s = p.parseGoStmt()
	case token.DEFER:
		s = p.parseDeferStmt()
	case token.RETURN:
		s = p.parseReturnStmt()
	case token.BREAK, token.CONTINUE, token.GOTO, token.FALLTHROUGH:
		s = p.parseBranchStmt(p.tok)
	case token.LBRACE:
		s = p.parseBlockStmt()
		p.expectSemi()
	case token.IF:
		s = p.parseIfStmt()
	case token.SWITCH:
		s = p.parseSwitchStmt()
	case token.SELECT:
		s = p.parseSelectStmt()
	case token.FOR:
		s = p.parseForStmt()
	case token.SEMICOLON:
		s = &ast.EmptyStmt{p.pos}
		p.next()
	case token.RBRACE:
		// a semicolon may be omitted before a closing "}"
		s = &ast.EmptyStmt{p.pos}
	default:
		// no statement found
		pos := p.pos
		p.errorExpected(pos, "statement")
		p.next() // make progress
		s = &ast.BadStmt{pos, p.pos}
	}

	return
}
1945 | 1949 |
1946 | 1950 |
1947 ÿ// ----------------------------------------------------------------------------
ÿ | 1951 // ---------------------------------------------------------------------------- |
1948 ÿ// Declarationsÿ | 1952 // Declarations |
1949 | 1953 |
1950 type parseSpecFunction func(p *parser, doc *ast.CommentGroup, iota int) ast.Spec | 1954 type parseSpecFunction func(p *parser, doc *ast.CommentGroup, iota int) ast.Spec |
1951 | 1955 |
1952 | 1956 |
// parseImportSpec parses a single import spec: an optional local package
// name (an identifier or ".") followed by the import path string. The
// spec is recorded in p.imports. The iota parameter is unused.
func parseImportSpec(p *parser, doc *ast.CommentGroup, _ int) ast.Spec {
	if p.trace {
		defer un(trace(p, "ImportSpec"))
	}

	// optional local name: "." for dot-imports, or an identifier
	var ident *ast.Ident
	switch p.tok {
	case token.PERIOD:
		ident = &ast.Ident{p.pos, ".", nil}
		p.next()
	case token.IDENT:
		ident = p.parseIdent()
	}

	var path *ast.BasicLit
	if p.tok == token.STRING {
		path = &ast.BasicLit{p.pos, p.tok, p.lit}
		p.next()
	} else {
		p.expect(token.STRING) // use expect() error handling
	}
	p.expectSemi() // call before accessing p.linecomment

	// collect imports
	spec := &ast.ImportSpec{doc, ident, path, p.lineComment}
	p.imports = append(p.imports, spec)

	return spec
}
1982 | 1986 |
1983 | 1987 |
// parseConstSpec parses a single const spec: an identifier list with an
// optional type and optional initializer expressions. iota is the spec's
// index within the const group; the first spec (iota == 0) must have
// initializers, later ones may omit both type and values.
func parseConstSpec(p *parser, doc *ast.CommentGroup, iota int) ast.Spec {
	if p.trace {
		defer un(trace(p, "ConstSpec"))
	}

	idents := p.parseIdentList()
	typ := p.tryType()
	var values []ast.Expr
	// an explicit type or "=" forces initializers; so does being first
	if typ != nil || p.tok == token.ASSIGN || iota == 0 {
		p.expect(token.ASSIGN)
		values = p.parseRhsList()
	}
	p.expectSemi() // call before accessing p.linecomment

	// Go spec: The scope of a constant or variable identifier declared inside
	// a function begins at the end of the ConstSpec or VarSpec and ends at
	// the end of the innermost containing block.
	// (Global identifiers are resolved in a separate phase after parsing.)
	spec := &ast.ValueSpec{doc, idents, typ, values, p.lineComment}
	p.declare(spec, p.topScope, ast.Con, idents...)

	return spec
}
2007 | 2011 |
2008 | 2012 |
2009 func parseTypeSpec(p *parser, doc *ast.CommentGroup, _ int) ast.Spec { | 2013 func parseTypeSpec(p *parser, doc *ast.CommentGroup, _ int) ast.Spec { |
2010 if p.trace { | 2014 if p.trace { |
2011 defer un(trace(p, "TypeSpec")) | 2015 defer un(trace(p, "TypeSpec")) |
2012 } | 2016 } |
2013 | 2017 |
2014 ident := p.parseIdent() | 2018 ident := p.parseIdent() |
2015 typ := p.parseType() | 2019 typ := p.parseType() |
2016 » p.expectSemi() ÿ// call before accessing p.linecommentÿ | 2020 » p.expectSemi() // call before accessing p.linecomment |
2017 | 2021 |
2018 » ÿ// Go spec: The scope of a type identifier declared inside a function b
eginsÿ | 2022 » // Go spec: The scope of a type identifier declared inside a function be
gins |
2019 » ÿ// at the identifier in the TypeSpec and ends at the end of the innermo
stÿ | 2023 » // at the identifier in the TypeSpec and ends at the end of the innermos
t |
2020 » ÿ// containing block.ÿ | 2024 » // containing block. |
2021 » ÿ// (Global identifiers are resolved in a separate phase after parsing.)
ÿ | 2025 » // (Global identifiers are resolved in a separate phase after parsing.) |
2022 spec := &ast.TypeSpec{doc, ident, typ, p.lineComment} | 2026 spec := &ast.TypeSpec{doc, ident, typ, p.lineComment} |
2023 p.declare(spec, p.topScope, ast.Typ, ident) | 2027 p.declare(spec, p.topScope, ast.Typ, ident) |
2024 | 2028 |
2025 return spec | 2029 return spec |
2026 } | 2030 } |
2027 | 2031 |
2028 | 2032 |
2029 func parseVarSpec(p *parser, doc *ast.CommentGroup, _ int) ast.Spec { | 2033 func parseVarSpec(p *parser, doc *ast.CommentGroup, _ int) ast.Spec { |
2030 if p.trace { | 2034 if p.trace { |
2031 defer un(trace(p, "VarSpec")) | 2035 defer un(trace(p, "VarSpec")) |
2032 } | 2036 } |
2033 | 2037 |
2034 idents := p.parseIdentList() | 2038 idents := p.parseIdentList() |
2035 typ := p.tryType() | 2039 typ := p.tryType() |
2036 var values []ast.Expr | 2040 var values []ast.Expr |
2037 if typ == nil || p.tok == token.ASSIGN { | 2041 if typ == nil || p.tok == token.ASSIGN { |
2038 p.expect(token.ASSIGN) | 2042 p.expect(token.ASSIGN) |
2039 values = p.parseRhsList() | 2043 values = p.parseRhsList() |
2040 } | 2044 } |
2041 » p.expectSemi() ÿ// call before accessing p.linecommentÿ | 2045 » p.expectSemi() // call before accessing p.linecomment |
2042 | 2046 |
2043 » ÿ// Go spec: The scope of a constant or variable identifier declared ins
ideÿ | 2047 » // Go spec: The scope of a constant or variable identifier declared insi
de |
2044 » ÿ// a function begins at the end of the ConstSpec or VarSpec and ends at
ÿ | 2048 » // a function begins at the end of the ConstSpec or VarSpec and ends at |
2045 » ÿ// the end of the innermost containing block.ÿ | 2049 » // the end of the innermost containing block. |
2046 » ÿ// (Global identifiers are resolved in a separate phase after parsing.)
ÿ | 2050 » // (Global identifiers are resolved in a separate phase after parsing.) |
2047 spec := &ast.ValueSpec{doc, idents, typ, values, p.lineComment} | 2051 spec := &ast.ValueSpec{doc, idents, typ, values, p.lineComment} |
2048 p.declare(spec, p.topScope, ast.Var, idents...) | 2052 p.declare(spec, p.topScope, ast.Var, idents...) |
2049 | 2053 |
2050 return spec | 2054 return spec |
2051 } | 2055 } |
2052 | 2056 |
2053 | 2057 |
2054 func (p *parser) parseGenDecl(keyword token.Token, f parseSpecFunction) *ast.Gen
Decl { | 2058 func (p *parser) parseGenDecl(keyword token.Token, f parseSpecFunction) *ast.Gen
Decl { |
2055 if p.trace { | 2059 if p.trace { |
2056 defer un(trace(p, "GenDecl("+keyword.String()+")")) | 2060 defer un(trace(p, "GenDecl("+keyword.String()+")")) |
(...skipping 20 matching lines...) Expand all Loading... |
2077 | 2081 |
2078 | 2082 |
2079 func (p *parser) parseReceiver(scope *ast.Scope) *ast.FieldList { | 2083 func (p *parser) parseReceiver(scope *ast.Scope) *ast.FieldList { |
2080 if p.trace { | 2084 if p.trace { |
2081 defer un(trace(p, "Receiver")) | 2085 defer un(trace(p, "Receiver")) |
2082 } | 2086 } |
2083 | 2087 |
2084 pos := p.pos | 2088 pos := p.pos |
2085 par := p.parseParameters(scope, false) | 2089 par := p.parseParameters(scope, false) |
2086 | 2090 |
2087 » ÿ// must have exactly one receiverÿ | 2091 » // must have exactly one receiver |
2088 if par.NumFields() != 1 { | 2092 if par.NumFields() != 1 { |
2089 p.errorExpected(pos, "exactly one receiver") | 2093 p.errorExpected(pos, "exactly one receiver") |
2090 » » ÿ// TODO determine a better range for BadExpr belowÿ | 2094 » » // TODO determine a better range for BadExpr below |
2091 par.List = []*ast.Field{&ast.Field{Type: &ast.BadExpr{pos, pos}}
} | 2095 par.List = []*ast.Field{&ast.Field{Type: &ast.BadExpr{pos, pos}}
} |
2092 return par | 2096 return par |
2093 } | 2097 } |
2094 | 2098 |
2095 » ÿ// recv type must be of the form ["*"] identifierÿ | 2099 » // recv type must be of the form ["*"] identifier |
2096 recv := par.List[0] | 2100 recv := par.List[0] |
2097 base := deref(recv.Type) | 2101 base := deref(recv.Type) |
2098 if _, isIdent := base.(*ast.Ident); !isIdent { | 2102 if _, isIdent := base.(*ast.Ident); !isIdent { |
2099 p.errorExpected(base.Pos(), "(unqualified) identifier") | 2103 p.errorExpected(base.Pos(), "(unqualified) identifier") |
2100 par.List = []*ast.Field{&ast.Field{Type: &ast.BadExpr{recv.Pos()
, recv.End()}}} | 2104 par.List = []*ast.Field{&ast.Field{Type: &ast.BadExpr{recv.Pos()
, recv.End()}}} |
2101 } | 2105 } |
2102 | 2106 |
2103 return par | 2107 return par |
2104 } | 2108 } |
2105 | 2109 |
2106 | 2110 |
2107 func (p *parser) parseFuncDecl() *ast.FuncDecl { | 2111 func (p *parser) parseFuncDecl() *ast.FuncDecl { |
2108 if p.trace { | 2112 if p.trace { |
2109 defer un(trace(p, "FunctionDecl")) | 2113 defer un(trace(p, "FunctionDecl")) |
2110 } | 2114 } |
2111 | 2115 |
2112 doc := p.leadComment | 2116 doc := p.leadComment |
2113 pos := p.expect(token.FUNC) | 2117 pos := p.expect(token.FUNC) |
2114 » scope := ast.NewScope(p.topScope) ÿ// function scopeÿ | 2118 » scope := ast.NewScope(p.topScope) // function scope |
2115 | 2119 |
2116 var recv *ast.FieldList | 2120 var recv *ast.FieldList |
2117 if p.tok == token.LPAREN { | 2121 if p.tok == token.LPAREN { |
2118 recv = p.parseReceiver(scope) | 2122 recv = p.parseReceiver(scope) |
2119 } | 2123 } |
2120 | 2124 |
2121 ident := p.parseIdent() | 2125 ident := p.parseIdent() |
2122 | 2126 |
2123 params, results := p.parseSignature(scope) | 2127 params, results := p.parseSignature(scope) |
2124 | 2128 |
2125 var body *ast.BlockStmt | 2129 var body *ast.BlockStmt |
2126 if p.tok == token.LBRACE { | 2130 if p.tok == token.LBRACE { |
2127 body = p.parseBody(scope) | 2131 body = p.parseBody(scope) |
2128 } | 2132 } |
2129 p.expectSemi() | 2133 p.expectSemi() |
2130 | 2134 |
2131 decl := &ast.FuncDecl{doc, recv, ident, &ast.FuncType{pos, params, resul
ts}, body} | 2135 decl := &ast.FuncDecl{doc, recv, ident, &ast.FuncType{pos, params, resul
ts}, body} |
2132 if recv == nil { | 2136 if recv == nil { |
2133 » » ÿ// Go spec: The scope of an identifier denoting a constant, typ
e,ÿ | 2137 » » // Go spec: The scope of an identifier denoting a constant, type
, |
2134 » » ÿ// variable, or function (but not method) declared at top level
ÿ | 2138 » » // variable, or function (but not method) declared at top level |
2135 » » ÿ// (outside any function) is the package block.ÿ | 2139 » » // (outside any function) is the package block. |
2136 » » ÿ//ÿ | 2140 » » // |
2137 » » ÿ// init() functions cannot be referred to and there mayÿ | 2141 » » // init() functions cannot be referred to and there may |
2138 » » ÿ// be more than one - don't put them in the pkgScopeÿ | 2142 » » // be more than one - don't put them in the pkgScope |
2139 if ident.Name != "init" { | 2143 if ident.Name != "init" { |
2140 p.declare(decl, p.pkgScope, ast.Fun, ident) | 2144 p.declare(decl, p.pkgScope, ast.Fun, ident) |
2141 } | 2145 } |
2142 } | 2146 } |
2143 | 2147 |
2144 return decl | 2148 return decl |
2145 } | 2149 } |
2146 | 2150 |
2147 | 2151 |
2148 func (p *parser) parseDecl() ast.Decl { | 2152 func (p *parser) parseDecl() ast.Decl { |
(...skipping 11 matching lines...) Expand all Loading... |
2160 | 2164 |
2161 case token.VAR: | 2165 case token.VAR: |
2162 f = parseVarSpec | 2166 f = parseVarSpec |
2163 | 2167 |
2164 case token.FUNC: | 2168 case token.FUNC: |
2165 return p.parseFuncDecl() | 2169 return p.parseFuncDecl() |
2166 | 2170 |
2167 default: | 2171 default: |
2168 pos := p.pos | 2172 pos := p.pos |
2169 p.errorExpected(pos, "declaration") | 2173 p.errorExpected(pos, "declaration") |
2170 » » p.next() ÿ// make progressÿ | 2174 » » p.next() // make progress |
2171 decl := &ast.BadDecl{pos, p.pos} | 2175 decl := &ast.BadDecl{pos, p.pos} |
2172 return decl | 2176 return decl |
2173 } | 2177 } |
2174 | 2178 |
2175 return p.parseGenDecl(p.tok, f) | 2179 return p.parseGenDecl(p.tok, f) |
2176 } | 2180 } |
2177 | 2181 |
2178 | 2182 |
2179 func (p *parser) parseDeclList() (list []ast.Decl) { | 2183 func (p *parser) parseDeclList() (list []ast.Decl) { |
2180 if p.trace { | 2184 if p.trace { |
2181 defer un(trace(p, "DeclList")) | 2185 defer un(trace(p, "DeclList")) |
2182 } | 2186 } |
2183 | 2187 |
2184 for p.tok != token.EOF { | 2188 for p.tok != token.EOF { |
2185 list = append(list, p.parseDecl()) | 2189 list = append(list, p.parseDecl()) |
2186 } | 2190 } |
2187 | 2191 |
2188 return | 2192 return |
2189 } | 2193 } |
2190 | 2194 |
2191 | 2195 |
2192 ÿ// ----------------------------------------------------------------------------
ÿ | 2196 // ---------------------------------------------------------------------------- |
2193 ÿ// Source filesÿ | 2197 // Source files |
2194 | 2198 |
2195 func (p *parser) parseFile() *ast.File { | 2199 func (p *parser) parseFile() *ast.File { |
2196 if p.trace { | 2200 if p.trace { |
2197 defer un(trace(p, "File")) | 2201 defer un(trace(p, "File")) |
2198 } | 2202 } |
2199 | 2203 |
2200 » ÿ// package clauseÿ | 2204 » // package clause |
2201 doc := p.leadComment | 2205 doc := p.leadComment |
2202 pos := p.expect(token.PACKAGE) | 2206 pos := p.expect(token.PACKAGE) |
2203 » ÿ// Go spec: The package clause is not a declaration;ÿ | 2207 » // Go spec: The package clause is not a declaration; |
2204 » ÿ// the package name does not appear in any scope.ÿ | 2208 » // the package name does not appear in any scope. |
2205 ident := p.parseIdent() | 2209 ident := p.parseIdent() |
2206 p.expectSemi() | 2210 p.expectSemi() |
2207 | 2211 |
2208 var decls []ast.Decl | 2212 var decls []ast.Decl |
2209 | 2213 |
2210 » ÿ// Don't bother parsing the rest if we had errors already.ÿ | 2214 » // Don't bother parsing the rest if we had errors already. |
2211 » ÿ// Likely not a Go source file at all.ÿ | 2215 » // Likely not a Go source file at all. |
2212 | 2216 |
2213 if p.ErrorCount() == 0 && p.mode&PackageClauseOnly == 0 { | 2217 if p.ErrorCount() == 0 && p.mode&PackageClauseOnly == 0 { |
2214 » » ÿ// import declsÿ | 2218 » » // import decls |
2215 for p.tok == token.IMPORT { | 2219 for p.tok == token.IMPORT { |
2216 decls = append(decls, p.parseGenDecl(token.IMPORT, parse
ImportSpec)) | 2220 decls = append(decls, p.parseGenDecl(token.IMPORT, parse
ImportSpec)) |
2217 } | 2221 } |
2218 | 2222 |
2219 if p.mode&ImportsOnly == 0 { | 2223 if p.mode&ImportsOnly == 0 { |
2220 » » » ÿ// rest of package bodyÿ | 2224 » » » // rest of package body |
2221 for p.tok != token.EOF { | 2225 for p.tok != token.EOF { |
2222 decls = append(decls, p.parseDecl()) | 2226 decls = append(decls, p.parseDecl()) |
2223 } | 2227 } |
2224 } | 2228 } |
2225 } | 2229 } |
2226 | 2230 |
2227 assert(p.topScope == p.pkgScope, "imbalanced scopes") | 2231 assert(p.topScope == p.pkgScope, "imbalanced scopes") |
2228 | 2232 |
2229 » ÿ// resolve global identifiers within the same fileÿ | 2233 » // resolve global identifiers within the same file |
2230 i := 0 | 2234 i := 0 |
2231 for _, ident := range p.unresolved { | 2235 for _, ident := range p.unresolved { |
2232 » » ÿ// i <= index for current identÿ | 2236 » » // i <= index for current ident |
2233 assert(ident.Obj == unresolved, "object already resolved") | 2237 assert(ident.Obj == unresolved, "object already resolved") |
2234 » » ident.Obj = p.pkgScope.Lookup(ident.Name) ÿ// also removes unres
olved sentinelÿ | 2238 » » ident.Obj = p.pkgScope.Lookup(ident.Name) // also removes unreso
lved sentinel |
2235 if ident.Obj == nil { | 2239 if ident.Obj == nil { |
2236 p.unresolved[i] = ident | 2240 p.unresolved[i] = ident |
2237 i++ | 2241 i++ |
2238 } | 2242 } |
2239 } | 2243 } |
2240 | 2244 |
2241 » ÿ// TODO(gri): store p.imports in ASTÿ | 2245 » // TODO(gri): store p.imports in AST |
2242 » return &ast.File{doc, pos, ident, decls, p.pkgScope, p.unresolved[0:i],
p.comments} | 2246 » return &ast.File{doc, pos, ident, decls, p.pkgScope, p.imports, p.unreso
lved[0:i], p.comments} |
2243 } | 2247 } |
LEFT | RIGHT |