Mirror of https://github.com/vlang/v.git, synced 2023-08-10 21:13:21 +03:00
vfmt: handle for ;; loops; run vfmt on parser.v
parent 8f9a8e1e7f
commit 275b20a184
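The new ast.ForCStmt branch in the formatter writes the init statement, the condition and the increment of a C-style loop on one line, temporarily setting f.single_line_if so the init assignment is not followed by a newline. As an illustration only (this loop is not taken from the commit), vfmt now renders a `for ;;` loop as:

// illustrative example: init, condition and increment stay on one line
for i := 0; i < 10; i++ {
	println(i)
}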
@@ -132,7 +132,9 @@ fn (f mut Fmt) stmt(node ast.Stmt) {
f.write(', ')
}
}
f.writeln('')
if !f.single_line_if {
f.writeln('')
}
f.is_assign = false
}
ast.Attr {
@@ -207,6 +209,21 @@ fn (f mut Fmt) stmt(node ast.Stmt) {
f.writeln('\n')
}
}
ast.ForCStmt {
f.write('for ')
if it.has_init {
f.single_line_if = true // to keep all for ;; exprs on the same line
f.stmt(it.init)
f.single_line_if = false
}
f.write('; ')
f.expr(it.cond)
f.write('; ')
f.expr(it.inc)
f.writeln('{ ')
f.stmts(it.stmts)
f.writeln('}')
}
ast.ForInStmt {
f.write('for ')
if it.key_var != '' {
@@ -294,8 +311,10 @@ fn (f mut Fmt) stmt(node ast.Stmt) {
f.writeln('}')
}
else {
eprintln('fmt stmt: unknown node: ' + typeof(node))
// exit(1)
eprintln('fmt stmt: unhandled node ' + typeof(node))
if typeof(node) != 'unknown v.ast.Expr' {
exit(1)
}
}
}
}

@@ -15,42 +15,40 @@ import (
)

struct Parser {
scanner &scanner.Scanner
file_name string
scanner &scanner.Scanner
file_name string
mut:
tok token.Token
peek_tok token.Token
table &table.Table
is_c bool
inside_if bool
pref &pref.Preferences
builtin_mod bool
mod string
attr string
expr_mod string
scope &ast.Scope
global_scope &ast.Scope
imports map[string]string
ast_imports []ast.Import
is_amp bool
returns bool
tok token.Token
peek_tok token.Token
table &table.Table
is_c bool
inside_if bool
pref &pref.Preferences
builtin_mod bool
mod string
attr string
expr_mod string
scope &ast.Scope
global_scope &ast.Scope
imports map[string]string
ast_imports []ast.Import
is_amp bool
returns bool
inside_match_case bool // to separate `match_expr { }` from `Struct{}`
//comments []ast.Comment
}

// for tests
pub fn parse_stmt(text string, table &table.Table, scope &ast.Scope) ast.Stmt {
s := scanner.new_scanner(text, .skip_comments)
mut p := Parser{
mut p := parser.Parser{
scanner: s
table: table
pref: &pref.Preferences{}
scope: scope
// scope: &ast.Scope{start_pos: 0, parent: 0}
global_scope: &ast.Scope{
start_pos: 0
parent: 0
}
start_pos: 0
parent: 0
}
}
p.init_parse_fns()
p.read_first_token()
@@ -63,22 +61,23 @@ pub fn parse_file(path string, table &table.Table, comments_mode scanner.Comment
// panic(err)
// }
mut stmts := []ast.Stmt
mut p := Parser{
mut p := parser.Parser{
scanner: scanner.new_scanner_file(path, comments_mode)
table: table
file_name: path
pref: pref
scope: &ast.Scope{
start_pos: 0
parent: 0
}
start_pos: 0
parent: 0
}
global_scope: global_scope
}
// comments_mode: comments_mode
p.read_first_token()
// module decl
module_decl := if p.tok.kind == .key_module { p.module_decl() } else { ast.Module{name: 'main'
} }
module_decl := if p.tok.kind == .key_module { p.module_decl() } else { ast.Module{
name: 'main'
} }
p.mod = module_decl.name
p.builtin_mod = p.mod == 'builtin'
// imports
@@ -87,9 +86,9 @@ pub fn parse_file(path string, table &table.Table, comments_mode scanner.Comment
for p.tok.kind == .key_import {
imports << p.import_stmt()
}
*/
*/
// TODO: import only mode
for {
for {
// res := s.scan()
if p.tok.kind == .eof {
// println('EOF, breaking')
@@ -108,7 +107,6 @@ pub fn parse_file(path string, table &table.Table, comments_mode scanner.Comment
stmts: stmts
scope: p.scope
global_scope: p.global_scope
//comments: p.comments
}
}

@@ -138,8 +136,6 @@ fn (q mut Queue) run() {
q.mu.unlock()
}
*/


pub fn parse_files(paths []string, table &table.Table, pref &pref.Preferences, global_scope &ast.Scope) []ast.File {
/*
println('\n\n\nparse_files()')
@@ -155,8 +151,7 @@ pub fn parse_files(paths []string, table &table.Table, pref &pref.Preferences, g
}
time.sleep_ms(100)
return q.parsed_ast_files
*/

*/
// ///////////////
mut files := []ast.File
for path in paths {
@@ -204,7 +199,7 @@ pub fn (p mut Parser) parse_block_no_scope() []ast.Stmt {
p.check(.lcbr)
mut stmts := []ast.Stmt
if p.tok.kind != .rcbr {
for {
for {
stmts << p.stmt()
// p.warn('after stmt(): tok=$p.tok.str()')
if p.tok.kind in [.eof, .rcbr] {
@@ -222,8 +217,6 @@ fn (p mut Parser) next_with_comment() {
p.peek_tok = p.scanner.scan()
}
*/


fn (p mut Parser) next() {
p.tok = p.peek_tok
p.peek_tok = p.scanner.scan()
@@ -232,7 +225,7 @@ fn (p mut Parser) next() {
p.comments << ast.Comment{text:p.tok.lit, line_nr:p.tok.line_nr}
p.next()
}
*/
*/
}

fn (p mut Parser) check(expected token.Kind) {
@@ -278,7 +271,7 @@ pub fn (p mut Parser) top_stmt() ast.Stmt {
p.error('wrong pub keyword usage')
return ast.Stmt{}
}
}
}
}
.lsbr {
return p.attribute()
@@ -344,7 +337,7 @@ pub fn (p mut Parser) comment() ast.Comment {
pos := p.tok.position()
text := p.tok.lit
p.next()
//p.next_with_comment()
// p.next_with_comment()
return ast.Comment{
text: text
pos: pos
@@ -409,10 +402,10 @@ pub fn (p mut Parser) stmt() ast.Stmt {
.key_go {
p.next()
expr := p.expr(0)
//mut call_expr := &ast.CallExpr(0) // TODO
// mut call_expr := &ast.CallExpr(0) // TODO
match expr {
ast.CallExpr {
//call_expr = it
// call_expr = it
}
else {
p.error('expression in `go` must be a function call')
@@ -433,8 +426,7 @@ pub fn (p mut Parser) stmt() ast.Stmt {
// `x := ...`
if p.tok.kind == .name && p.peek_tok.kind in [.decl_assign, .comma] {
return p.assign_stmt()
}
else if p.tok.kind == .name && p.peek_tok.kind == .colon {
} else if p.tok.kind == .name && p.peek_tok.kind == .colon {
// `label:`
name := p.check_name()
p.check(.colon)
@@ -457,10 +449,10 @@ pub fn (p mut Parser) assign_expr(left ast.Expr) ast.AssignExpr {
val := p.expr(0)
match left {
ast.IndexExpr {
//it.mark_as_setter()
// it.mark_as_setter()
it.is_setter = true
}
else{}
else {}
}
node := ast.AssignExpr{
left: left
@@ -505,8 +497,6 @@ fn (p mut Parser) range_expr(low ast.Expr) ast.Expr {
return node
}
*/


pub fn (p &Parser) error(s string) {
mut kind := 'error:'
if p.pref.is_verbose {
@@ -558,7 +548,7 @@ fn (p mut Parser) struct_init(short_syntax bool) ast.StructInit {
mut field_names := []string
mut exprs := []ast.Expr
mut i := 0
is_short_syntax := !(p.peek_tok.kind == .colon || p.tok.kind == .rcbr) // `Vec{a,b,c}`
is_short_syntax := !(p.peek_tok.kind == .colon || p.tok.kind == .rcbr) // `Vec{a,b,c}`
// p.warn(is_short_syntax.str())
for p.tok.kind != .rcbr {
p.check_comment()
@@ -566,8 +556,7 @@ fn (p mut Parser) struct_init(short_syntax bool) ast.StructInit {
if is_short_syntax {
expr := p.expr(0)
exprs << expr
}
else {
} else {
field_name = p.check_name()
field_names << field_name
}
@@ -614,11 +603,11 @@ pub fn (p mut Parser) name_expr() ast.Expr {
return p.string_expr()
}
known_var := p.scope.known_var(p.tok.lit)
if p.peek_tok.kind == .dot && !known_var && (is_c || p.known_import(p.tok.lit) || p.mod.all_after('.') == p.tok.lit) {
if p.peek_tok.kind == .dot && !known_var && (is_c || p.known_import(p.tok.lit) || p.mod.all_after('.') ==
p.tok.lit) {
if is_c {
mod = 'C'
}
else {
} else {
// prepend the full import
mod = p.imports[p.tok.lit]
}
@@ -651,7 +640,7 @@ pub fn (p mut Parser) name_expr() ast.Expr {
// TODO, string(b, len)
if p.tok.kind == .comma && table.type_idx(to_typ) == table.string_type_idx {
p.check(.comma)
arg = p.expr(0) // len
arg = p.expr(0) // len
has_arg = true
}
p.check(.rpar)
@@ -663,29 +652,23 @@ pub fn (p mut Parser) name_expr() ast.Expr {
}
p.expr_mod = ''
return node
}
else {
} else {
// fn call
// println('calling $p.tok.lit')
x := p.call_expr(is_c, mod) // TODO `node,typ :=` should work
x := p.call_expr(is_c, mod) // TODO `node,typ :=` should work
node = x
}
}
else if p.peek_tok.kind == .lcbr && (p.tok.lit[0].is_capital() || is_c ||
(p.builtin_mod && p.tok.lit in table.builtin_type_names)) &&
(p.tok.lit.len in [1,2] || !p.tok.lit[p.tok.lit.len - 1].is_capital()) &&
!p.inside_match_case
{
} else if p.peek_tok.kind == .lcbr && (p.tok.lit[0].is_capital() || is_c || (p.builtin_mod && p.tok.lit in
table.builtin_type_names)) && (p.tok.lit.len in [1, 2] || !p.tok.lit[p.tok.lit.len - 1].is_capital()) &&
!p.inside_match_case {
// || p.table.known_type(p.tok.lit)) {
return p.struct_init(false) // short_syntax: false
}
else if p.peek_tok.kind == .dot && (p.tok.lit[0].is_capital() && !known_var) {
return p.struct_init(false) // short_syntax: false
} else if p.peek_tok.kind == .dot && (p.tok.lit[0].is_capital() && !known_var) {
// `Color.green`
mut enum_name := p.check_name()
if mod != '' {
enum_name = mod + '.' + enum_name
}
else {
} else {
enum_name = p.prepend_mod(enum_name)
}
// p.warn('Color.green $enum_name ' + p.prepend_mod(enum_name) + 'mod=$mod')
@@ -694,14 +677,12 @@ pub fn (p mut Parser) name_expr() ast.Expr {
// println('enum val $enum_name . $val')
p.expr_mod = ''
return ast.EnumVal{
enum_name: enum_name // lp.prepend_mod(enum_name)

enum_name: enum_name
val: val
pos: p.tok.position()
mod: mod
}
}
else {
} else {
mut ident := ast.Ident{}
ident = p.parse_ident(is_c)
node = ident
@@ -714,11 +695,11 @@ pub fn (p mut Parser) expr(precedence int) ast.Expr {
// println('\n\nparser.expr()')
mut typ := table.void_type
mut node := ast.Expr{}
//defer {
//if p.tok.kind == .comment {
//p.comment()
//}
//}
// defer {
// if p.tok.kind == .comment {
// p.comment()
// }
// }
// Prefix
match p.tok.kind {
.name {
@@ -772,7 +753,7 @@ pub fn (p mut Parser) expr(precedence int) ast.Expr {
node = ast.None{}
}
.key_sizeof {
p.next() // sizeof
p.next() // sizeof
p.check(.lpar)
if p.tok.kind == .amp {
p.next()
@@ -805,15 +786,12 @@ pub fn (p mut Parser) expr(precedence int) ast.Expr {
p.next()
if p.tok.kind == .string {
node = p.map_init()
}
else {
} else {
if p.peek_tok.kind == .pipe {
node = p.assoc()
}
else if p.peek_tok.kind == .colon || p.tok.kind == .rcbr {
node = p.struct_init(true) // short_syntax: true
}
else {
} else if p.peek_tok.kind == .colon || p.tok.kind == .rcbr {
node = p.struct_init(true) // short_syntax: true
} else {
p.error('unexpected {')
}
}
@@ -830,14 +808,11 @@ pub fn (p mut Parser) expr(precedence int) ast.Expr {
for precedence < p.tok.precedence() {
if p.tok.kind.is_assign() {
node = p.assign_expr(node)
}
else if p.tok.kind == .dot {
} else if p.tok.kind == .dot {
node = p.dot_expr(node)
}
else if p.tok.kind == .lsbr {
} else if p.tok.kind == .lsbr {
node = p.index_expr(node)
}
else if p.tok.kind == .key_as {
} else if p.tok.kind == .key_as {
pos := p.tok.position()
p.next()
typ = p.parse_type()
@@ -846,8 +821,7 @@ pub fn (p mut Parser) expr(precedence int) ast.Expr {
typ: typ
pos: pos
}
}
else if p.tok.kind == .left_shift {
} else if p.tok.kind == .left_shift {
// TODO: handle in later stages since this
// will fudge left shift as it makes it right assoc
// `arr << 'a'` | `arr << 'a' + 'b'`
@@ -860,11 +834,9 @@ pub fn (p mut Parser) expr(precedence int) ast.Expr {
op: tok.kind
pos: tok.position()
}
}
else if p.tok.kind.is_infix() {
} else if p.tok.kind.is_infix() {
node = p.infix_expr(node)
}
else if p.tok.kind in [.inc, .dec] {
} else if p.tok.kind in [.inc, .dec] {
// Postfix
node = ast.PostfixExpr{
op: p.tok.kind
@@ -873,8 +845,7 @@ pub fn (p mut Parser) expr(precedence int) ast.Expr {
}
p.next()
// return node // TODO bring back, only allow ++/-- in exprs in translated code
}
else {
} else {
return node
}
}
@@ -899,7 +870,7 @@ fn (p mut Parser) prefix_expr() ast.PrefixExpr {

fn (p mut Parser) index_expr(left ast.Expr) ast.IndexExpr {
// left == `a` in `a[0]`
p.next() // [
p.next() // [
mut has_low := true
if p.tok.kind == .dotdot {
has_low = false
@@ -911,13 +882,13 @@ fn (p mut Parser) index_expr(left ast.Expr) ast.IndexExpr {
left: left
pos: p.tok.position()
index: ast.RangeExpr{
low: ast.Expr{}
high: high
has_high: true
}
low: ast.Expr{}
high: high
has_high: true
}
}
}
expr := p.expr(0) // `[expr]` or `[expr..]`
expr := p.expr(0) // `[expr]` or `[expr..]`
mut has_high := false
if p.tok.kind == .dotdot {
// [start..end] or [start..]
@@ -932,11 +903,11 @@ fn (p mut Parser) index_expr(left ast.Expr) ast.IndexExpr {
left: left
pos: p.tok.position()
index: ast.RangeExpr{
low: expr
high: high
has_high: has_high
has_low: has_low
}
low: expr
high: high
has_high: has_high
has_low: has_low
}
}
}
// [expr]
@@ -993,8 +964,8 @@ fn (p mut Parser) dot_expr(left ast.Expr) ast.Expr {
pos: pos
is_method: true
or_block: ast.OrExpr{
stmts: or_stmts
}
stmts: or_stmts
}
}
mut node := ast.Expr{}
node = mcall_expr
@@ -1029,8 +1000,6 @@ fn (p mut Parser) infix_expr(left ast.Expr) ast.Expr {
expr = ast.InfixExpr{
left: left
right: right
// right_type: typ

op: op
pos: pos
}
@@ -1069,12 +1038,10 @@ fn (p mut Parser) for_statement() ast.Stmt {
pos: pos
is_inf: true
}
}
else if p.tok.kind == .key_mut {
} else if p.tok.kind == .key_mut {
p.error('`mut` is not required in for loops')
}
else if p.peek_tok.kind in [.decl_assign, .assign, .semicolon] || p.tok.kind == .semicolon {
// for i := 0; i < 10; i++ {
} else if p.peek_tok.kind in [.decl_assign, .assign, .semicolon] || p.tok.kind == .semicolon {
// `for i := 0; i < 10; i++ {`
mut init := ast.Stmt{}
mut cond := p.new_true_expr()
// mut inc := ast.Stmt{}
@@ -1083,18 +1050,10 @@ fn (p mut Parser) for_statement() ast.Stmt {
if p.peek_tok.kind in [.assign, .decl_assign] {
init = p.assign_stmt()
has_init = true
} else if p.tok.kind != .semicolon {
}
else if p.tok.kind != .semicolon {}
// allow `for ;; i++ {`
// Allow `for i = 0; i < ...`
/*
cond, typ = p.expr(0)
if typ.kind != _bool {
p.error('non-bool used as for condition')
}
*/
// println(1)
// }
p.check(.semicolon)
if p.tok.kind != .semicolon {
mut typ := table.void_type
@@ -1115,8 +1074,7 @@ fn (p mut Parser) for_statement() ast.Stmt {
inc: inc
pos: pos
}
}
else if p.peek_tok.kind in [.key_in, .comma] {
} else if p.peek_tok.kind in [.key_in, .comma] {
// `for i in vals`, `for i in start .. end`
mut key_var_name := ''
mut val_var_name := p.check_name()
@@ -1145,8 +1103,7 @@ fn (p mut Parser) for_statement() ast.Stmt {
name: val_var_name
typ: table.int_type
})
}
else {
} else {
// this type will be set in checker
p.scope.register(val_var_name, ast.Var{
name: val_var_name
@@ -1186,17 +1143,15 @@ fn (p mut Parser) if_expr() ast.IfExpr {
mut comment := ast.Comment{}
if p.tok.kind == .key_if {
p.check(.key_if)
}
else {
//if p.tok.kind == .comment {
//p.error('place comments inside {}')
//}
//comment = p.check_comment()
} else {
// if p.tok.kind == .comment {
// p.error('place comments inside {}')
// }
// comment = p.check_comment()
p.check(.key_else)
if p.tok.kind == .key_if {
p.check(.key_if)
}
else {
} else {
has_else = true
branches << ast.IfBranch{
stmts: p.parse_block()
@@ -1223,8 +1178,7 @@ fn (p mut Parser) if_expr() ast.IfExpr {
var_name: var_name
expr: expr
}
}
else {
} else {
cond = p.expr(0)
}
p.inside_if = false
@@ -1236,7 +1190,7 @@ fn (p mut Parser) if_expr() ast.IfExpr {
cond: cond
stmts: stmts
pos: branch_pos
comment: ast.Comment{}// comment
comment: ast.Comment{}
}
if p.tok.kind != .key_else {
break
@@ -1325,16 +1279,15 @@ fn (p mut Parser) array_init() ast.ArrayInit {
idx := p.table.find_or_register_array(elem_type, 1)
array_type = table.new_type(idx)
}
}
else {
} else {
// [1,2,3]
for i := 0; p.tok.kind != .rsbr; i++ {
for i := 0; p.tok.kind != .rsbr; i++{
expr := p.expr(0)
exprs << expr
if p.tok.kind == .comma {
p.check(.comma)
}
//p.check_comment()
// p.check_comment()
}
line_nr := p.tok.line_nr
p.check(.rsbr)
@@ -1386,11 +1339,9 @@ fn (p mut Parser) parse_number_literal() ast.Expr {
mut node := ast.Expr{}
if lit.contains('.') {
node = ast.FloatLiteral{
// val: lit.f64()
val: lit
}
}
else {
} else {
node = ast.IntegerLiteral{
val: lit
}
@@ -1444,8 +1395,7 @@ fn (p mut Parser) import_stmt() []ast.Import {
}
}
p.check(.rpar)
}
else {
} else {
imports << p.parse_import()
}
return imports
@@ -1473,14 +1423,13 @@ fn (p mut Parser) const_decl() ast.ConstDecl {
name: name
expr: expr
pos: p.tok.position()

}
fields << field
p.global_scope.register(field.name, field)
}
p.check(.rpar)
return ast.ConstDecl{
pos : pos
pos: pos
fields: fields
is_pub: is_pub
}
@@ -1494,14 +1443,13 @@ fn (p mut Parser) struct_decl() ast.StructDecl {
}
if p.tok.kind == .key_struct {
p.check(.key_struct)
}
else {
} else {
p.check(.key_union)
}
is_c := p.tok.lit == 'C' && p.peek_tok.kind == .dot
if is_c {
p.next() // C
p.next() // .
p.next() // C
p.next() // .
}
is_typedef := p.attr == 'typedef'
mut name := p.check_name()
@@ -1512,7 +1460,6 @@ fn (p mut Parser) struct_decl() ast.StructDecl {
mut mut_pos := -1
mut pub_pos := -1
mut pub_mut_pos := -1

for p.tok.kind != .rcbr {
mut comment := ast.Comment{}
if p.tok.kind == .comment {
@@ -1523,18 +1470,15 @@ fn (p mut Parser) struct_decl() ast.StructDecl {
if p.tok.kind == .key_mut {
p.check(.key_mut)
pub_mut_pos = fields.len
}
else {
} else {
pub_pos = fields.len
}
p.check(.colon)
}
else if p.tok.kind == .key_mut {
} else if p.tok.kind == .key_mut {
p.check(.key_mut)
p.check(.colon)
mut_pos = fields.len
}
else if p.tok.kind == .key_global {
} else if p.tok.kind == .key_global {
p.check(.key_global)
p.check(.colon)
}
@@ -1547,14 +1491,14 @@ fn (p mut Parser) struct_decl() ast.StructDecl {
s := p.table.get_type_symbol(typ)
println('XXXX' + s.str())
}
*/
mut default_expr := '' // ast.Expr{}
*/
mut default_expr := '' // ast.Expr{}
if p.tok.kind == .assign {
// Default value
p.next()
default_expr = p.tok.lit
p.expr(0)
//default_expr = p.expr(0)
// default_expr = p.expr(0)
}
if p.tok.kind == .comment {
comment = p.comment()
@@ -1576,25 +1520,23 @@ fn (p mut Parser) struct_decl() ast.StructDecl {
p.check(.rcbr)
if is_c {
name = 'C.$name'
}
else {
} else {
name = p.prepend_mod(name)
}
t := table.TypeSymbol{
kind: .struct_
name: name
info: table.Struct{
fields: fields
is_typedef: is_typedef
}
fields: fields
is_typedef: is_typedef
}
}
mut ret := 0
if p.builtin_mod && t.name in table.builtin_type_names {
// this allows overiding the builtins type
// with the real struct type info parsed from builtin
ret = p.table.register_builtin_type_symbol(t)
}
else {
} else {
ret = p.table.register_type_symbol(t)
}
if ret == -1 {
@@ -1618,7 +1560,7 @@ fn (p mut Parser) interface_decl() ast.InterfaceDecl {
if is_pub {
p.next()
}
p.next() // `interface`
p.next() // `interface`
interface_name := p.check_name()
p.check(.lcbr)
mut field_names := []string
@@ -1647,13 +1589,12 @@ fn (p mut Parser) return_stmt() ast.Return {
pos: p.tok.position()
}
}
for {
for {
expr := p.expr(0)
exprs << expr
if p.tok.kind == .comma {
p.check(.comma)
}
else {
} else {
break
}
}
@@ -1667,7 +1608,7 @@ fn (p mut Parser) return_stmt() ast.Return {
// left hand side of `=` or `:=` in `a,b,c := 1,2,3`
fn (p mut Parser) parse_assign_lhs() []ast.Ident {
mut idents := []ast.Ident
for {
for {
is_mut := p.tok.kind == .key_mut
if is_mut {
p.check(.key_mut)
@@ -1684,8 +1625,7 @@ fn (p mut Parser) parse_assign_lhs() []ast.Ident {
idents << ident
if p.tok.kind == .comma {
p.check(.comma)
}
else {
} else {
break
}
}
@@ -1695,13 +1635,12 @@ fn (p mut Parser) parse_assign_lhs() []ast.Ident {
// right hand side of `=` or `:=` in `a,b,c := 1,2,3`
fn (p mut Parser) parse_assign_rhs() []ast.Expr {
mut exprs := []ast.Expr
for {
for {
expr := p.expr(0)
exprs << expr
if p.tok.kind == .comma {
p.check(.comma)
}
else {
} else {
break
}
}
@@ -1716,7 +1655,7 @@ fn (p mut Parser) assign_stmt() ast.Stmt {
idents := p.parse_assign_lhs()
pos := p.tok.position()
op := p.tok.kind
p.next() // :=, =
p.next() // :=, =
exprs := p.parse_assign_rhs()
is_decl := op == .decl_assign
for i, ident in idents {
@@ -1736,8 +1675,7 @@ fn (p mut Parser) assign_stmt() ast.Stmt {
name: ident.name
expr: exprs[i]
})
}
else {
} else {
p.scope.register(ident.name, ast.Var{
name: ident.name
})
@@ -1771,7 +1709,8 @@ fn (p mut Parser) hash() ast.HashStmt {
}

fn (p mut Parser) global_decl() ast.GlobalDecl {
if !p.pref.translated && !p.pref.is_live && !p.builtin_mod && !p.pref.building_v && p.mod != 'ui' && p.mod != 'gg2' && p.mod != 'uiold' && !os.getwd().contains('/volt') && !p.pref.enable_globals {
if !p.pref.translated && !p.pref.is_live && !p.builtin_mod && !p.pref.building_v && p.mod != 'ui' &&
p.mod != 'gg2' && p.mod != 'uiold' && !os.getwd().contains('/volt') && !p.pref.enable_globals {
p.error('use `v --enable-globals ...` to enable globals')
}
p.next()
@@ -1796,8 +1735,7 @@ fn (p mut Parser) global_decl() ast.GlobalDecl {
if !p.cgen.nogen {
p.cgen.consts << g
}
*/

*/
glob := ast.GlobalDecl{
name: name
typ: typ
@@ -1818,8 +1756,8 @@ fn (p mut Parser) match_expr() ast.MatchExpr {
p.check(.lcbr)
mut branches := []ast.MatchBranch
mut have_final_else := false
for {
comment := p.check_comment() // comment before {}
for {
comment := p.check_comment() // comment before {}
mut exprs := []ast.Expr
branch_pos := p.tok.position()
p.open_scope()
@@ -1827,10 +1765,9 @@ fn (p mut Parser) match_expr() ast.MatchExpr {
if p.tok.kind == .key_else {
have_final_else = true
p.next()
}
// Sum type match
else if p.tok.kind == .name &&
(p.tok.lit in table.builtin_type_names || p.tok.lit[0].is_capital() || p.peek_tok.kind == .dot) {
} else if p.tok.kind == .name && (p.tok.lit in table.builtin_type_names || p.tok.lit[0].is_capital() ||
p.peek_tok.kind == .dot) {
// Sum type match
// if sym.kind == .sum_type {
// p.warn('is sum')
// TODO `exprs << ast.Type{...}`
@@ -1851,10 +1788,9 @@ fn (p mut Parser) match_expr() ast.MatchExpr {
p.parse_type()
}
is_sum_type = true
}
else {
} else {
// Expression match
for {
for {
p.inside_match_case = true
expr := p.expr(0)
p.inside_match_case = false
@@ -1922,8 +1858,8 @@ fn (p mut Parser) enum_decl() ast.EnumDecl {
kind: .enum_
name: name
info: table.Enum{
vals: vals
}
vals: vals
}
})
return ast.EnumDecl{
name: name
@@ -1944,7 +1880,7 @@ fn (p mut Parser) type_decl() ast.TypeDecl {
if p.tok.kind == .assign {
// type SumType = A | B | c
p.next()
for {
for {
variant_type := p.parse_type()
sum_variants << variant_type
if p.tok.kind != .pipe {
@@ -1956,8 +1892,8 @@ fn (p mut Parser) type_decl() ast.TypeDecl {
kind: .sum_type
name: p.prepend_mod(name)
info: table.SumType{
variants: sum_variants
}
variants: sum_variants
}
})
return ast.SumTypeDecl{
name: name
@@ -1983,8 +1919,8 @@ fn (p mut Parser) type_decl() ast.TypeDecl {
name: p.prepend_mod(name)
parent_idx: pid
info: table.Alias{
foo: ''
}
foo: ''
}
})
return ast.AliasTypeDecl{
name: name
@@ -1993,7 +1929,7 @@ fn (p mut Parser) type_decl() ast.TypeDecl {
}
}

fn (p mut Parser) assoc() ast.Assoc{
fn (p mut Parser) assoc() ast.Assoc {
var_name := p.check_name()
pos := p.tok.position()
var := p.scope.find_var(var_name) or {
@@ -2004,7 +1940,7 @@ fn (p mut Parser) assoc() ast.Assoc{
mut fields := []string
mut vals := []ast.Expr
p.check(.pipe)
for {
for {
fields << p.check_name()
p.check(.colon)
expr := p.expr(0)
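Most of the parser.v churn comes from vfmt joining a bare `else {` onto the closing brace of the preceding branch. Using the snippet from the parse_assign_rhs hunk above (indentation restored here for readability), the pre-format layout

if p.tok.kind == .comma {
	p.check(.comma)
}
else {
	break
}

becomes

if p.tok.kind == .comma {
	p.check(.comma)
} else {
	break
}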