
fmt: fix multiple things and format most of the compiler (#6631)

Format expressions inside string interpolation like everywhere else (it used to be a+b instead of a + b, not sure why)
Fix the formatting of match branches that contain only a single statement (they were wrongly inlined)
Fix the parsing and formatting of a comment edge case on struct field init. You should check out that test, because the result is a bit different from before. I personally find it more logical, but I would understand if the former format were kept
Fix the formatting of void-returning function signatures. A short illustrative snippet of the new behavior follows below.
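A minimal sketch of the formatting behavior described above; the names run_twice and show_sum are invented for illustration and are not part of this commit.

// Illustrative only: after this change vfmt spaces interpolated
// expressions (a + b instead of a+b) and prints a void function type
// as plain `fn ()`, without appending a return type.
fn run_twice(cb fn ()) {
	cb()
	cb()
}

fn show_sum(a int, b int) {
	println('sum: ${a + b}')
}

fn main() {
	run_twice(fn () {
		show_sum(2, 3)
	})
}

Running v fmt over code like this should leave it unchanged, which mirrors what the _input.vv/_expected.vv fmt tests touched by this commit verify for the real test files.
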
Enzo 2020-10-15 22:12:59 +02:00 committed by GitHub
parent 23644d92a9
commit b083f4014b
52 changed files with 414 additions and 456 deletions

View File

@ -18,37 +18,42 @@ jobs:
./v vet vlib/v
- name: v fmt -verify
run: |
./v fmt -verify vlib/v/scanner/scanner.v
./v fmt -verify vlib/v/parser/parser.v
./v fmt -verify vlib/v/parser/fn.v
./v fmt -verify vlib/v/checker/checker.v
./v fmt -verify vlib/v/gen/cgen.v
./v fmt -verify vlib/v/gen/fn.v
./v fmt -verify vlib/v/gen/x64/gen.v
./v fmt -verify vlib/v/table/table.v
./v fmt -verify vlib/v/fmt/fmt.v
./v fmt -verify vlib/builtin/array.v
./v fmt -verify vlib/os/file.v
./v fmt -verify vlib/v/util/errors.v
./v fmt -verify vlib/v/util/suggestions.v
./v fmt -verify vlib/v/util/util.v
./v fmt -verify vlib/v/builder/builder.v
./v fmt -verify vlib/v/builder/cc.v
./v fmt -verify vlib/v/builder/compile.v
./v fmt -verify vlib/v/builder/msvc.v
./v fmt -verify vlib/math/bits/bits.v
./v fmt -verify vlib/time/time.v
./v fmt -verify vlib/term/colors.v
./v fmt -verify vlib/term/term.v
./v fmt -verify vlib/v/ast/scope.v
./v fmt -verify vlib/v/checker/check_types.v
./v fmt -verify vlib/v/table/atypes.v
./v fmt -verify vlib/v/cflag/cflags.v
./v fmt -verify vlib/v/table/cflags.v
./v fmt -verify vlib/v/ast/
./v fmt -verify vlib/v/builder/
./v fmt -verify vlib/v/cflag/
./v fmt -verify vlib/v/checker/
./v fmt -verify vlib/v/depgraph/
./v fmt -verify vlib/v/doc/
./v fmt -verify vlib/v/errors/
./v fmt -verify vlib/v/eval/
./v fmt -verify vlib/v/fmt/
./v fmt -verify vlib/v/gen/auto_str_methods.v
./v fmt -verify vlib/v/parser/parse_type.v
./v fmt -verify vlib/v/gen/cgen.v
./v fmt -verify vlib/v/gen/cgen_test.v
./v fmt -verify vlib/v/gen/cmain.v
./v fmt -verify vlib/v/gen/comptime.v
./v fmt -verify vlib/v/gen/fn.v
./v fmt -verify vlib/v/gen/json.v
./v fmt -verify vlib/v/gen/live.v
./v fmt -verify vlib/v/gen/profile.v
./v fmt -verify vlib/v/gen/sql.v
./v fmt -verify vlib/v/gen/str.v
./v fmt -verify vlib/v/gen/x64/elf.v
./v fmt -verify vlib/v/gen/x64/elf_obj.v
./v fmt -verify vlib/v/gen/x64/gen.v
./v fmt -verify vlib/v/parser/
./v fmt -verify vlib/v/pref/
./v fmt -verify vlib/v/scanner/
./v fmt -verify vlib/v/table/
./v fmt -verify vlib/v/util/
./v fmt -verify vlib/v/vet/
./v fmt -verify vlib/v/vmod/
- name: v test-fmt
run: ./v -silent test-fmt

View File

@ -240,13 +240,13 @@ pub fn (t Time) relative_short() string {
return '1m'
}
if secs < 3600 {
return '${secs/60}m'
return '${secs / 60}m'
}
if secs < 3600 * 24 {
return '${secs/3600}h'
return '${secs / 3600}h'
}
if secs < 3600 * 24 * 5 {
return '${secs/3600/24}d'
return '${secs / 3600 / 24}d'
}
if secs > 3600 * 24 * 10000 {
return ''

View File

@ -179,11 +179,11 @@ pub:
pub struct InterfaceDecl {
pub:
name string
field_names []string
is_pub bool
methods []FnDecl
pos token.Position
name string
field_names []string
is_pub bool
methods []FnDecl
pos token.Position
pre_comments []Comment
}
@ -602,8 +602,8 @@ pub:
stmts []Stmt
pos token.Position
val_is_mut bool // `for mut val in vals {` means that modifying `val` will modify the array
pub mut:
// and the array cannot be indexed inside the loop
pub mut:
key_type table.Type
val_type table.Type
cond_type table.Type
@ -625,10 +625,10 @@ pub:
// #include etc
pub struct HashStmt {
pub:
mod string
pos token.Position
mod string
pos token.Position
pub mut:
val string
val string
kind string
}

View File

@ -58,7 +58,9 @@ pub fn (mut b Builder) compile_c() {
// println(files)
}
$if windows {
b.find_win_cc() or { verror(no_compiler_error) }
b.find_win_cc() or {
verror(no_compiler_error)
}
// TODO Probably extend this to other OS's?
}
// v1 compiler files
@ -83,7 +85,8 @@ pub fn (mut b Builder) compile_c() {
bundle_id := if b.pref.bundle_id != '' { b.pref.bundle_id } else { 'app.vlang.$bundle_name' }
display_name := if b.pref.display_name != '' { b.pref.display_name } else { bundle_name }
os.mkdir('$display_name\.app')
os.write_file('$display_name\.app/Info.plist', make_ios_plist(display_name, bundle_id, bundle_name, 1))
os.write_file('$display_name\.app/Info.plist', make_ios_plist(display_name, bundle_id,
bundle_name, 1))
}
b.cc()
}

View File

@ -761,7 +761,7 @@ fn (mut v Builder) build_thirdparty_obj_file(path string, moduleflags []cflag.CF
return
}
println('$obj_path not found, building it...')
cfile := '${path[..path.len-2]}.c'
cfile := '${path[..path.len - 2]}.c'
btarget := moduleflags.c_options_before_target()
atarget := moduleflags.c_options_after_target()
cppoptions := if v.pref.ccompiler.contains('++') { ' -fpermissive -w ' } else { '' }

View File

@ -5,7 +5,7 @@ import v.table
// generic struct instantiations to concrete types
pub fn (b &Builder) generic_struct_insts_to_concrete() {
for idx, _ in b.table.types {
mut typ := unsafe { &b.table.types[idx] }
mut typ := unsafe {&b.table.types[idx]}
if typ.kind == .generic_struct_inst {
info := typ.info as table.GenericStructInst
parent := b.table.types[info.parent_idx]

View File

@ -36,4 +36,4 @@ fn make_ios_plist(display_name string, bundle_id string, bundle_name string, bun
</array>
</dict>
</plist>'
}
}

View File

@ -331,7 +331,7 @@ fn (mut v Builder) build_thirdparty_obj_file_with_msvc(path string, moduleflags
return
}
println('$obj_path not found, building it (with msvc)...')
cfiles := '${path[..path.len-2]}.c'
cfiles := '${path[..path.len - 2]}.c'
flags := msvc_string_flags(moduleflags)
inc_dirs := flags.inc_paths.join(' ')
defines := flags.defines.join(' ')

View File

@ -1168,8 +1168,8 @@ pub fn (mut c Checker) call_method(mut call_expr ast.CallExpr) table.Type {
}
}
if got_arg_typ != table.void_type {
c.error('cannot use type `$got_arg_sym.source_name` as type `$exp_arg_sym.source_name` in argument ${i+1} to `${left_type_sym.source_name}.$method_name`',
call_expr.pos)
c.error('cannot use type `$got_arg_sym.source_name` as type `$exp_arg_sym.source_name` in argument ${i +
1} to `${left_type_sym.source_name}.$method_name`', call_expr.pos)
}
}
param := if method.is_variadic && i >= method.params.len - 1 { method.params[method.params.len -
@ -1186,8 +1186,8 @@ pub fn (mut c Checker) call_method(mut call_expr ast.CallExpr) table.Type {
} else {
if param.is_mut && (!arg.is_mut || param.typ.share() != arg.share) {
tok := arg.share.str()
c.error('`$call_expr.name` parameter `$param.name` is `$tok`, you need to provide `$tok` e.g. `$tok arg${i+1}`',
arg.expr.position())
c.error('`$call_expr.name` parameter `$param.name` is `$tok`, you need to provide `$tok` e.g. `$tok arg${i +
1}`', arg.expr.position())
}
}
}
@ -1460,8 +1460,8 @@ pub fn (mut c Checker) call_fn(mut call_expr ast.CallExpr) table.Type {
} else {
if arg.is_mut && (!call_arg.is_mut || arg.typ.share() != call_arg.share) {
tok := call_arg.share.str()
c.error('`$call_expr.name` parameter `$arg.name` is `$tok`, you need to provide `$tok` e.g. `$tok arg${i+1}`',
call_arg.expr.position())
c.error('`$call_expr.name` parameter `$arg.name` is `$tok`, you need to provide `$tok` e.g. `$tok arg${i +
1}`', call_arg.expr.position())
}
}
// Handle expected interface
@ -1492,11 +1492,11 @@ pub fn (mut c Checker) call_fn(mut call_expr ast.CallExpr) table.Type {
}
if typ_sym.kind == .function && arg_typ_sym.kind == .function {
candidate_fn_name := if typ_sym.source_name.starts_with('anon_') { 'anonymous function' } else { 'fn `$typ_sym.source_name`' }
c.error('cannot use $candidate_fn_name as function type `$arg_typ_sym.str()` in argument ${i+1} to `$fn_name`',
call_expr.pos)
c.error('cannot use $candidate_fn_name as function type `$arg_typ_sym.str()` in argument ${i +
1} to `$fn_name`', call_expr.pos)
} else {
c.error('cannot use type `$typ_sym.source_name` as type `$arg_typ_sym.source_name` in argument ${i+1} to `$fn_name`',
call_expr.pos)
c.error('cannot use type `$typ_sym.source_name` as type `$arg_typ_sym.source_name` in argument ${i +
1} to `$fn_name`', call_expr.pos)
}
}
}
@ -3197,21 +3197,27 @@ fn (mut c Checker) match_exprs(mut node ast.MatchExpr, type_sym table.TypeSymbol
mut is_exhaustive := true
mut unhandled := []string{}
match type_sym.info as info {
table.SumType { for v in info.variants {
table.SumType {
for v in info.variants {
v_str := c.table.type_to_str(v)
if v_str !in branch_exprs {
is_exhaustive = false
unhandled << '`$v_str`'
}
} }
}
}
//
table.Enum { for v in info.vals {
table.Enum {
for v in info.vals {
if v !in branch_exprs {
is_exhaustive = false
unhandled << '`.$v`'
}
} }
else { is_exhaustive = false }
}
}
else {
is_exhaustive = false
}
}
mut else_branch := node.branches[node.branches.len - 1]
mut has_else := else_branch.is_else

View File

@ -1,6 +1,6 @@
vlib/v/checker/tests/modules/overload_return_type/main.v:8:11: error: cannot assign to `two`: expected `Point`, not `int`
6 | one := Point {x:1, y:2}
7 | mut two := Point {x:5, y:1}
8 | two = one + two
vlib/v/checker/tests/modules/overload_return_type/main.v:14:8: error: cannot assign to `two`: expected `Point`, not `int`
12 | y: 1
13 | }
14 | two = one + two
| ~~~~~~~~~
9 | }
15 | }

View File

@ -3,7 +3,13 @@ module main
import point { Point }
fn main() {
one := Point {x:1, y:2}
mut two := Point {x:5, y:1}
two = one + two
one := Point{
x: 1
y: 2
}
mut two := Point{
x: 5
y: 1
}
two = one + two
}

View File

@ -1,11 +1,11 @@
module point
pub struct Point {
mut:
x int
y int
mut:
x int
y int
}
pub fn (a Point) +(b Point) int {
return a.x + b.x
return a.x + b.x
}

View File

@ -35,8 +35,8 @@ pub fn (mut o OrderedDepMap) add(name string, deps []string) {
for dep in deps {
if dep !in d {
d << dep
} else {
}
else{}
}
o.set(name, d)
}
@ -135,7 +135,7 @@ pub fn (graph &DepGraph) display() string {
}
pub fn (graph &DepGraph) display_cycles() string {
mut node_names := map[string]DepGraphNode
mut node_names := map[string]DepGraphNode{}
for node in graph.nodes {
node_names[node.name] = node
}

View File

@ -56,7 +56,7 @@ pub mut:
pub fn merge_comments(comments []ast.Comment) string {
mut res := []string{}
for comment in comments {
res << comment.text.trim_left('|')
res << comment.text.trim_left('\x01')
}
return res.join('\n')
}
@ -74,7 +74,7 @@ pub fn get_comment_block_right_before(comments []ast.Comment) string {
// located right above the top level statement.
// break
}
mut cmt_content := cmt.text.trim_left('|')
mut cmt_content := cmt.text.trim_left('\x01')
if cmt_content.len == cmt.text.len || cmt.is_multi {
// ignore /* */ style comments for now
continue

View File

@ -1,10 +1,9 @@
//import v.table
//import v.doc
//import v.pref
// import v.table
// import v.doc
// import v.pref
// fn test_vdoc() {
// mut prefs := &pref.Preferences{}
// prefs.fill_with_defaults()
// table := table.new_table()
// println(doc.doc('net', table, prefs))
// mut prefs := &pref.Preferences{}
// prefs.fill_with_defaults()
// table := table.new_table()
// println(doc.doc('net', table, prefs))
// }

View File

@ -12,7 +12,7 @@ pub enum Reporter {
pub struct Error {
pub:
message string
details string
details string
file_path string
pos token.Position
backtrace string

View File

@ -34,14 +34,14 @@ pub fn (mut e Eval) eval(file ast.File, table &table.Table) string {
fn print_object(o Object) {
match o {
int { println(it) }
int { println(o) }
else { println('unknown object') }
}
}
pub fn (o Object) str() string {
match o {
int { return it.str() }
int { return o.str() }
else { println('unknown object') }
}
return ''
@ -53,18 +53,18 @@ fn (mut e Eval) stmt(node ast.Stmt) string {
// TODO; replaced VarDecl
}
ast.ExprStmt {
o := e.expr(it.expr)
o := e.expr(node.expr)
print('out: ')
print_object(o)
return o.str()
}
// ast.StructDecl {
// println('s decl')
// println('s decl')
// }
// ast.VarDecl {
// e.vars[it.name] = Var{
// value: e.expr(it.expr)
// }
// e.vars[it.name] = Var{
// value: e.expr(it.expr)
// }
// }
else {}
}
@ -74,20 +74,20 @@ fn (mut e Eval) stmt(node ast.Stmt) string {
fn (mut e Eval) expr(node ast.Expr) Object {
match node {
ast.IntegerLiteral {
return it.val
return node.val
}
ast.Ident {
print_object(it.value)
print_object(node.value)
// Find the variable
v := e.vars[it.name]
v := e.vars[node.name]
return v.value
}
ast.InfixExpr {
e.checker.infix_expr(mut it)
e.checker.infix_expr(mut node)
// println('bin $it.op')
left := e.expr(it.left) as int
right := e.expr(it.right) as int
match it.op {
left := e.expr(node.left) as int
right := e.expr(node.right) as int
match node.op {
.plus { return left + right }
.mul { return left * right }
else {}

View File

@ -38,7 +38,6 @@ pub mut:
file ast.File
did_imports bool
is_assign bool
is_inside_interp bool
auto_imports []string // automatically inserted imports that the user forgot to specify
import_pos int // position of the imports in the resulting string for later autoimports insertion
used_imports []string // to remove unused imports
@ -609,55 +608,48 @@ pub fn (mut f Fmt) struct_decl(node ast.StructDecl) {
}
end_pos := field.pos.pos + field.pos.len
comments := field.comments
if comments.len == 0 {
f.write('\t$field.name ')
f.write(strings.repeat(` `, max - field.name.len))
f.write(field_types[i])
if field.attrs.len > 0 && field.attrs[0].name != 'ref_only' { // TODO a bug with [ref_only] attr being added to fields, fix it
f.write(strings.repeat(` `, max_type - field_types[i].len))
f.inline_attrs(field.attrs)
}
if field.has_default_expr {
f.write(' = ')
f.prefix_expr_cast_expr(field.default_expr)
}
f.write('\n')
continue
}
// Handle comments before field
mut j := 0
for j < comments.len && comments[j].pos.pos < field.pos.pos {
mut comm_idx := 0
for comm_idx < comments.len && comments[comm_idx].pos.pos < field.pos.pos {
f.indent++
f.empty_line = true
f.comment(comments[j], {
inline: true
})
f.comment(comments[comm_idx], {})
f.writeln('')
f.indent--
j++
comm_idx++
}
f.write('\t$field.name ')
// Handle comments between field name and type
mut comments_len := 0
for j < comments.len && comments[j].pos.pos < end_pos {
comment := '/* ${comments[j].text} */ ' // TODO: handle in a function
comments_len += comment.len
f.write(comment)
j++
for comm_idx < comments.len && comments[comm_idx].pos.pos < end_pos {
comment_text := '/* ${comments[comm_idx].text} */ ' // TODO handle in a function
comments_len += comment_text.len
f.write(comment_text)
comm_idx++
}
f.write(strings.repeat(` `, max - field.name.len - comments_len))
f.write(field_types[i])
f.inline_attrs(field.attrs)
if field.attrs.len > 0 && field.attrs[0].name != 'ref_only' { // TODO a bug with [ref_only] attr being added to fields, fix it
f.write(strings.repeat(` `, max_type - field_types[i].len))
f.inline_attrs(field.attrs)
}
if field.has_default_expr {
f.write(' = ')
f.prefix_expr_cast_expr(field.default_expr)
}
// Handle comments after field type (same line)
for j < comments.len && field.pos.line_nr == comments[j].pos.line_nr {
f.write(' // ${comments[j].text}') // TODO: handle in a function
j++
if comm_idx < comments.len {
if comments[comm_idx].pos.line_nr > field.pos.line_nr {
f.writeln('')
} else {
f.write(' ')
}
f.comments(comments[comm_idx..], {
level: .indent
})
} else {
f.writeln('')
}
f.write('\n')
}
f.comments_after_last_field(node.end_comments)
f.writeln('}\n')
@ -1011,7 +1003,6 @@ pub fn (mut f Fmt) expr(node ast.Expr) {
} else {
f.write("'")
}
f.is_inside_interp = true
for i, val in node.vals {
f.write(val)
if i >= node.exprs.len {
@ -1028,7 +1019,6 @@ pub fn (mut f Fmt) expr(node ast.Expr) {
f.expr(node.exprs[i])
}
}
f.is_inside_interp = false
if contains_single_quote {
f.write('"')
} else {
@ -1093,11 +1083,7 @@ pub fn (mut f Fmt) call_args(args []ast.CallArg) {
}
f.expr(arg.expr)
if i < args.len - 1 {
if f.is_inside_interp {
f.write(',')
} else {
f.write(', ')
}
f.write(', ')
}
}
}
@ -1145,16 +1131,20 @@ enum CommentsLevel {
indent
}
// CommentsOptions defines the way comments are going to be written
// - has_nl: adds an newline at the end of the list of comments
// - inline: single-line comments will be on the same line as the last statement
// - level: either .keep (don't indent), or .indent (increment indentation)
struct CommentsOptions {
has_nl bool = true
inline bool
level CommentsLevel = .keep
level CommentsLevel
}
pub fn (mut f Fmt) comment(node ast.Comment, options CommentsOptions) {
if !node.text.contains('\n') {
is_separate_line := !options.inline || node.text.starts_with('|')
mut s := if node.text.starts_with('|') { node.text[1..] } else { node.text }
is_separate_line := !options.inline || node.text.starts_with('\x01')
mut s := if node.text.starts_with('\x01') { node.text[1..] } else { node.text }
if s == '' {
s = '//'
} else {
@ -1258,73 +1248,67 @@ pub fn (mut f Fmt) lock_expr(lex ast.LockExpr) {
}
pub fn (mut f Fmt) infix_expr(node ast.InfixExpr) {
if f.is_inside_interp {
f.expr(node.left)
f.write('$node.op.str()')
f.expr(node.right)
} else {
buffering_save := f.buffering
if !f.buffering {
f.out_save = f.out
f.out = strings.new_builder(60)
f.buffering = true
}
f.expr(node.left)
is_one_val_array_init := node.op in [.key_in, .not_in] &&
node.right is ast.ArrayInit && (node.right as ast.ArrayInit).exprs.len == 1
if is_one_val_array_init {
// `var in [val]` => `var == val`
f.write(if node.op == .key_in {
' == '
} else {
' != '
})
} else {
f.write(' $node.op.str() ')
}
f.expr_bufs << f.out.str()
mut penalty := 3
match node.left as left {
ast.InfixExpr {
if int(token.precedences[left.op]) > int(token.precedences[node.op]) {
penalty--
}
}
ast.ParExpr {
penalty = 1
}
else {}
}
match node.right as right {
ast.InfixExpr { penalty-- }
ast.ParExpr { penalty = 1 }
else {}
}
f.penalties << penalty
// combine parentheses level with operator precedence to form effective precedence
f.precedences << int(token.precedences[node.op]) | (f.par_level << 16)
buffering_save := f.buffering
if !f.buffering {
f.out_save = f.out
f.out = strings.new_builder(60)
f.buffering = true
if is_one_val_array_init {
// `var in [val]` => `var == val`
f.expr((node.right as ast.ArrayInit).exprs[0])
}
f.expr(node.left)
is_one_val_array_init := node.op in [.key_in, .not_in] &&
node.right is ast.ArrayInit && (node.right as ast.ArrayInit).exprs.len == 1
if is_one_val_array_init {
// `var in [val]` => `var == val`
f.write(if node.op == .key_in {
' == '
} else {
f.expr(node.right)
}
if !buffering_save && f.buffering { // now decide if and where to break
f.expr_bufs << f.out.str()
f.out = f.out_save
f.buffering = false
f.adjust_complete_line()
for i, p in f.penalties {
f.write(f.expr_bufs[i])
f.wrap_long_line(p, true)
' != '
})
} else {
f.write(' $node.op.str() ')
}
f.expr_bufs << f.out.str()
mut penalty := 3
match node.left as left {
ast.InfixExpr {
if int(token.precedences[left.op]) > int(token.precedences[node.op]) {
penalty--
}
f.write(f.expr_bufs[f.expr_bufs.len - 1])
f.expr_bufs = []string{}
f.penalties = []int{}
f.precedences = []int{}
}
ast.ParExpr {
penalty = 1
}
else {}
}
match node.right as right {
ast.InfixExpr { penalty-- }
ast.ParExpr { penalty = 1 }
else {}
}
f.penalties << penalty
// combine parentheses level with operator precedence to form effective precedence
f.precedences << int(token.precedences[node.op]) | (f.par_level << 16)
f.out = strings.new_builder(60)
f.buffering = true
if is_one_val_array_init {
// `var in [val]` => `var == val`
f.expr((node.right as ast.ArrayInit).exprs[0])
} else {
f.expr(node.right)
}
if !buffering_save && f.buffering { // now decide if and where to break
f.expr_bufs << f.out.str()
f.out = f.out_save
f.buffering = false
f.adjust_complete_line()
for i, p in f.penalties {
f.write(f.expr_bufs[i])
f.wrap_long_line(p, true)
}
f.write(f.expr_bufs[f.expr_bufs.len - 1])
f.expr_bufs = []string{}
f.penalties = []int{}
f.precedences = []int{}
}
}
@ -1499,13 +1483,9 @@ pub fn (mut f Fmt) match_expr(it ast.MatchExpr) {
if branch.stmts.len == 0 {
continue
}
stmt := branch.stmts[0]
if stmt is ast.ExprStmt {
// If expressions inside match branches can't be one a single line
if !expr_is_single_line(stmt.expr) {
single_line = false
break
}
if !stmt_is_single_line(branch.stmts[0]) {
single_line = false
break
}
}
for branch in it.branches {
@ -1595,6 +1575,15 @@ fn (mut f Fmt) write_language_prefix(lang table.Language) {
}
}
fn stmt_is_single_line(stmt ast.Stmt) bool {
match stmt {
ast.ExprStmt { return expr_is_single_line(stmt.expr) }
ast.Return { return true }
ast.AssignStmt { return true }
else { return false }
}
}
fn expr_is_single_line(expr ast.Expr) bool {
match expr {
ast.IfExpr { return false }

View File

@ -47,31 +47,31 @@ fn test_fmt() {
opath := ipath
expected_ocontent := os.read_file(opath) or {
fmt_bench.fail()
eprintln(fmt_bench.step_message_fail('cannot read from ${vrelpath}'))
eprintln(fmt_bench.step_message_fail('cannot read from $vrelpath'))
continue
}
table := table.new_table()
file_ast := parser.parse_file(ipath, table, .parse_comments, &pref.Preferences{
is_fmt: true,
ccompiler: 'gcc'
}, &ast.Scope{
parent: 0
is_fmt: true
ccompiler: 'gcc'
}, &ast.Scope{
parent: 0
})
result_ocontent := fmt.fmt(file_ast, table, false)
if expected_ocontent != result_ocontent {
fmt_bench.fail()
eprintln(fmt_bench.step_message_fail('file ${vrelpath} after formatting, does not look as expected.'))
eprintln(fmt_bench.step_message_fail('file $vrelpath after formatting, does not look as expected.'))
if diff_cmd == '' {
eprintln('>> sorry, but no working "diff" CLI command can be found')
continue
}
vfmt_result_file := os.join_path(tmpfolder, 'vfmt_run_over_${ifilename}')
vfmt_result_file := os.join_path(tmpfolder, 'vfmt_run_over_$ifilename')
os.write_file(vfmt_result_file, result_ocontent)
eprintln(util.color_compare_files(diff_cmd, opath, vfmt_result_file))
continue
}
fmt_bench.ok()
eprintln(fmt_bench.step_message_ok('${vrelpath}'))
eprintln(fmt_bench.step_message_ok('$vrelpath'))
}
fmt_bench.stop()
eprintln(term.h_divider('-'))

View File

@ -23,7 +23,9 @@ fn test_fmt() {
}
vroot := os.dir(vexe)
tmpfolder := os.temp_dir()
diff_cmd := util.find_working_diff_command() or { '' }
diff_cmd := util.find_working_diff_command() or {
''
}
mut fmt_bench := benchmark.new_benchmark()
// Lookup the existing test _input.vv files:
input_files := os.walk_ext('$vroot/vlib/v/fmt/tests', '_input.vv')
@ -35,35 +37,35 @@ fn test_fmt() {
opath := ipath.replace('_input.vv', '_expected.vv')
if !os.exists(opath) {
fmt_bench.fail()
eprintln(fmt_bench.step_message_fail('missing file ${opath}'))
eprintln(fmt_bench.step_message_fail('missing file $opath'))
continue
}
expected_ocontent := os.read_file(opath) or {
fmt_bench.fail()
eprintln(fmt_bench.step_message_fail('cannot read from ${opath}'))
eprintln(fmt_bench.step_message_fail('cannot read from $opath'))
continue
}
table := table.new_table()
file_ast := parser.parse_file(ipath, table, .parse_comments, &pref.Preferences{
is_fmt: true
}, &ast.Scope{
parent: 0
is_fmt: true
}, &ast.Scope{
parent: 0
})
result_ocontent := fmt.fmt(file_ast, table, false)
if expected_ocontent != result_ocontent {
fmt_bench.fail()
eprintln(fmt_bench.step_message_fail('file ${ipath} after formatting, does not look as expected.'))
eprintln(fmt_bench.step_message_fail('file $ipath after formatting, does not look as expected.'))
if diff_cmd == '' {
eprintln('>> sorry, but no working "diff" CLI command can be found')
continue
}
vfmt_result_file := os.join_path(tmpfolder, 'vfmt_run_over_${ifilename}')
vfmt_result_file := os.join_path(tmpfolder, 'vfmt_run_over_$ifilename')
os.write_file(vfmt_result_file, result_ocontent)
eprintln(util.color_compare_files(diff_cmd, opath, vfmt_result_file))
continue
}
fmt_bench.ok()
eprintln(fmt_bench.step_message_ok('${ipath}'))
eprintln(fmt_bench.step_message_ok('$ipath'))
}
fmt_bench.stop()
eprintln(term.h_divider('-'))

View File

@ -40,30 +40,30 @@ fn test_vlib_fmt() {
opath := ipath
expected_ocontent := os.read_file(opath) or {
fmt_bench.fail()
eprintln(fmt_bench.step_message_fail('cannot read from ${opath}'))
eprintln(fmt_bench.step_message_fail('cannot read from $opath'))
continue
}
table := table.new_table()
file_ast := parser.parse_file(ipath, table, .parse_comments, &pref.Preferences{
is_fmt: true
}, &ast.Scope{
parent: 0
is_fmt: true
}, &ast.Scope{
parent: 0
})
result_ocontent := fmt.fmt(file_ast, table, false)
if expected_ocontent != result_ocontent {
fmt_bench.fail()
eprintln(fmt_bench.step_message_fail('file ${ipath} after formatting, does not look as expected.'))
eprintln(fmt_bench.step_message_fail('file $ipath after formatting, does not look as expected.'))
if diff_cmd == '' {
eprintln('>> sorry, but no working "diff" CLI command can be found')
continue
}
vfmt_result_file := os.join_path(tmpfolder, 'vfmt_run_over_${ifilename}')
vfmt_result_file := os.join_path(tmpfolder, 'vfmt_run_over_$ifilename')
os.write_file(vfmt_result_file, result_ocontent)
eprintln(util.color_compare_files(diff_cmd, opath, vfmt_result_file))
continue
}
fmt_bench.ok()
eprintln(fmt_bench.step_message_ok('${ipath}'))
eprintln(fmt_bench.step_message_ok('$ipath'))
}
fmt_bench.stop()
eprintln(term.h_divider('-'))

View File

@ -1,3 +1,7 @@
pub fn (a []int) reduce(iter fn (int, int) int, accum_start int) int {
iter(accum_start)
}
pub fn test_anon_fn_void(func fn ()) int {
return 0
}

View File

@ -27,6 +27,6 @@ fn main() {
println('$st.a.xy${ar.a[2].xy}$aa.xy$z')
println('${st.a.xy}ya ${ar.a[2].xy}X2 ${aa.xy}.b ${z}3')
println('${z:-5} ${z:+5.3} ${z:+09.3f} ${z:-7.2} ${z:+09} ${z:08.3f}')
println('$ar.f() ${ar.g(1,2)} ${ar.a}() ${z}(')
println('${z>12.3*z-3} ${@VEXE} ${4*5}')
println('$ar.f() ${ar.g(1, 2)} ${ar.a}() ${z}(')
println('${z > 12.3 * z - 3} ${@VEXE} ${4 * 5}')
}

View File

@ -28,7 +28,9 @@ mut:
// 1
// 2
// 3
somefield /* 4 */ /* 5 */ int // 6 // 7 // 8
somefield /* 4 */ /* 5 */ int // 6
// 7
// 8
/*
9
10

View File

@ -170,7 +170,7 @@ fn (mut g Gen) gen_str_for_array_fixed(info table.ArrayFixed, styp string, str_f
g.auto_str_funcs.writeln('\t\tstrings__Builder_write(&sb, ${elem_str_fn_name}(*a[i]));')
}
}
g.auto_str_funcs.writeln('\t\tif (i < ${info.size-1}) {')
g.auto_str_funcs.writeln('\t\tif (i < ${info.size - 1}) {')
g.auto_str_funcs.writeln('\t\t\tstrings__Builder_write(&sb, tos_lit(", "));')
g.auto_str_funcs.writeln('\t\t}')
g.auto_str_funcs.writeln('\t}')
@ -337,7 +337,7 @@ fn (mut g Gen) gen_str_for_struct(info table.Struct, styp string, str_fn_name st
}
g.auto_str_funcs.writeln('\t\t"%.*s\\000 $field.name: $fmt\\n"')
}
g.auto_str_funcs.write('\t\t"%.*s\\000}", ${2*(info.fields.len+1)}')
g.auto_str_funcs.write('\t\t"%.*s\\000}", ${2 * (info.fields.len + 1)}')
if info.fields.len > 0 {
g.auto_str_funcs.write(',\n\t\t')
for i, field in info.fields {

View File

@ -2601,7 +2601,7 @@ fn (mut g Gen) infix_expr(node ast.InfixExpr) {
node.op in [.eq, .ne, .gt, .lt, .ge, .le] {
bitsize := if unaliased_left.idx() == table.u32_type_idx &&
unaliased_right.idx() != table.i64_type_idx { 32 } else { 64 }
g.write('_us${bitsize}_${cmp_str[int(node.op)-int(token.Kind.eq)]}(')
g.write('_us${bitsize}_${cmp_str[int(node.op) - int(token.Kind.eq)]}(')
g.expr(node.left)
g.write(',')
g.expr(node.right)
@ -2610,7 +2610,7 @@ fn (mut g Gen) infix_expr(node ast.InfixExpr) {
node.op in [.eq, .ne, .gt, .lt, .ge, .le] {
bitsize := if unaliased_right.idx() == table.u32_type_idx &&
unaliased_left.idx() != table.i64_type_idx { 32 } else { 64 }
g.write('_us${bitsize}_${cmp_rev[int(node.op)-int(token.Kind.eq)]}(')
g.write('_us${bitsize}_${cmp_rev[int(node.op) - int(token.Kind.eq)]}(')
g.expr(node.right)
g.write(',')
g.expr(node.left)
@ -4016,7 +4016,7 @@ fn verror(s string) {
}
fn (g &Gen) error(s string, pos token.Position) {
p := if pos.line_nr == 0 { '?' } else { '${pos.line_nr+1}' }
p := if pos.line_nr == 0 { '?' } else { '${pos.line_nr + 1}' }
util.verror('$g.file.path:$p: cgen error', s)
}
@ -4151,7 +4151,7 @@ fn (mut g Gen) write_types(types []table.TypeSymbol) {
styp, base := g.optional_type_name(field.typ)
g.optionals << styp
g.typedefs2.writeln('typedef struct $styp $styp;')
g.type_definitions.writeln('${g.optional_type_text(styp,base)};')
g.type_definitions.writeln('${g.optional_type_text(styp, base)};')
g.type_definitions.write(last_text)
}
type_name := g.typ(field.typ)
@ -5031,7 +5031,7 @@ fn (mut g Gen) go_stmt(node ast.GoStmt) {
g.writeln(';')
}
for i, arg in expr.args {
g.write('$arg_tmp_var->arg${i+1} = ')
g.write('$arg_tmp_var->arg${i + 1} = ')
g.expr(arg.expr)
g.writeln(';')
}
@ -5056,7 +5056,7 @@ fn (mut g Gen) go_stmt(node ast.GoStmt) {
} else {
for i, arg in expr.args {
styp := g.typ(arg.typ)
g.type_definitions.writeln('\t$styp arg${i+1};')
g.type_definitions.writeln('\t$styp arg${i + 1};')
}
}
g.type_definitions.writeln('} $wrapper_struct_name;')
@ -5070,7 +5070,7 @@ fn (mut g Gen) go_stmt(node ast.GoStmt) {
}
}
for i in 0 .. expr.args.len {
g.gowrappers.write('arg->arg${i+1}')
g.gowrappers.write('arg->arg${i + 1}')
if i < expr.args.len - 1 {
g.gowrappers.write(', ')
}

View File

@ -32,14 +32,14 @@ fn test_c_files() {
res = res[..pos] + res[end + 15..]
}
if compare_texts(res, ctext, path) {
println('${term_ok} ${i}')
println('$term_ok $i')
} else {
assert false
}
}
}
fn compare_texts(a, b, path string) bool {
fn compare_texts(a string, b string, path string) bool {
lines_a_ := a.trim_space().split_into_lines()
lines_b_ := b.trim_space().split_into_lines()
lines_a := lines_a_.filter(it != '')
@ -60,8 +60,8 @@ fn compare_texts(a, b, path string) bool {
}
line_b := lines_b[i]
if line_a.trim_space() != line_b.trim_space() {
println('${path}: Got\n$a')
println('${path}:${i}: ${term_fail}')
println('$path: Got\n$a')
println('$path:$i: $term_fail')
println(term.bold(term.bright_yellow('actual : ')) + line_a)
println(term.green('expected: ') + line_b)
println(lines_b[i + 1])
@ -72,8 +72,3 @@ fn compare_texts(a, b, path string) bool {
}
return true
}
fn test_nested_if() {
a := if true { if true { 'a' } else { 'b' } } else { 'c' }
assert a == 'a'
}

View File

@ -35,7 +35,7 @@ fn (mut g Gen) gen_vlines_reset() {
g.vlines_path = util.vlines_escape_path(g.pref.out_name_c, g.pref.ccompiler)
g.writeln('')
g.writeln('\n// Reset the file/line numbers')
g.writeln('\n#line $lines_so_far "${g.vlines_path}"')
g.writeln('\n#line $lines_so_far "$g.vlines_path"')
g.writeln('')
}
}
@ -111,7 +111,6 @@ void (_vsokol_cleanup_userdata_cb)(void* user_data) {
}
')
}
g.writeln('// The sokol_main entry point on Android
sapp_desc sokol_main(int argc, char* argv[]) {
(void)argc; (void)argv;

View File

@ -53,9 +53,9 @@ fn (mut g Gen) comptime_call(node ast.ComptimeCall) {
if m.params[i].typ.is_int() || m.params[i].typ.idx() == table.bool_type_idx {
// Gets the type name and cast the string to the type with the string_<type> function
type_name := g.table.types[int(m.params[i].typ)].str()
g.write('string_${type_name}(((string*)${node.args_var}.data) [${i-1}])')
g.write('string_${type_name}(((string*)${node.args_var}.data) [${i - 1}])')
} else {
g.write('((string*)${node.args_var}.data) [${i-1}] ')
g.write('((string*)${node.args_var}.data) [${i - 1}] ')
}
if i < m.params.len - 1 {
g.write(', ')
@ -106,7 +106,9 @@ fn (mut g Gen) comp_if(node ast.IfExpr) {
stmt_str := g.go_before_stmt(0)
g.write(tabs[g.indent])
stmt_str.trim_space()
} else { '' }
} else {
''
}
for i, branch in node.branches {
start_pos := g.out.len
if i == node.branches.len - 1 && node.has_else {
@ -132,7 +134,7 @@ fn (mut g Gen) comp_if(node ast.IfExpr) {
g.indent++
g.writeln('$styp $tmp;')
g.writeln('{')
g.stmts(branch.stmts[0 .. len - 1])
g.stmts(branch.stmts[0..len - 1])
g.write('\t$tmp = ')
g.stmt(last)
g.writeln('}')
@ -146,13 +148,21 @@ fn (mut g Gen) comp_if(node ast.IfExpr) {
} else {
// Only wrap the contents in {} if we're inside a function, not on the top level scope
should_create_scope := g.fn_decl != 0
if should_create_scope { g.writeln('{') }
if should_create_scope {
g.writeln('{')
}
g.stmts(branch.stmts)
if should_create_scope { g.writeln('}') }
if should_create_scope {
g.writeln('}')
}
}
g.defer_ifdef = ''
}
if node.is_expr { g.write('#endif') } else { g.writeln('#endif') }
if node.is_expr {
g.write('#endif')
} else {
g.writeln('#endif')
}
}
fn (mut g Gen) comp_if_expr(cond ast.Expr) {
@ -161,13 +171,16 @@ fn (mut g Gen) comp_if_expr(cond ast.Expr) {
g.write('(')
g.comp_if_expr(cond.expr)
g.write(')')
} ast.PrefixExpr {
}
ast.PrefixExpr {
g.write(cond.op.str())
g.comp_if_expr(cond.right)
} ast.PostfixExpr {
}
ast.PostfixExpr {
ifdef := g.comp_if_to_ifdef((cond.expr as ast.Ident).name, true)
g.write('defined($ifdef)')
} ast.InfixExpr {
}
ast.InfixExpr {
match cond.op {
.and, .logical_or {
g.comp_if_expr(cond.left)
@ -180,14 +193,18 @@ fn (mut g Gen) comp_if_expr(cond ast.Expr) {
exp_type := g.comptime_var_type_map[name]
got_type := (cond.right as ast.Type).typ
g.write('$exp_type == $got_type')
} .eq, .ne {
}
.eq, .ne {
// TODO Implement `$if method.args.len == 1`
} else {}
}
else {}
}
} ast.Ident {
}
ast.Ident {
ifdef := g.comp_if_to_ifdef(cond.name, false)
g.write('defined($ifdef)')
} else {}
}
else {}
}
}

View File

@ -48,7 +48,7 @@ fn (mut g Gen) sql_stmt(node ast.SqlStmt) {
if field.name == 'id' {
continue
}
g.write('?${i+0}')
g.write('?${i + 0}')
if i < node.fields.len - 1 {
g.write(', ')
}
@ -78,9 +78,9 @@ fn (mut g Gen) sql_stmt(node ast.SqlStmt) {
}
x := '${node.object_var_name}.$field.name'
if field.typ == table.string_type {
g.writeln('sqlite3_bind_text($g.sql_stmt_name, ${i+0}, ${x}.str, ${x}.len, 0);')
g.writeln('sqlite3_bind_text($g.sql_stmt_name, ${i + 0}, ${x}.str, ${x}.len, 0);')
} else {
g.writeln('sqlite3_bind_int($g.sql_stmt_name, ${i+0}, $x); // stmt')
g.writeln('sqlite3_bind_int($g.sql_stmt_name, ${i + 0}, $x); // stmt')
}
}
}

View File

@ -34,11 +34,7 @@ const (
)
pub fn (mut g Gen) generate_elf_header() {
g.buf << [byte(mag0),
mag1,
mag2,
mag3
]
g.buf << [byte(mag0), mag1, mag2, mag3]
g.buf << elfclass64 // file class
g.buf << elfdata2lsb // data encoding
g.buf << ev_current // file version

View File

@ -32,7 +32,7 @@ fn test_x64() {
bench.step()
full_test_path := os.real_path(test)
println('x.v: $wrkdir/x.v')
os.system('cp ${dir}/${test} $wrkdir/x.v') // cant run .vv file
os.system('cp $dir/$test $wrkdir/x.v') // cant run .vv file
os.exec('$vexe -o exe -x64 $wrkdir/x.v') or {
bench.fail()
eprintln(bench.step_message_fail('x64 $test failed'))

View File

@ -63,9 +63,15 @@ fn (mut p Parser) check_cross_variables(exprs []ast.Expr, val ast.Expr) bool {
}
}
}
ast.InfixExpr { return p.check_cross_variables(exprs, val_.left) || p.check_cross_variables(exprs, val_.right) }
ast.PrefixExpr { return p.check_cross_variables(exprs, val_.right) }
ast.PostfixExpr { return p.check_cross_variables(exprs, val_.expr) }
ast.InfixExpr {
return p.check_cross_variables(exprs, val_.left) || p.check_cross_variables(exprs, val_.right)
}
ast.PrefixExpr {
return p.check_cross_variables(exprs, val_.right)
}
ast.PostfixExpr {
return p.check_cross_variables(exprs, val_.expr)
}
ast.SelectorExpr {
for expr in exprs {
if expr.str() == val.str() {
@ -119,7 +125,8 @@ fn (mut p Parser) partial_assign_stmt(left []ast.Expr, left_comments []ast.Comme
share = iv.share
if iv.is_static {
if !p.pref.translated {
p.error_with_pos('static variables are supported only in -translated mode', lx.pos)
p.error_with_pos('static variables are supported only in -translated mode',
lx.pos)
}
is_static = true
}

View File

@ -22,7 +22,7 @@ fn (mut p Parser) hash() ast.HashStmt {
val := p.tok.lit
kind := val.all_before(' ')
p.next()
//p.trace('a.v', 'kind: ${kind:-10s} | pos: ${pos:-45s} | hash: $val')
// p.trace('a.v', 'kind: ${kind:-10s} | pos: ${pos:-45s} | hash: $val')
return ast.HashStmt{
mod: p.mod
val: val

View File

@ -72,7 +72,8 @@ fn (mut p Parser) array_init() ast.ArrayInit {
last_pos = p.tok.position()
p.check(.rcbr)
} else {
p.warn_with_pos('use e.g. `x := [1]Type{}` instead of `x := [1]Type`', last_pos)
p.warn_with_pos('use e.g. `x := [1]Type{}` instead of `x := [1]Type`',
last_pos)
}
} else {
if p.tok.kind == .not {

View File

@ -13,8 +13,6 @@ fn (mut p Parser) lock_expr() ast.LockExpr {
for p.tok.kind == .name {
lockeds << ast.Ident{
language: table.Language.v
// kind is set in checker once ident is processed
// kind: .variable
pos: p.tok.position()
mod: p.mod
name: p.tok.lit

View File

@ -36,8 +36,8 @@ fn (mut p Parser) register_auto_import(alias string) {
p.imports[alias] = alias
p.table.imports << alias
node := ast.Import{
pos: p.tok.position()
mod: alias
pos: p.tok.position()
mod: alias
alias: alias
}
p.ast_imports << node

View File

@ -21,7 +21,8 @@ pub struct Parser {
pref &pref.Preferences
mut:
scanner &scanner.Scanner
comments_mode scanner.CommentsMode = .skip_comments // see comment in parse_file
comments_mode scanner.CommentsMode = .skip_comments
// see comment in parse_file
tok token.Token
prev_tok token.Token
peek_tok token.Token
@ -832,7 +833,7 @@ pub fn (mut p Parser) warn_with_pos(s string, pos token.Position) {
}
pub fn (mut p Parser) vet_error(s string, line int) {
p.vet_errors << '$p.scanner.file_path:${line+1}: $s'
p.vet_errors << '$p.scanner.file_path:${line + 1}: $s'
}
fn (mut p Parser) parse_multi_expr(is_top_level bool) ast.Stmt {

View File

@ -47,13 +47,9 @@ pub fn (mut p Parser) expr(precedence int) ast.Expr {
}
.dollar {
match p.peek_tok.kind {
.name {
return p.vweb()
} .key_if {
return p.if_expr(true)
} else {
p.error('unexpected $')
}
.name { return p.vweb() }
.key_if { return p.if_expr(true) }
else { p.error('unexpected $') }
}
}
.chartoken {
@ -275,7 +271,8 @@ pub fn (mut p Parser) expr_with_left(left ast.Expr, precedence int, is_stmt_iden
return node
}
// added 10/2020: LATER this will be parsed as PrefixExpr instead
p.warn_with_pos('move infix `$p.tok.kind` operator before new line (if infix intended) or use brackets for a prefix expression', p.tok.position())
p.warn_with_pos('move infix `$p.tok.kind` operator before new line (if infix intended) or use brackets for a prefix expression',
p.tok.position())
}
// continue on infix expr
node = p.infix_expr(node)
@ -286,9 +283,9 @@ pub fn (mut p Parser) expr_with_left(left ast.Expr, precedence int, is_stmt_iden
} else if p.tok.kind in [.inc, .dec] || (p.tok.kind == .question && p.inside_ct_if_expr) {
// Postfix
// detect `f(x++)`, `a[x++]`
if p.peek_tok.kind in [.rpar, .rsbr] &&
p.mod !in ['builtin', 'regex', 'strconv'] { // temp
p.warn_with_pos('`$p.tok.kind` operator can only be used as a statement', p.peek_tok.position())
if p.peek_tok.kind in [.rpar, .rsbr] && p.mod !in ['builtin', 'regex', 'strconv'] { // temp
p.warn_with_pos('`$p.tok.kind` operator can only be used as a statement',
p.peek_tok.position())
}
node = ast.PostfixExpr{
op: p.tok.kind
@ -321,8 +318,8 @@ fn (mut p Parser) infix_expr(left ast.Expr) ast.Expr {
}
right = p.expr(precedence)
p.expecting_type = prev_expecting_type
if p.pref.is_vet && op in [.key_in, .not_in] &&
right is ast.ArrayInit && (right as ast.ArrayInit).exprs.len == 1 {
if p.pref.is_vet && op in [.key_in, .not_in] && right is ast.ArrayInit && (right as ast.ArrayInit).exprs.len ==
1 {
p.vet_error('Use `var == value` instead of `var in [value]`', pos.line_nr)
}
return ast.InfixExpr{

View File

@ -154,22 +154,8 @@ fn (mut p Parser) struct_decl() ast.StructDecl {
typ := p.parse_type()
type_pos := p.prev_tok.position()
field_pos := field_start_pos.extend(type_pos)
// if name == '_net_module_s' {
// if name.contains('App') {
// s := p.table.get_type_symbol(typ)
// println('struct decl field type ' + s.str())
// }
// Comments after type (same line)
line_pos := field_pos.line_nr
for p.tok.kind == .comment && line_pos + 1 == p.tok.line_nr {
if p.tok.lit.contains('\n') {
break
}
comments << p.comment()
if p.tok.kind == .rcbr {
break
}
}
comments << p.eat_comments()
if p.tok.kind == .lsbr {
// attrs are stored in `p.attrs`
p.attributes()
@ -284,38 +270,38 @@ fn (mut p Parser) struct_init(short_syntax bool) ast.StructInit {
p.is_amp = false
for p.tok.kind != .rcbr && p.tok.kind != .rpar {
mut field_name := ''
mut expr := ast.Expr{}
mut field_pos := token.Position{}
mut comments := []ast.Comment{}
if no_keys {
expr := p.expr(0)
comments := p.eat_comments()
// name will be set later in checker
fields << ast.StructInitField{
expr: expr
pos: expr.position()
comments: comments
}
expr = p.expr(0)
field_pos = expr.position()
comments = p.eat_comments()
} else {
first_field_pos := p.tok.position()
field_name = p.check_name()
p.check(.colon)
expr := p.expr(0)
comments := p.eat_comments()
expr = p.expr(0)
comments = p.eat_comments()
last_field_pos := expr.position()
field_pos := token.Position{
field_pos = token.Position{
line_nr: first_field_pos.line_nr
pos: first_field_pos.pos
len: last_field_pos.pos - first_field_pos.pos + last_field_pos.len
}
fields << ast.StructInitField{
name: field_name
expr: expr
pos: field_pos
comments: comments
}
}
i++
if p.tok.kind == .comma {
p.next()
}
comments << p.eat_comments()
fields << ast.StructInitField{
name: field_name
expr: expr
pos: field_pos
comments: comments
}
}
last_pos := p.tok.position()
if !short_syntax {

View File

@ -125,14 +125,15 @@ fn test_parse_expr() {
if true {
return
}
input := ['1 == 1', '234234', '2 * 8 + 3', 'a := 3', 'a++', 'b := 4 + 2', 'neg := -a',
'a + a', 'bo := 2 + 3 == 5', '2 + 1', 'q := 1', 'q + 777', '2 + 3', '2+2*4', 'x := 10',
'mut aa := 12', 'ab := 10 + 3 * 9', 's := "hi"', 'x = 11', 'a += 10', '1.2 + 3.4', '4 + 4',
'1 + 2 * 5', '-a+1', '2+2']
expecting := ['1 == 1;', '234234;', '2 * 8 + 3;', 'int a = 3;', 'a++;', 'int b = 4 + 2;',
'int neg = -a;', 'a + a;', 'bool bo = 2 + 3 == 5;', '2 + 1;', 'int q = 1;', 'q + 777;',
'2 + 3;', '2 + 2 * 4;', 'int x = 10;', 'int aa = 12;', 'int ab = 10 + 3 * 9;', 'string s = tos3("hi");',
'x = 11;', 'a += 10;', '1.2 + 3.4;', '4 + 4;', '1 + 2 * 5;', '-a + 1;', '2 + 2;']
input := ['1 == 1', '234234', '2 * 8 + 3', 'a := 3', 'a++', 'b := 4 + 2', 'neg := -a', 'a + a',
'bo := 2 + 3 == 5', '2 + 1', 'q := 1', 'q + 777', '2 + 3', '2+2*4', 'x := 10', 'mut aa := 12',
'ab := 10 + 3 * 9', 's := "hi"', 'x = 11', 'a += 10', '1.2 + 3.4', '4 + 4', '1 + 2 * 5', '-a+1',
'2+2',
]
expecting := ['1 == 1;', '234234;', '2 * 8 + 3;', 'int a = 3;', 'a++;', 'int b = 4 + 2;', 'int neg = -a;',
'a + a;', 'bool bo = 2 + 3 == 5;', '2 + 1;', 'int q = 1;', 'q + 777;', '2 + 3;', '2 + 2 * 4;',
'int x = 10;', 'int aa = 12;', 'int ab = 10 + 3 * 9;', 'string s = tos3("hi");', 'x = 11;', 'a += 10;',
'1.2 + 3.4;', '4 + 4;', '1 + 2 * 5;', '-a + 1;', '2 + 2;']
mut e := []ast.Stmt{}
table := table.new_table()
vpref := &pref.Preferences{}

View File

@ -22,95 +22,39 @@ pub enum OS {
// Helper function to convert string names to OS enum
pub fn os_from_string(os_str string) ?OS {
match os_str {
'linux' {
return .linux
}
'windows' {
return .windows
}
'ios' {
return .ios
}
'macos' {
return .macos
}
'freebsd' {
return .freebsd
}
'openbsd' {
return .openbsd
}
'netbsd' {
return .netbsd
}
'dragonfly' {
return .dragonfly
}
'js' {
return .js
}
'solaris' {
return .solaris
}
'android' {
return .android
}
'haiku' {
return .haiku
}
'linux_or_macos' {
return .linux
}
'' {
return ._auto
}
else {
return error('bad OS $os_str')
}
'linux' { return .linux }
'windows' { return .windows }
'ios' { return .ios }
'macos' { return .macos }
'freebsd' { return .freebsd }
'openbsd' { return .openbsd }
'netbsd' { return .netbsd }
'dragonfly' { return .dragonfly }
'js' { return .js }
'solaris' { return .solaris }
'android' { return .android }
'haiku' { return .haiku }
'linux_or_macos' { return .linux }
'' { return ._auto }
else { return error('bad OS $os_str') }
}
}
pub fn (o OS) str() string {
match o {
._auto {
return 'RESERVED: AUTO'
}
.ios {
return 'iOS'
}
.macos {
return 'MacOS'
}
.linux {
return 'Linux'
}
.windows {
return 'Windows'
}
.freebsd {
return 'FreeBSD'
}
.openbsd {
return 'OpenBSD'
}
.netbsd {
return 'NetBSD'
}
.dragonfly {
return 'Dragonfly'
}
.js {
return 'JavaScript'
}
.android {
return 'Android'
}
.solaris {
return 'Solaris'
}
.haiku {
return 'Haiku'
}
._auto { return 'RESERVED: AUTO' }
.ios { return 'iOS' }
.macos { return 'MacOS' }
.linux { return 'Linux' }
.windows { return 'Windows' }
.freebsd { return 'FreeBSD' }
.openbsd { return 'OpenBSD' }
.netbsd { return 'NetBSD' }
.dragonfly { return 'Dragonfly' }
.js { return 'JavaScript' }
.android { return 'Android' }
.solaris { return 'Solaris' }
.haiku { return 'Haiku' }
}
}

View File

@ -1104,7 +1104,7 @@ fn (mut s Scanner) text_scan() token.Token {
}
}
if is_separate_line_comment {
comment = '|' + comment
comment = '\x01' + comment
}
return s.new_token(.comment, comment, comment.len + 2)
}

View File

@ -3,7 +3,7 @@ module scanner
import os
struct TestStruct {
test string
test string
}
fn (mut t TestStruct) test_struct() {
@ -15,18 +15,19 @@ fn (mut t TestStruct) test_struct_w_return() string {
return t.test
}
fn (mut t TestStruct) test_struct_w_high_order(cb fn(int)string) string {
fn (mut t TestStruct) test_struct_w_high_order(cb fn (int) string) string {
assert @STRUCT == 'TestStruct'
return 'test'+cb(2)
return 'test' + cb(2)
}
struct TestFn { }
struct TestFn {
}
fn (mut t TestFn) tst_1() {
assert @FN == 'tst_1'
}
fn (mut t TestFn) tst_2(cb fn(int)) {
fn (mut t TestFn) tst_2(cb fn (int)) {
assert @FN == 'tst_2'
cb(1)
}
@ -35,7 +36,7 @@ fn fn_name_mod_level() {
assert @FN == 'fn_name_mod_level'
}
fn fn_name_mod_level_high_order(cb fn(int)) {
fn fn_name_mod_level_high_order(cb fn (int)) {
assert @FN == 'fn_name_mod_level_high_order'
cb(1)
}
@ -49,16 +50,14 @@ fn test_at_file() {
fn test_at_fn() {
// Test @FN
assert @FN == 'test_at_fn'
fn_name_mod_level()
fn_name_mod_level_high_order(fn(i int){
fn_name_mod_level_high_order(fn (i int) {
t := i + 1
assert t == 2
})
mut tfn := TestFn{}
tfn.tst_1()
tfn.tst_2(fn(i int){
tfn.tst_2(fn (i int) {
t := i + 1
assert t == 2
})
@ -72,10 +71,12 @@ fn test_at_mod() {
fn test_at_struct() {
// Test @STRUCT
assert @STRUCT == ''
mut ts := TestStruct { test: "test" }
mut ts := TestStruct{
test: 'test'
}
ts.test_struct()
r1 := ts.test_struct_w_return()
r2 := ts.test_struct_w_high_order(fn(i int)string{
r2 := ts.test_struct_w_high_order(fn (i int) string {
assert @STRUCT == ''
return i.str()
})

View File

@ -59,14 +59,12 @@ fn test_float_without_fraction() {
assert result[0] == .name
assert result[1] == .decl_assign
assert result[2] == .number
result = scan_kinds('return 3., 4.')
assert result.len == 4
assert result[0] == .key_return
assert result[1] == .number
assert result[2] == .comma
assert result[3] == .number
result = scan_kinds('fun(5.)')
assert result.len == 4
assert result[0] == .name

View File

@ -6,10 +6,10 @@ module table
// e.g. `[unsafe]`
pub struct Attr {
pub:
name string // [name]
is_string bool // ['name']
is_ctdefine bool // [if name]
arg string // [name: arg]
name string // [name]
is_string bool // ['name']
is_ctdefine bool // [if name]
arg string // [name: arg]
is_string_arg bool // [name: 'arg']
}
@ -21,8 +21,7 @@ pub fn (attr Attr) str() string {
}
if attr.is_string {
s += "'$attr.name'"
}
else {
} else {
s += attr.name
if attr.arg.len > 0 {
s += ': '
@ -31,8 +30,7 @@ pub fn (attr Attr) str() string {
// FIXME: other escapes e.g. \r\n
a = a.replace("'", "\\'")
s += "'$a'"
}
else {
} else {
s += attr.arg
}
}

View File

@ -64,7 +64,7 @@ fn assert_parse_invalid_flag(mut t table.Table, flag string) {
assert false
}
fn make_flag(os, name, value string) cflag.CFlag {
fn make_flag(os string, name string, value string) cflag.CFlag {
return cflag.CFlag{
mod: module_name
os: os

View File

@ -117,7 +117,10 @@ pub fn (f &Fn) source_signature() string {
sig += ', '
}
}
sig += ') $f.return_type_source_name'
sig += ')'
if f.return_type != void_type {
sig += ' $f.return_type_source_name'
}
return sig
}

View File

@ -56,7 +56,7 @@ fn opendiff_exists() bool {
pub fn color_compare_files(diff_cmd string, file1 string, file2 string) string {
if diff_cmd != '' {
full_cmd := '$diff_cmd --minimal --text --unified=2 ' + ' --show-function-line="fn " "$file1" "$file2" '
full_cmd := '$diff_cmd --minimal --text --unified=2 --show-function-line="fn " "$file1" "$file2" '
x := os.exec(full_cmd) or {
return 'comparison command: `$full_cmd` failed'
}

View File

@ -91,7 +91,7 @@ pub fn formatted_error(kind string, omsg string, filepath string, pos token.Posi
}
}
column := imax(0, pos.pos - p - 1)
position := '$path:${pos.line_nr+1}:${imax(1,column+1)}:'
position := '$path:${pos.line_nr + 1}:${imax(1, column + 1)}:'
scontext := source_context(kind, source, column, pos).join('\n')
final_position := bold(position)
final_kind := bold(color(kind, kind))
@ -116,7 +116,7 @@ pub fn source_context(kind string, source string, column int, pos token.Position
end_column := imax(0, imin(column + imax(0, pos.len), sline.len))
cline := if iline == pos.line_nr { sline[..start_column] + color(kind, sline[start_column..end_column]) +
sline[end_column..] } else { sline }
clines << '${iline+1:5d} | ' + cline.replace('\t', tab_spaces)
clines << '${iline + 1:5d} | ' + cline.replace('\t', tab_spaces)
//
if iline == pos.line_nr {
// The pointerline should have the same spaces/tabs as the offending

View File

@ -21,7 +21,7 @@ fn get_tests_in_dir(dir string) []string {
return tests
}
fn check_path(vexe, dir string, tests []string) int {
fn check_path(vexe string, dir string, tests []string) int {
mut nb_fail := 0
paths := vtest.filter_vtest_only(tests, {
basepath: dir

View File

@ -166,7 +166,7 @@ fn get_array_content(tokens []Token, st_idx int) ?([]string, int) {
.str {
vals << tok.val
if tokens[idx + 1].typ !in [.comma, .rabr] {
return error('vmod: invalid separator "${tokens[idx+1].val}"')
return error('vmod: invalid separator "${tokens[idx + 1].val}"')
}
idx += if tokens[idx + 1].typ == .comma { 2 } else { 1 }
}
@ -233,14 +233,14 @@ fn (mut p Parser) parse() ?Manifest {
mn.author = field_value
}
'dependencies' {
deps, idx := get_array_content(tokens, i + 1)?
deps, idx := get_array_content(tokens, i + 1) ?
mn.dependencies = deps
i = idx
continue
}
else {
if tokens[i + 1].typ == .labr {
vals, idx := get_array_content(tokens, i + 1)?
vals, idx := get_array_content(tokens, i + 1) ?
mn.unknown[field_name] = vals
i = idx
continue