mirror of https://github.com/vlang/v.git synced 2023-08-10 21:13:21 +03:00

vfmt fixes

Alexander Medvednikov 2019-07-16 17:59:07 +02:00
parent 7bbaf432e6
commit c35adbea91
18 changed files with 136 additions and 96 deletions

View File

@@ -5,6 +5,7 @@
 module main
 import os
+import strings
 struct CGen {
 out os.File
@@ -20,6 +21,7 @@ struct CGen {
 so_fns []string
 consts_init []string
 lines []string
+//buf strings.Builder
 is_user bool
 mut:
 run Pass
@@ -37,11 +39,11 @@ fn new_cgen(out_name_c string) *CGen {
 out := os.create(path) or {
 println('failed to create $path')
 return &CGen{}
 }
 gen := &CGen {
 out_path: path
 out: out
+//buf: strings.new_builder(10000)
 lines: _make(0, 1000, sizeof(string))
 }
 return gen
@@ -144,7 +146,8 @@ fn (g mut CGen) set_placeholder2(pos int, val string) {
 }
 fn (g mut CGen) insert_before(val string) {
-g.lines.insert(g.lines.len - 1, val)
+prev := g.lines[g.lines.len - 1]
+g.lines[g.lines.len - 1] = '$prev \n $val \n'
 }
 fn (g mut CGen) register_thread_fn(wrapper_name, wrapper_text, struct_text string) {
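A quick sketch of the new `insert_before` mechanics (illustration only, not part of the commit; it uses a plain []string and made-up line contents instead of CGen.lines): the value is now merged into the last generated line rather than inserted as its own array element.

fn main() {
	mut lines := ['int x = f();', 'return x;']
	val := 'free(tmp);'
	// old behaviour: lines.insert(lines.len - 1, val) would add a third element
	// new behaviour: fold val into the last element, separated by newlines
	prev := lines[lines.len - 1]
	lines[lines.len - 1] = '$prev \n $val \n'
	println(lines.len) // still 2
	println(lines[1])
}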

View File

@@ -44,11 +44,14 @@ enum OS {
 }
 enum Pass {
-// A very short pass that only looks at imports in the beginning of each file
+// A very short pass that only looks at imports in the beginning of
+// each file
 imports
-// First pass, only parses and saves declarations (fn signatures, consts, types).
+// First pass, only parses and saves declarations (fn signatures,
+// consts, types).
 // Skips function bodies.
-// We need this because in V things can be used before they are declared.
+// We need this because in V things can be used before they are
+// declared.
 decl
 // Second pass, parses function bodies and generates C or machine code.
 main
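A minimal sketch of the use-before-declaration case those comments describe (example code is mine, not from the diff): the `decl` pass registers the signature of `add` first, so `main` can call it even though `add` is defined lower in the file.

fn main() {
	// `add` is only declared further down; the decl pass makes this call legal
	println(add(2, 3))
}

fn add(a int, b int) int {
	return a + b
}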
@@ -622,6 +625,7 @@ mut args := ''
 if v.pref.build_mode != .build && (v.os == .linux || v.os == .freebsd || v.os == .openbsd ||
 v.os == .netbsd || v.os == .dragonfly) {
 a << '-lm -lpthread '
+// -ldl is a Linux only thing. BSDs have it in libc.
 if v.os == .linux {
 a << ' -ldl '
 }

View File

@@ -104,6 +104,7 @@ fn (c mut V) new_parser(path string, run Pass) Parser {
 fn (p mut Parser) next() {
 p.prev_tok2 = p.prev_tok
 p.prev_tok = p.tok
+p.scanner.prev_tok = p.tok
 res := p.scanner.scan()
 p.tok = res.tok
 p.lit = res.lit
@@ -144,7 +145,7 @@ fn (p mut Parser) parse() {
 p.mod = fq_mod.replace('.', '_dot_')
 if p.run == .imports {
 for p.tok == .key_import && p.peek() != .key_const {
-p.import_statement()
+p.imports()
 }
 if p.table.imports.contains('builtin') {
 p.error('module `builtin` cannot be imported')
@@ -160,7 +161,10 @@ fn (p mut Parser) parse() {
 }
 else {
 // TODO remove imported consts from the language
-p.import_statement()
+p.imports()
+if p.tok != .key_import {
+p.fgenln('')
+}
 }
 case Token.key_enum:
 p.next()
@@ -285,7 +289,7 @@ fn (p mut Parser) parse() {
 }
 }
-fn (p mut Parser) import_statement() {
+fn (p mut Parser) imports() {
 p.check(.key_import)
 // `import ()`
 if p.tok == .lpar {
@@ -482,7 +486,7 @@ fn (p mut Parser) struct_decl() {
 else {
 // type alias is generated later
 if !is_c {
-kind := if is_union{'union'} else { 'struct'}
+kind := if is_union {'union'} else {'struct'}
 p.gen_typedef('typedef $kind $name $name;')
 p.gen_type('$kind $name {')
 }
@@ -510,9 +514,7 @@ fn (p mut Parser) struct_decl() {
 }
 }
 // Struct `C.Foo` declaration, no body
-// println('EEEE $is_c $is_struct')
 if is_c && is_struct && p.tok != .lcbr {
-// println('skipping struct header $name')
 p.table.register_type2(typ)
 return
 }
@@ -522,7 +524,15 @@ fn (p mut Parser) struct_decl() {
 mut is_pub := false
 mut is_mut := false
 mut names := []string// to avoid dup names TODO alloc perf
-// mut is_mut_mut := false
+/*
+mut fmt_max_len := 0
+for field in typ.fields {
+if field.name.len > max_len {
+fmt_max_len = field.name.len
+}
+}
+println('fmt max len = $max_len nrfields=$typ.fields.len pass=$p.run')
+*/
 for p.tok != .rcbr {
 if p.tok == .key_pub {
 if is_pub {
@@ -592,6 +602,7 @@ fn (p mut Parser) struct_decl() {
 }
 if !is_ph && p.first_run() {
 p.table.register_type2(typ)
+//println('registering 1 nrfields=$typ.fields.len')
 }
 p.check(.rcbr)
 if !is_c {
@@ -619,8 +630,8 @@ fn (p mut Parser) enum_decl(_enum_name string) {
 for p.tok == .name {
 field := p.check_name()
 fields << field
-name := '${p.mod}__${enum_name}_$field'
 p.fgenln('')
+name := '${p.mod}__${enum_name}_$field'
 if p.run == .main {
 p.cgen.consts << '#define $name $val'
 }
@@ -655,7 +666,7 @@ fn (p mut Parser) check_name() string {
 fn (p mut Parser) check_string() string {
 s := p.lit
-p.check(.strtoken)
+p.check(.str)
 return s
 }
@@ -663,7 +674,7 @@ fn (p &Parser) strtok() string {
 if p.tok == .name {
 return p.lit
 }
-if p.tok == .strtoken {
+if p.tok == .str {
 return '"$p.lit"'
 }
 res := p.tok.str()
@@ -701,6 +712,11 @@ fn (p mut Parser) check(expected Token) {
 p.fmt_inc()
 }
 p.next()
+if p.scanner.line_comment != '' {
+//p.fgenln('// ! "$p.scanner.line_comment"')
+//p.scanner.line_comment = ''
+}
 }
 fn (p mut Parser) error(s string) {
@@ -919,7 +935,7 @@ fn (p &Parser) print_tok() {
 println(p.lit)
 return
 }
-if p.tok == .strtoken {
+if p.tok == .str {
 println('"$p.lit"')
 return
 }
@@ -1585,6 +1601,7 @@ fn (p mut Parser) dot(str_typ string, method_ph int) string {
 if p.tok == .key_type {
 field_name = 'type'
 }
+p.fgen(field_name)
 p.log('dot() field_name=$field_name typ=$str_typ')
 //if p.fileis('main.v') {
 //println('dot() field_name=$field_name typ=$str_typ prev_tok=${prev_tok.str()}')
@@ -2120,7 +2137,7 @@ fn (p mut Parser) factor() string {
 p.char_expr()
 typ = 'byte'
 return typ
-case Token.strtoken:
+case Token.str:
 p.string_expr()
 typ = 'string'
 return typ
@@ -2232,11 +2249,11 @@ fn format_str(str string) string {
 }
 fn (p mut Parser) string_expr() {
-// println('.strtoken EXPR')
+// println('.str EXPR')
 str := p.lit
-p.fgen('\'$str\'')
-// No ${}, just return simple string
+// No ${}, just return a simple string
 if p.peek() != .dollar {
+p.fgen('\'$str\'')
 // println('before format: "$str"')
 f := format_str(str)
 // println('after format: "$str"')
@@ -2252,8 +2269,11 @@ fn (p mut Parser) string_expr() {
 // tmp := p.get_tmp()
 mut args := '"'
 mut format := '"'
-for p.tok == .strtoken {
+p.fgen('\'')
+mut complex_inter := false // for vfmt
+for p.tok == .str {
 // Add the string between %d's
+p.fgen(p.lit)
 p.lit = p.lit.replace('%', '%%')
 format += format_str(p.lit)
 p.next()// skip $
@@ -2261,7 +2281,13 @@ fn (p mut Parser) string_expr() {
 continue
 }
 // Handle .dollar
-p.next()
+p.check(.dollar)
+// If there's no string after current token, it means we are in
+// a complex expression (`${...}`)
+if p.peek() != .str {
+p.fgen('{')
+complex_inter = true
+}
 // Get bool expr inside a temp var
 p.cgen.start_tmp()
 typ := p.bool_expression()
@@ -2299,6 +2325,10 @@ fn (p mut Parser) string_expr() {
 format += f
 }
 }
+if complex_inter {
+p.fgen('}')
+}
+p.fgen('\'')
 // println("hello %d", num) optimization.
 if p.cgen.nogen {
 return
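For reference, a small example (mine, not from the commit) of the two interpolation forms this code distinguishes: a bare `$var`, and a full `${...}` expression, which is the case `complex_inter` tracks so vfmt re-emits the braces.

fn main() {
	name := 'V'
	// simple interpolation: bare $var, no braces needed
	println('hello, $name')
	// complex interpolation: a full expression inside ${...}
	println('sum=${2 + 3}')
}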
@@ -3140,7 +3170,7 @@ else {
 fn (p mut Parser) return_st() {
 p.cgen.insert_before(p.cur_fn.defer_text)
 p.check(.key_return)
+p.fgen(' ')
 fn_returns := p.cur_fn.typ != 'void'
 if fn_returns {
 if p.tok == .rcbr {

View File

@@ -19,15 +19,15 @@ mut:
 debug bool
 line_comment string
 started bool
-is_fmt bool
 // vfmt fields
 fmt_out strings.Builder
 fmt_indent int
 fmt_line_empty bool
+prev_tok Token
 }
 const (
-SINGLE_QUOTE = `\'`
+SingleQuote = `\'`
 //QUOTE = `"`
 )
@@ -159,9 +159,6 @@ fn (s mut Scanner) skip_whitespace() {
 if !(s.text[s.pos] == `\n` && s.pos > 0 && s.text[s.pos-1] == `\r`) {
 s.line_nr++
 }
-if s.is_fmt {
-return
-}
 }
 s.pos++
 }
@@ -179,11 +176,17 @@ fn (s mut Scanner) get_var_name(pos int) string {
 }
 // CAO stands for Compound Assignment Operators (e.g '+=' )
+/*
 fn (s mut Scanner) cao_change(operator string) {
 s.text = s.text.substr(0, s.pos - operator.len) + ' = ' + s.get_var_name(s.pos - operator.len) + ' ' + operator + ' ' + s.text.substr(s.pos + 1, s.text.len)
 }
+*/
 fn (s mut Scanner) scan() ScanRes {
+if s.line_comment != '' {
+//s.fgenln('// LOL "$s.line_comment"')
+//s.line_comment = ''
+}
 // if s.file_path == 'd.v' {
 // println('\nscan()')
 // }
@@ -199,19 +202,15 @@ fn (s mut Scanner) scan() ScanRes {
 if !s.inside_string {
 s.skip_whitespace()
 }
-if s.is_fmt && s.text[s.pos] == `\n` {
-return scan_res(.nl, '')
-}
 // End of $var, start next string
-if !s.is_fmt && s.dollar_end {
+if s.dollar_end {
 // fmt.Println("end of $var, get string", s.pos, string(s.text[s.pos]))
-if s.text[s.pos] == SINGLE_QUOTE {
-// fmt.Println("ENDDD")
+if s.text[s.pos] == SingleQuote {
 s.dollar_end = false
-return scan_res(.strtoken, '')
+return scan_res(.str, '')
 }
 s.dollar_end = false
-return scan_res(.strtoken, s.ident_string())
+return scan_res(.str, s.ident_string())
 }
 s.skip_whitespace()
 // end of file
@@ -242,15 +241,15 @@
 // at the next ', skip it
 if s.inside_string {
 // println('is_letter inside string! nextc=${nextc.str()}')
-if next_char == SINGLE_QUOTE {
+if next_char == SingleQuote {
 // println('var is last before QUOTE')
 s.pos++
 s.dollar_start = false
 s.inside_string = false
 }
 }
-if s.dollar_start && next_char != `.` {
-// println('INSIDE .strtoken .dollar var=$name')
+if s.dollar_start && next_char != `.` {//&& next_char != `(` {
+// println('INSIDE .str .dollar var=$name')
 s.dollar_end = true
 s.dollar_start = false
 }
@@ -261,7 +260,7 @@
 }
 return scan_res(.name, name)
 }
-// number, `.123`
+// `123`, `.123`
 else if c.is_digit() || c == `.` && nextc.is_digit() {
 num := s.ident_number()
 return scan_res(.integer, num)
@@ -308,11 +307,11 @@
 return scan_res(.mod, '')
 case `?`:
 return scan_res(.question, '')
-case SINGLE_QUOTE:
-return scan_res(.strtoken, s.ident_string())
+case SingleQuote:
+return scan_res(.str, s.ident_string())
 // TODO allow double quotes
 // case QUOTE:
-// return scan_res(.strtoken, s.ident_string())
+// return scan_res(.str, s.ident_string())
 case `\``:
 return scan_res(.chartoken, s.ident_char())
 case `(`:
@@ -337,11 +336,11 @@
 if s.inside_string {
 s.pos++
 // TODO UN.neEDED?
-if s.text[s.pos] == SINGLE_QUOTE {
+if s.text[s.pos] == SingleQuote {
 s.inside_string = false
-return scan_res(.strtoken, '')
+return scan_res(.str, '')
 }
-return scan_res(.strtoken, s.ident_string())
+return scan_res(.str, s.ident_string())
 }
 else {
 return scan_res(.rcbr, '')
@@ -388,10 +387,6 @@
 }
 s.line_nr++
 hash := s.text.substr(start, s.pos)
-if s.is_fmt {
-// fmt needs NL after #
-s.pos--
-}
 return scan_res(.hash, hash.trim_space())
 case `>`:
 if nextc == `=` {
@@ -471,17 +466,10 @@
 s.line_nr++
 s.line_comment = s.text.substr(start + 1, s.pos)
 s.line_comment = s.line_comment.trim_space()
-s.fgenln('// $s.line_comment')
-if s.is_fmt {
-// fmt needs NL after comment
-s.pos--
-}
-else {
-// Skip comment
+s.fgenln('// ${s.prev_tok.str()} "$s.line_comment"')
+// Skip the comment (return the next token)
 return s.scan()
 }
-return scan_res(.line_com, s.line_comment)
-}
 // Multiline comments
 if nextc == `*` {
 start := s.pos
@@ -509,9 +497,6 @@
 end := s.pos + 1
 comm := s.text.substr(start, end)
 s.fgenln(comm)
-if s.is_fmt {
-return scan_res(.mline_com, comm)
-}
 // Skip if not in fmt mode
 return s.scan()
 }
@@ -564,7 +549,7 @@ fn (s mut Scanner) ident_string() string {
 }
 prevc := s.text[s.pos - 1]
 // end of string
-if c == SINGLE_QUOTE && (prevc != slash || (prevc == slash && s.text[s.pos - 2] == slash)) {
+if c == SingleQuote && (prevc != slash || (prevc == slash && s.text[s.pos - 2] == slash)) {
 // handle '123\\' slash at the end
 break
 }
@@ -581,17 +566,15 @@ fn (s mut Scanner) ident_string() string {
 s.error('0 character in a string literal')
 }
 // ${var}
-if !s.is_fmt && c == `{` && prevc == `$` {
+if c == `{` && prevc == `$` {
 s.inside_string = true
-// fmt.Println("breaking out of is()")
 // so that s.pos points to $ at the next step
 s.pos -= 2
-// fmt.Println("break pos=", s.pos, "c=", string(s.text[s.pos]), "d=", s.text[s.pos])
 break
 }
 // $var
 // if !s.is_fmt && c != `{` && c != ` ` && ! (c >= `0` && c <= `9`) && prevc == `$` {
-if !s.is_fmt && (c.is_letter() || c == `_`) && prevc == `$` {
+if (c.is_letter() || c == `_`) && prevc == `$` {
 s.inside_string = true
 s.dollar_start = true
 // println('setting s.dollar=true pos=$s.pos')
@@ -600,7 +583,7 @@ fn (s mut Scanner) ident_string() string {
 }
 }
 mut lit := ''
-if s.text[start] == SINGLE_QUOTE {
+if s.text[start] == SingleQuote {
 start++
 }
 mut end := s.pos
@@ -685,7 +668,7 @@ fn (s mut Scanner) peek() Token {
 fn (s mut Scanner) debug_tokens() {
 s.pos = 0
 fname := s.file_path.all_after('/')
-println('\n===DEBUG TOKENS $fname ============')
+println('\n===DEBUG TOKENS $fname===')
 // allToks := ''
 s.debug = true
 for {

View File

@@ -6,20 +6,21 @@ module main
 enum Token {
 eof
-name
-integer
-strtoken
-chartoken
+name // user
+integer // 123
+str // 'foo'
+str_inter // 'name=$user.name'
+chartoken // `A`
 plus
 minus
 mul
 div
 mod
-xor
-pipe
-inc
-dec
-and
+xor // ^
+pipe // |
+inc // ++
+dec // --
+and // &&
 logical_or
 not
 bit_not
@@ -61,8 +62,8 @@ enum Token {
 ge
 le
 // comments
-line_com
-mline_com
+//line_com
+//mline_com
 nl
 dot
 dotdot
@@ -127,7 +128,7 @@ fn build_token_str() []string {
 s[Token.eof] = '.eof'
 s[Token.name] = '.name'
 s[Token.integer] = '.integer'
-s[Token.strtoken] = 'STR'
+s[Token.str] = 'STR'
 s[Token.chartoken] = '.chartoken'
 s[Token.plus] = '+'
 s[Token.minus] = '-'
@@ -177,7 +178,7 @@ fn build_token_str() []string {
 s[Token.question] = '?'
 s[Token.left_shift] = '<<'
 s[Token.righ_shift] = '>>'
-s[Token.line_com] = '//'
+//s[Token.line_com] = '//'
 s[Token.nl] = 'NLL'
 s[Token.dollar] = '$'
 s[Token.key_assert] = 'assert'

View File

@@ -299,3 +299,22 @@ fn test_reverse() {
 t := ''
 assert t.reverse() == t
 }
+struct Foo {
+bar int
+}
+fn (f Foo) baz() string {
+return 'baz'
+}
+fn test_interpolation() {
+num := 7
+mut s := 'number=$num'
+assert s == 'number=7'
+foo := Foo{}
+s = 'baz=${foo.baz()}'
+assert s == 'baz=baz'
+}