// Copyright (c) 2019 Alexander Medvednikov. All rights reserved.
// Use of this source code is governed by an MIT license
// that can be found in the LICENSE file.

module compiler

import strings
import os

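// fgen appends `s_` to the current formatted output line,
// prefixing it with the current indentation (tabs) when the line is still empty.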
[if vfmt]
fn (scanner mut Scanner) fgen(s_ string) {
	mut s := s_
	if s != ' ' {
		//s = s.trim_space()
	}
	if scanner.fmt_line_empty {
		s = strings.repeat(`\t`, scanner.fmt_indent) + s.trim_left(' ')
	}
	scanner.fmt_lines << s
	//scanner.fmt_out << s
	//scanner.fmt_out.write(s)
	scanner.fmt_line_empty = false
}

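// fgenln appends `s_` (with trailing spaces removed) and a newline to the
// formatted output, indenting the text if it starts a new line.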
[if vfmt]
fn (scanner mut Scanner) fgenln(s_ string) {
	mut s := s_.trim_right(' ')
	if scanner.fmt_line_empty && scanner.fmt_indent > 0 {
		s = strings.repeat(`\t`, scanner.fmt_indent) + s
	}
	scanner.fmt_lines << s
	//println('s="$s"')
	//scanner.fmt_lines << '//!'
	scanner.fmt_lines << '\n'
	//scanner.fmt_out.writeln(s)
	scanner.fmt_line_empty = true
}

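// fgen forwards `s` to the scanner's formatted output, but only during the main pass.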
[if vfmt]
fn (p mut Parser) fgen(s string) {
	if p.pass != .main {
		return
	}
	p.scanner.fgen(s)
}

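// fspace emits a single space to the formatted output.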
[if vfmt]
fn (p mut Parser) fspace() {
	if p.first_pass() {
		return
	}
	p.fgen(' ')
}

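// fspace_or_newline emits a newline if the two previous tokens were on
// different source lines, otherwise a single space.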
[if vfmt]
fn (p mut Parser) fspace_or_newline() {
	if p.first_pass() {
		return
	}
	if p.token_idx >= 2 && p.tokens[p.token_idx-1].line_nr !=
		p.tokens[p.token_idx-2].line_nr {
		p.fgen_nl()
	} else {
		p.fgen(' ')
	}
}

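// fgenln forwards `s` and a newline to the scanner's formatted output during the main pass.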
[if vfmt]
fn (p mut Parser) fgenln(s string) {
	if p.pass != .main {
		return
	}
	p.scanner.fgenln(s)
}

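// fgen_nl emits a newline, unless one was already generated right after a preceding // comment.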
[if vfmt]
fn (p mut Parser) fgen_nl() {
	if p.pass != .main {
		return
	}
	//println(p.tok)
	// Don't insert a newline after a comment
	/*
	if p.token_idx > 0 && p.tokens[p.token_idx-1].tok == .line_comment &&
		p.tokens[p.token_idx].tok != .line_comment {
		p.scanner.fgenln('notin')
		return
	}
	*/
	//if p.token_idx > 0 && p.token_idx < p.tokens.len &&
	// Previous token is a comment, and NL has already been generated?
	// Don't generate a second NL.
	if p.scanner.fmt_lines.len > 0 && p.scanner.fmt_lines.last() == '\n' &&
		p.token_idx > 2 &&
		p.tokens[p.token_idx-2].tok == .line_comment {
		//if p.fileis('parser.v') {
		//println(p.scanner.line_nr.str() + ' ' + p.tokens[p.token_idx-2].str())
		//}
		return
	}
	p.scanner.fgen_nl()
}

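// fgen_nl appends a bare newline to the formatted output and marks the new line as empty.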
[if vfmt]
fn (scanner mut Scanner) fgen_nl() {
	//scanner.fmt_lines << ' fgen_nl'
	//scanner.fmt_lines << '//fgen_nl\n'
	scanner.fmt_lines << '\n'
	//scanner.fmt_out.writeln('')
	scanner.fmt_line_empty = true
}

/*
fn (p mut Parser) peek() TokenKind {
	for {
		p.cgen.line = p.scanner.line_nr + 1
		tok := p.scanner.peek()
		if tok != .nl {
			return tok
		}
	}
	return .eof // TODO can never get here - v doesn't know that
}
*/

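// fmt_inc increases the indentation level used for the formatted output.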
[if vfmt]
fn (p mut Parser) fmt_inc() {
	if p.pass != .main {
		return
	}
	p.scanner.fmt_indent++
}

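// fmt_dec decreases the indentation level used for the formatted output.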
[if vfmt]
fn (p mut Parser) fmt_dec() {
	if p.pass != .main {
		return
	}
	p.scanner.fmt_indent--
}

[if vfmt]
fn (s mut Scanner) init_fmt() {
	// Right now we can't do `$if vfmt {`, so I'm using
	// a conditional function init_fmt to set this flag.
	// This function will only be called if `-d vfmt` is passed.
	s.is_fmt = true
}

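// fnext writes the current token to the formatted output, adjusting indentation
// around `{`/`}`, and copies any comment tokens that follow into the output.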
[if vfmt]
fn (p mut Parser) fnext() {
	//if p.tok == .eof {
	//println('eof ret')
	//return
	//}
	if p.tok == .rcbr && !p.inside_if_expr && p.prev_tok != .lcbr {
		p.fmt_dec()
	}
	s := p.strtok()
	if p.tok != .eof {
		p.fgen(s)
	}
	// vfmt: increase indentation on `{` unless it's `{}`
	inc_indent := false
	if p.tok == .lcbr && !p.inside_if_expr && p.peek() != .rcbr {
		p.fgen_nl()
		p.fmt_inc()
	}
	// Skip comments and add them to vfmt output
	if p.tokens[p.token_idx].tok in [.line_comment, .mline_comment] {
		// Newline before the comment and after consts and closing }
		if p.inside_const {
			//p.fgen_nl()
			//p.fgen_nl()
		}
		//is_rcbr := p.tok == .rcbr
		for p.token_idx < p.tokens.len - 1 {
			i := p.token_idx
			tok := p.tokens[p.token_idx].tok
			if tok != .line_comment && tok != .mline_comment {
				break
			}
			comment_token := p.tokens[i]
			next := p.tokens[i+1]
			comment_on_new_line := i == 0 ||
				comment_token.line_nr > p.tokens[i-1].line_nr
			//prev_token := p.tokens[p.token_idx - 1]
			comment := comment_token.lit
			// Newline before the comment, but not between two // comments,
			// and not right after `{`, there's already a newline there
			if i > 0 && ((p.tokens[i-1].tok != .line_comment &&
				p.tokens[i-1].tok != .lcbr &&
				comment_token.line_nr > p.tokens[i-1].line_nr) ||
				p.tokens[i-1].tok == .hash) { // TODO not sure why this is needed, newline wasn't added after a hash
				p.fgen_nl()
			}
			if i > 0 && p.tokens[i-1].tok == .rcbr && p.scanner.fmt_indent == 0 {
				p.fgen_nl()
			}
			if tok == .line_comment {
				if !comment_on_new_line { //prev_token.line_nr < comment_token.line_nr {
					p.fgen(' ')
				}
				p.fgen('// ' + comment)
				/*
				if false && i > 0 {
					p.fgen(
						'pln=${p.tokens[i-1].line_nr} ${comment_token.str()} ' +
						'line_nr=$comment_token.line_nr next=${next.str()} next_line_nr=$next.line_nr')
				}
				*/
			} else {
				// /**/ comment
				p.fgen(comment)
			}
			//if next.tok == .line_comment && comment_token.line_nr < next.line_nr {
			if comment_token.line_nr < next.line_nr {
				//p.fgenln('nextcm')
				p.fgen_nl()
			}
			p.token_idx++
		}
		if inc_indent {
			p.fgen_nl()
		}
	}
}

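// fremove_last blanks the most recently emitted formatted line.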
[if vfmt]
fn (p mut Parser) fremove_last() {
	if p.scanner.fmt_lines.len > 0 {
		p.scanner.fmt_lines[p.scanner.fmt_lines.len-1] = ''
	}
}

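// gen_fmt joins the collected formatted lines, applies a few textual cleanups,
// and writes the result to a temporary file for the file(s) being formatted.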
[if vfmt]
fn (p &Parser) gen_fmt() {
	if p.pass != .main {
		return
	}
	//println('gen fmt name=$p.file_name path=$p.file_path')
	if p.file_name == '' {
		return
	}
	is_all := p.v.v_fmt_all
	vfmt_file := p.v.v_fmt_file
	if p.file_path != vfmt_file && !is_all {
		// skip everything except the last file (given by the CLI argument)
		return
	}
	//s := p.scanner.fmt_out.str().replace('\n\n\n', '\n').trim_space()
	//s := p.scanner.fmt_out.str().trim_space()
	//p.scanner.fgenln('// nice')
	mut s := p.scanner.fmt_lines.join('')
	/*.replace_each([
		'\n\n\n\n', '\n\n',
		' \n', '\n',
		') or{', ') or {',
	])
	*/
	//.replace('\n\n\n\n', '\n\n')
	s = s.replace(' \n', '\n')
	s = s.replace(') or {', ') or {')
	s = s.replace(') or{', ') or {')
	s = s.replace(')or{', ') or {')
	s = s.replace('or{', 'or {')
	s = s.replace('}}\n', '}\n\t}\n')
	if s == '' {
		return
	}
	//files := ['get_type.v']
	if p.file_path.contains('compiler/vfmt.v') {
		return
	}
	//if !(p.file_name in files) { return }
	if is_all {
		if p.file_path.len > 0 {
			path := write_formatted_source(p.file_name, s)
			os.cp(path, p.file_path) or { panic(err) }
			eprintln('Written fmt file to: $p.file_path')
		}
	}
	if p.file_path == vfmt_file {
		res_path := write_formatted_source(p.file_name, s)
		mut vv := p.v
		vv.v_fmt_file_result = res_path
	}
}

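// write_formatted_source writes `s` to a file named `file_name` in the OS
// temp directory and returns the resulting path.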
fn write_formatted_source(file_name string, s string) string {
	path := os.tmpdir() + '/' + file_name
	mut out := os.create(path) or {
		verror('failed to create file $path')
		return ''
	}
	//eprintln('replacing ${p.file_path} ...\n')
	out.writeln(s.trim_space()) //p.scanner.fmt_out.str().trim_space())
	out.close()
	return path
}