
parser: cleanup & reuse

joe-conigliaro 2019-10-26 00:34:12 +11:00 committed by Alexander Medvednikov
parent 784847cf18
commit 7d418e9105
6 changed files with 187 additions and 200 deletions

View File

@@ -158,7 +158,7 @@ fn (p mut Parser) print_error_context(){
 p.cgen.save()
 // V up hint
 cur_path := os.getwd()
-if !p.pref.is_repl && !p.pref.is_test && ( p.file_path_id.contains('v/compiler') || cur_path.contains('v/compiler') ){
+if !p.pref.is_repl && !p.pref.is_test && ( p.file_path.contains('v/compiler') || cur_path.contains('v/compiler') ){
 println('\n=========================')
 println('It looks like you are building V. It is being frequently updated every day.')
 println('If you didn\'t modify V\'s code, most likely there was a change that ')

View File

@@ -229,7 +229,7 @@ fn (p mut Parser) fn_decl() {
 }
 // Don't allow modifying types from a different module
 if !p.first_pass() && !p.builtin_mod && t.mod != p.mod &&
-p.file_path_id != 'vgen' // allow .str() on builtin arrays
+!p.is_vgen // allow .str()
 {
 //println('T.mod=$T.mod')
 //println('p.mod=$p.mod')
@@ -866,7 +866,7 @@ fn (p mut Parser) fn_call_args(f mut Fn) {
 if p.v.pref.is_debug && f.name == 'panic' && !p.is_js {
 mod_name := p.mod.replace('_dot_', '.')
 fn_name := p.cur_fn.name.replace('${p.mod}__', '')
-file_path := cescaped_path(p.file_path_id)
+file_path := cescaped_path(p.file_path)
 p.cgen.resetln(p.cgen.cur_line.replace(
 'v_panic (',
 'panic_debug ($p.scanner.line_nr, tos3("$file_path"), tos3("$mod_name"), tos2((byte *)"$fn_name"), '
@@ -1435,9 +1435,9 @@ fn (p &Parser) find_misspelled_local_var(name string, min_match f32) string {
 }
 n := name.all_after('.')
 if var.name == '' || (n.len - var.name.len > 2 || var.name.len - n.len > 2) { continue }
-coeff := strings.dice_coefficient(var.name, n)
-if coeff > closest {
-closest = coeff
+c := strings.dice_coefficient(var.name, n)
+if c > closest {
+closest = c
 closest_var = var.name
 }
 }
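For context, a minimal hedged sketch of how the dice-coefficient scoring used above picks the closest name (the candidate list, query and variable names are made up for illustration; only strings.dice_coefficient itself comes from the code in this diff):

import strings

fn main() {
	candidates := ['file_path', 'file_name', 'closest_var']
	query := 'file_pth'
	min_match := f32(0.50)
	mut closest := f32(0)
	mut closest_name := ''
	for name in candidates {
		// a higher coefficient means the two strings are more similar
		c := strings.dice_coefficient(name, query)
		if c > closest {
			closest = c
			closest_name = name
		}
	}
	if closest >= min_match {
		println('did you mean `$closest_name`?')
	}
}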

View File

@@ -67,8 +67,9 @@ pub mut:
 out_name string // "program.exe"
 vroot string
 mod string // module being built with -lib
-parsers []Parser
+parsers []Parser // file parsers
 vgen_buf strings.Builder // temporary buffer for generated V code (.str() etc)
+file_parser_idx map[string]int // map absolute file path to v.parsers index
 cached_mods []string
 }
@@ -137,15 +138,17 @@ pub fn (v mut V) finalize_compilation(){
 }
 }
-pub fn (v mut V) add_parser(parser Parser) {
+pub fn (v mut V) add_parser(parser Parser) int {
 v.parsers << parser
+pidx := v.parsers.len-1
+v.file_parser_idx[os.realpath(parser.file_path)] = pidx
+return pidx
 }
 pub fn (v &V) get_file_parser_index(file string) ?int {
-for i, p in v.parsers {
-if os.realpath(p.file_path_id) == os.realpath(file) {
-return i
-}
+file_path := os.realpath(file)
+if file_path in v.file_parser_idx {
+return v.file_parser_idx[file_path]
 }
 return error('parser for "$file" not found')
 }
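A hedged usage sketch of the new index map (the call site and file path are hypothetical; only add_parser and get_file_parser_index come from this diff): registering a parser records its position once, so later lookups hit the map instead of re-scanning v.parsers:

// assumes `v` is the compiler instance and `p` a freshly created Parser
pidx := v.add_parser(p) // stores os.realpath(p.file_path) => pidx in v.file_parser_idx
idx := v.get_file_parser_index('/home/user/hello.v') or {
	panic(err) // no parser was registered for that path
}
println(idx == pidx)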
@@ -157,9 +160,9 @@ pub fn (v mut V) parse(file string, pass Pass) int {
 mut p := v.new_parser_from_file(file)
 p.parse(pass)
 //if p.pref.autofree { p.scanner.text.free() free(p.scanner) }
-v.add_parser(p)
-return v.parsers.len-1
+return v.add_parser(p)
 }
+// println('matched ' + v.parsers[pidx].file_path + ' with $file')
 v.parsers[pidx].parse(pass)
 //if v.parsers[i].pref.autofree { v.parsers[i].scanner.text.free() free(v.parsers[i].scanner) }
 return pidx
@@ -274,8 +277,9 @@ pub fn (v mut V) compile() {
 }
 // parse generated V code (str() methods etc)
-mut vgen_parser := v.new_parser_from_string(v.vgen_buf.str(), 'vgen')
+mut vgen_parser := v.new_parser_from_string(v.vgen_buf.str())
 // free the string builder which held the generated methods
+vgen_parser.is_vgen = true
 v.vgen_buf.free()
 vgen_parser.parse(.main)
 // v.parsers.add(vgen_parser)
@@ -582,8 +586,7 @@ pub fn (v mut V) add_v_files_to_compile() {
 v.log('imports0:')
 println(v.table.imports)
 println(v.files)
-p.import_table.register_import('os', 0)
-v.table.file_imports[p.file_path_id] = p.import_table
+p.register_import('os', 0)
 p.table.imports << 'os'
 p.table.register_module('os')
 }
@@ -622,9 +625,9 @@ pub fn (v mut V) add_v_files_to_compile() {
 }
 }
 // add remaining main files last
-for _, fit in v.table.file_imports {
-if fit.module_name != 'main' { continue }
-v.files << fit.file_path_id
+for p in v.parsers {
+if p.mod != 'main' { continue }
+v.files << p.file_path
 }
 }
@@ -684,9 +687,9 @@ pub fn (v &V) get_user_files() []string {
 // get module files from already parsed imports
 fn (v &V) get_imported_module_files(mod string) []string {
 mut files := []string
-for _, fit in v.table.file_imports {
-if fit.module_name == mod {
-files << fit.file_path_id
+for p in v.parsers {
+if p.mod == mod {
+files << p.file_path
 }
 }
 return files
@@ -694,48 +697,34 @@ fn (v &V) get_imported_module_files(mod string) []string {
 // parse deps from already parsed builtin/user files
 pub fn (v mut V) parse_lib_imports() {
-mut done_fits := []string
 mut done_imports := []string
-for {
-for _, fit in v.table.file_imports {
-if fit.file_path_id in done_fits { continue }
-for _, mod in fit.imports {
+for i in 0..v.parsers.len {
+for _, mod in v.parsers[i].import_table.imports {
 if mod in done_imports { continue }
 import_path := v.find_module_path(mod) or {
-pidx := v.get_file_parser_index(fit.file_path_id) or { verror(err) break }
-v.parsers[pidx].error_with_token_index('cannot import module "$mod" (not found)', fit.get_import_tok_idx(mod))
+v.parsers[i].error_with_token_index(
+'cannot import module "$mod" (not found)',
+v.parsers[i].import_table.get_import_tok_idx(mod))
 break
 }
 vfiles := v.v_files_from_dir(import_path)
 if vfiles.len == 0 {
-pidx := v.get_file_parser_index(fit.file_path_id) or { verror(err) break }
-v.parsers[pidx].error_with_token_index('cannot import module "$mod" (no .v files in "$import_path")', fit.get_import_tok_idx(mod))
+v.parsers[i].error_with_token_index(
+'cannot import module "$mod" (no .v files in "$import_path")',
+v.parsers[i].import_table.get_import_tok_idx(mod))
 }
 // Add all imports referenced by these libs
 for file in vfiles {
-pid := v.parse(file, .imports)
-p_mod := v.parsers[pid].import_table.module_name
+pidx := v.parse(file, .imports)
+p_mod := v.parsers[pidx].mod
 if p_mod != mod {
-v.parsers[pid].error_with_token_index('bad module definition: $fit.file_path_id imports module "$mod" but $file is defined as module `$p_mod`', 1)
+v.parsers[pidx].error_with_token_index(
+'bad module definition: ${v.parsers[pidx].file_path} imports module "$mod" but $file is defined as module `$p_mod`', 1)
 }
 }
 done_imports << mod
 }
-done_fits << fit.file_path_id
 }
-if v.table.file_imports.size == done_fits.len { break}
-}
-}
-// return resolved dep graph (order deps)
-pub fn (v &V) resolve_deps() &DepGraph {
-mut dep_graph := new_dep_graph()
-dep_graph.from_import_tables(v.table.file_imports)
-deps_resolved := dep_graph.resolve()
-if !deps_resolved.acyclic {
-verror('import cycle detected between the following modules: \n' + deps_resolved.display_cycles())
-}
-return deps_resolved
 }
 pub fn get_arg(joined_args, arg, def string) string {

View File

@@ -10,15 +10,117 @@ const (
 v_modules_path = os.home_dir() + '.vmodules'
 )
-// add a module and its deps (module speficic dag method)
-pub fn(graph mut DepGraph) from_import_tables(import_tables map[string]FileImportTable) {
-for _, fit in import_tables {
+// Holds import information scoped to the parsed file
+struct ImportTable {
+mut:
+imports map[string]string // alias => module
+used_imports []string // alias
+import_tok_idx map[string]int // module => idx
+}
+// Once we have a module format we can read from module file instead
+// this is not optimal
+fn (table &Table) qualify_module(mod string, file_path string) string {
+for m in table.imports {
+if m.contains('.') && m.contains(mod) {
+m_parts := m.split('.')
+m_path := m_parts.join(os.path_separator)
+if mod == m_parts[m_parts.len-1] && file_path.contains(m_path) {
+return m
+}
+}
+}
+return mod
+}
+fn new_import_table() ImportTable {
+return ImportTable{
+imports: map[string]string
+}
+}
+fn (p mut Parser) register_import(mod string, tok_idx int) {
+p.register_import_alias(mod, mod, tok_idx)
+}
+fn (p mut Parser) register_import_alias(alias string, mod string, tok_idx int) {
+// NOTE: come back here
+// if alias in it.imports && it.imports[alias] == mod {}
+if alias in p.import_table.imports && p.import_table.imports[alias] != mod {
+p.error('cannot import $mod as $alias: import name $alias already in use"')
+}
+if mod.contains('.internal.') {
+mod_parts := mod.split('.')
+mut internal_mod_parts := []string
+for part in mod_parts {
+if part == 'internal' { break }
+internal_mod_parts << part
+}
+internal_parent := internal_mod_parts.join('.')
+if !p.mod.starts_with(internal_parent) {
+p.error('module $mod can only be imported internally by libs')
+}
+}
+p.import_table.imports[alias] = mod
+p.import_table.import_tok_idx[mod] = tok_idx
+}
+fn (it &ImportTable) get_import_tok_idx(mod string) int {
+return it.import_tok_idx[mod]
+}
+fn (it &ImportTable) known_import(mod string) bool {
+return mod in it.imports || it.is_aliased(mod)
+}
+fn (it &ImportTable) known_alias(alias string) bool {
+return alias in it.imports
+}
+fn (it &ImportTable) is_aliased(mod string) bool {
+for _, val in it.imports {
+if val == mod {
+return true
+}
+}
+return false
+}
+fn (it &ImportTable) resolve_alias(alias string) string {
+return it.imports[alias]
+}
+fn (it mut ImportTable) register_used_import(alias string) {
+if !(alias in it.used_imports) {
+it.used_imports << alias
+}
+}
+fn (it &ImportTable) is_used_import(alias string) bool {
+return alias in it.used_imports
+}
+// return resolved dep graph (order deps)
+pub fn (v &V) resolve_deps() &DepGraph {
+graph := v.import_graph()
+deps_resolved := graph.resolve()
+if !deps_resolved.acyclic {
+verror('import cycle detected between the following modules: \n' + deps_resolved.display_cycles())
+}
+return deps_resolved
+}
+// graph of all imported modules
+pub fn(v &V) import_graph() &DepGraph {
+mut graph := new_dep_graph()
+for p in v.parsers {
 mut deps := []string
-for _, m in fit.imports {
+for _, m in p.import_table.imports {
 deps << m
 }
-graph.add(fit.module_name, deps)
+graph.add(p.mod, deps)
 }
+return graph
 }
 // get ordered imports (module speficic dag method)
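A hedged sketch of how the relocated import bookkeeping fits together (the alias, module name and token index are illustrative; all functions shown are the ones added in this commit, and `p`/`v` are an assumed parser and compiler instance):

// an `import encoding.base64 as b64` statement ends up calling:
p.register_import_alias('b64', 'encoding.base64', import_tok_idx)
// later lookups go through the per-file ImportTable:
println(p.import_table.known_alias('b64'))             // true
println(p.import_table.resolve_alias('b64'))           // 'encoding.base64'
println(p.import_table.is_aliased('encoding.base64'))  // true
// module ordering is now derived from the parsers themselves:
deps_resolved := v.resolve_deps() // builds v.import_graph() and errors on cycles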

View File

@@ -10,7 +10,7 @@ import (
 )
 struct Parser {
-file_path_id string // unique id. if parsing file will be path eg, "/home/user/hello.v"
+file_path string // if parsing file will be path eg, "/home/user/hello.v"
 file_name string // "hello.v"
 file_platform string // ".v", "_windows.v", "_nix.v", "_darwin.v", "_linux.v" ...
 // When p.file_pcguard != '', it contains a
@@ -29,7 +29,7 @@ mut:
 lit string
 cgen &CGen
 table &Table
-import_table FileImportTable // Holds imports for just the file being parsed
+import_table ImportTable // Holds imports for just the file being parsed
 pass Pass
 os OS
 inside_const bool
@@ -64,6 +64,7 @@ mut:
 is_alloc bool // Whether current expression resulted in an allocation
 is_const_literal bool // `1`, `2.0` etc, so that `u64_var == 0` works
 in_dispatch bool // dispatching generic instance?
+is_vgen bool
 is_vweb bool
 is_sql bool
 is_js bool
@@ -81,8 +82,8 @@ const (
-// new parser from string. unique id specified in `id`.
-// tip: use a hashing function to auto generate `id` from `text` eg. sha1.hexhash(text)
-fn (v mut V) new_parser_from_string(text string, id string) Parser {
-mut p := v.new_parser(new_scanner(text), id)
+fn (v mut V) new_parser_from_string(text string) Parser {
+mut p := v.new_parser(new_scanner(text))
 p.scan_tokens()
 return p
 }
@@ -119,12 +120,17 @@ fn (v mut V) new_parser_from_file(path string) Parser {
 }
 }
-mut p := v.new_parser(new_scanner_file(path), path)
+mut p := v.new_parser(new_scanner_file(path))
 p = { p|
+file_path: path,
 file_name: path.all_after(os.path_separator),
 file_platform: path_platform,
 file_pcguard: path_pcguard,
-is_vh: path.ends_with('.vh')
+is_vh: path.ends_with('.vh'),
+v_script: path.ends_with('.vsh')
+}
+if p.v_script {
+println('new_parser: V script')
 }
 if p.pref.building_v {
 p.scanner.should_print_relative_paths_on_error = true
@@ -140,10 +146,9 @@ fn (v mut V) new_parser_from_file(path string) Parser {
 // creates a new parser. most likely you will want to use
 // `new_parser_file` or `new_parser_string` instead.
-fn (v mut V) new_parser(scanner &Scanner, id string) Parser {
+fn (v mut V) new_parser(scanner &Scanner) Parser {
 v.reset_cgen_file_line_parameters()
 mut p := Parser {
-file_path_id: id
 scanner: scanner
 v: v
 table: v.table
@@ -153,12 +158,8 @@ fn (v mut V) new_parser(scanner &Scanner, id string) Parser {
 os: v.os
 vroot: v.vroot
 local_vars: [Var{}].repeat(MaxLocalVars)
-import_table: v.table.get_file_import_table(id)
-v_script: id.ends_with('.vsh')
+import_table: new_import_table()
 }
-if p.v_script {
-println('new_parser: V script')
-}
 $if js {
 p.is_js = true
 }
@@ -241,7 +242,7 @@ fn (p &Parser) log(s string) {
 fn (p mut Parser) parse(pass Pass) {
 p.cgen.line = 0
-p.cgen.file = cescaped_path(os.realpath(p.file_path_id))
+p.cgen.file = cescaped_path(os.realpath(p.file_path))
 /////////////////////////////////////
 p.pass = pass
 p.token_idx = 0
@@ -281,9 +282,8 @@ fn (p mut Parser) parse(pass Pass) {
 }
 // fully qualify the module name, eg base64 to encoding.base64
 else {
-p.table.qualify_module(p.mod, p.file_path_id)
+p.table.qualify_module(p.mod, p.file_path)
 }
-p.import_table.module_name = fq_mod
 p.table.register_module(fq_mod)
 p.mod = fq_mod
@@ -294,8 +294,6 @@ fn (p mut Parser) parse(pass Pass) {
 if 'builtin' in p.table.imports {
 p.error('module `builtin` cannot be imported')
 }
-// save file import table
-p.table.file_imports[p.file_path_id] = p.import_table
 return
 }
 // Go through every top level token or throw a compilation error if a non-top level token is met
@@ -496,7 +494,7 @@ fn (p mut Parser) import_statement() {
 mod_alias = p.check_name()
 }
 // add import to file scope import table
-p.import_table.register_alias(mod_alias, mod, import_tok_idx)
+p.register_import_alias(mod_alias, mod, import_tok_idx)
 // Make sure there are no duplicate imports
 if mod in p.table.imports {
 return
@@ -2013,7 +2011,7 @@ struct $f.parent_fn {
 ', fname_tidx)
 }
 // Don't allow `arr.data`
-if field.access_mod == .private && !p.builtin_mod && !p.pref.translated && p.mod != typ.mod && p.file_path_id != 'vgen' {
+if field.access_mod == .private && !p.builtin_mod && !p.pref.translated && p.mod != typ.mod && !p.is_vgen {
 // println('$typ.name :: $field.name ')
 // println(field.access_mod)
 p.error_with_token_index('cannot refer to unexported field `$struct_field` (type `$typ.name`)\n' +
@@ -3617,7 +3615,7 @@ fn (p mut Parser) assert_statement() {
 p.gen('bool $tmp = ')
 p.check_types(p.bool_expression(), 'bool')
 // TODO print "expected: got" for failed tests
-filename := cescaped_path(p.file_path_id)
+filename := cescaped_path(p.file_path)
 p.genln(';
 \n
@@ -3906,7 +3904,7 @@ fn (p mut Parser) check_and_register_used_imported_type(typ_name string) {
 fn (p mut Parser) check_unused_imports() {
 // Don't run in the generated V file with `.str()`
-if p.file_path_id == 'vgen' {
+if p.is_vgen {
 return
 }
 mut output := ''
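A hedged sketch of what the new is_vgen flag replaces (paraphrasing the call sites in this commit; `v` is the assumed compiler instance): instead of tagging the generated-code parser with the magic file_path_id 'vgen', the parser is flagged explicitly, and the checks above key off that flag:

mut vgen_parser := v.new_parser_from_string(v.vgen_buf.str())
vgen_parser.is_vgen = true // marks the parser for generated V code (.str() methods etc)
vgen_parser.parse(.main)
// with is_vgen set, check_unused_imports() returns early and the
// cross-module / unexported-field restrictions above are relaxed for generated code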

View File

@@ -4,7 +4,6 @@
 module compiler
-import os
 import strings
 struct Table {
@@ -15,7 +14,6 @@ pub mut:
 obf_ids map[string]int // obf_ids['myfunction'] == 23
 modules []string // List of all modules registered by the application
 imports []string // List of all imports
-file_imports map[string]FileImportTable // List of imports for file
 cflags []CFlag // ['-framework Cocoa', '-lglfw3']
 fn_cnt int //atomic
 obfuscate bool
@@ -39,16 +37,6 @@ enum NameCategory {
 struct Name {
 cat NameCategory
 idx int // e.g. typ := types[name.idx]
-}
-// Holds import information scoped to the parsed file
-struct FileImportTable {
-mut:
-module_name string
-file_path_id string // file path or id
-imports map[string]string // alias => module
-used_imports []string // alias
-import_tok_idx map[string]int // module => idx
 }
 enum AccessMod {
@@ -872,96 +860,6 @@ fn is_compile_time_const(s_ string) bool {
 return true
 }
-// Once we have a module format we can read from module file instead
-// this is not optimal
-fn (table &Table) qualify_module(mod string, file_path string) string {
-for m in table.imports {
-if m.contains('.') && m.contains(mod) {
-m_parts := m.split('.')
-m_path := m_parts.join(os.path_separator)
-if mod == m_parts[m_parts.len-1] && file_path.contains(m_path) {
-return m
-}
-}
-}
-return mod
-}
-fn (table &Table) get_file_import_table(file_path_id string) FileImportTable {
-if file_path_id in table.file_imports {
-return table.file_imports[file_path_id]
-}
-return new_file_import_table(file_path_id)
-}
-fn new_file_import_table(file_path_id string) FileImportTable {
-return FileImportTable{
-file_path_id: file_path_id
-imports: map[string]string
-}
-}
-fn (fit &FileImportTable) known_import(mod string) bool {
-return mod in fit.imports || fit.is_aliased(mod)
-}
-fn (fit mut FileImportTable) register_import(mod string, tok_idx int) {
-fit.register_alias(mod, mod, tok_idx)
-}
-fn (fit mut FileImportTable) register_alias(alias string, mod string, tok_idx int) {
-// NOTE: come back here
-// if alias in fit.imports && fit.imports[alias] == mod {}
-if alias in fit.imports && fit.imports[alias] != mod {
-verror('cannot import $mod as $alias: import name $alias already in use in "${fit.file_path_id}"')
-}
-if mod.contains('.internal.') {
-mod_parts := mod.split('.')
-mut internal_mod_parts := []string
-for part in mod_parts {
-if part == 'internal' { break }
-internal_mod_parts << part
-}
-internal_parent := internal_mod_parts.join('.')
-if !fit.module_name.starts_with(internal_parent) {
-verror('module $mod can only be imported internally by libs')
-}
-}
-fit.imports[alias] = mod
-fit.import_tok_idx[mod] = tok_idx
-}
-fn (fit &FileImportTable) get_import_tok_idx(mod string) int {
-return fit.import_tok_idx[mod]
-}
-fn (fit &FileImportTable) known_alias(alias string) bool {
-return alias in fit.imports
-}
-fn (fit &FileImportTable) is_aliased(mod string) bool {
-for _, val in fit.imports {
-if val == mod {
-return true
-}
-}
-return false
-}
-fn (fit &FileImportTable) resolve_alias(alias string) string {
-return fit.imports[alias]
-}
-fn (fit mut FileImportTable) register_used_import(alias string) {
-if !(alias in fit.used_imports) {
-fit.used_imports << alias
-}
-}
-fn (fit &FileImportTable) is_used_import(alias string) bool {
-return alias in fit.used_imports
-}
 fn (t &Type) contains_field_type(typ string) bool {
 if !t.name[0].is_capital() {
 return false
@@ -982,17 +880,17 @@ fn (p &Parser) identify_typo(name string) string {
 min_match := 0.50 // for dice coefficient between 0.0 - 1.0
 mut output := ''
 // check imported modules
-mut n := p.table.find_misspelled_imported_mod(name_dotted, p.import_table, min_match)
+mut n := p.table.find_misspelled_imported_mod(name_dotted, p, min_match)
 if n != '' {
 output += '\n * module: `$n`'
 }
 // check consts
-n = p.table.find_misspelled_const(name, p.import_table, min_match)
+n = p.table.find_misspelled_const(name, p, min_match)
 if n != '' {
 output += '\n * const: `$n`'
 }
 // check functions
-n = p.table.find_misspelled_fn(name, p.import_table, min_match)
+n = p.table.find_misspelled_fn(name, p, min_match)
 if n != '' {
 output += '\n * function: `$n`'
 }
@@ -1005,7 +903,7 @@ fn (p &Parser) identify_typo(name string) string {
 }
 // find function with closest name to `name`
-fn (table &Table) find_misspelled_fn(name string, fit &FileImportTable, min_match f32) string {
+fn (table &Table) find_misspelled_fn(name string, p &Parser, min_match f32) string {
 mut closest := f32(0)
 mut closest_fn := ''
 n1 := if name.starts_with('main__') { name.right(6) } else { name }
@@ -1013,7 +911,7 @@ fn (table &Table) find_misspelled_fn(name string, fit &FileImportTable, min_matc
 if n1.len - f.name.len > 2 || f.name.len - n1.len > 2 { continue }
 if !(f.mod in ['', 'main', 'builtin']) {
 mut mod_imported := false
-for _, m in fit.imports {
+for _, m in p.import_table.imports {
 if f.mod == m {
 mod_imported = true
 break
@@ -1021,10 +919,10 @@ fn (table &Table) find_misspelled_fn(name string, fit &FileImportTable, min_matc
 }
 if !mod_imported { continue }
 }
-p := strings.dice_coefficient(n1, f.name)
+c := strings.dice_coefficient(n1, f.name)
 f_name_orig := mod_gen_name_rev(f.name.replace('__', '.'))
-if p > closest {
-closest = p
+if c > closest {
+closest = c
 closest_fn = f_name_orig
 }
 }
@@ -1032,16 +930,16 @@ fn (table &Table) find_misspelled_fn(name string, fit &FileImportTable, min_matc
 }
 // find imported module with closest name to `name`
-fn (table &Table) find_misspelled_imported_mod(name string, fit &FileImportTable, min_match f32) string {
+fn (table &Table) find_misspelled_imported_mod(name string, p &Parser, min_match f32) string {
 mut closest := f32(0)
 mut closest_mod := ''
 n1 := if name.starts_with('main.') { name.right(5) } else { name }
-for alias, mod in fit.imports {
+for alias, mod in p.import_table.imports {
 if n1.len - alias.len > 2 || alias.len - n1.len > 2 { continue }
 mod_alias := if alias == mod { alias } else { '$alias ($mod)' }
-p := strings.dice_coefficient(n1, alias)
-if p > closest {
-closest = p
+c := strings.dice_coefficient(n1, alias)
+if c > closest {
+closest = c
 closest_mod = '$mod_alias'
 }
 }
@@ -1049,20 +947,20 @@ fn (table &Table) find_misspelled_imported_mod(name string, fit &FileImportTable
 }
 // find const with closest name to `name`
-fn (table &Table) find_misspelled_const(name string, fit &FileImportTable, min_match f32) string {
+fn (table &Table) find_misspelled_const(name string, p &Parser, min_match f32) string {
 mut closest := f32(0)
 mut closest_const := ''
 mut mods_in_scope := ['builtin', 'main']
-for _, mod in fit.imports {
+for _, mod in p.import_table.imports {
 mods_in_scope << mod
 }
-for c in table.consts {
-if c.mod != fit.module_name && !(c.mod in mods_in_scope) && c.mod.contains('__') { continue }
-if name.len - c.name.len > 2 || c.name.len - name.len > 2 { continue }
-const_name_orig := mod_gen_name_rev(c.name.replace('__', '.'))
-p := strings.dice_coefficient(name, c.name.replace('builtin__', 'main__'))
-if p > closest {
-closest = p
+for cnst in table.consts {
+if cnst.mod != p.mod && !(cnst.mod in mods_in_scope) && cnst.mod.contains('__') { continue }
+if name.len - cnst.name.len > 2 || cnst.name.len - name.len > 2 { continue }
+const_name_orig := mod_gen_name_rev(cnst.name.replace('__', '.'))
+c := strings.dice_coefficient(name, cnst.name.replace('builtin__', 'main__'))
+if c > closest {
+closest = c
 closest_const = const_name_orig
 }
 }
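A likely reason for the local renames in these helpers (an inference, not stated in the commit): they now receive `p &Parser` as a parameter, so a local `p := strings.dice_coefficient(...)` would shadow it; renaming the score to `c` (and the consts loop variable to `cnst`) keeps the names distinct. The resulting call shape, with illustrative arguments, looks like this:

// `p` is the current parser; 0.50 is the min_match threshold used in identify_typo()
suggestion := p.table.find_misspelled_const('my_cosnt', p, 0.50)
if suggestion != '' {
	println('did you mean `$suggestion`?')
}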