Mirror of https://github.com/vlang/v.git (synced 2023-08-10 21:13:21 +03:00)
compiler: implement -stats option for running a _test.v file
* Draft implementation of `v -stats file_test.v`.
* compiler: call stuff in vlib/benchmark/tests/always_imported.v when doing `v -stats file_test.v`.
* Nicer looking output from `v -stats file_test.v`.
* Tweak colors and layout of `-stats file_test.v`.
* Fix a hardcoded path in compiler/main.v.
* Show colorized OK/FAIL for the examples in `v test v` too.
* Add some comments about the purpose of the methods inside vlib/benchmark/tests/always_imported.v.
* When fails are 0, do not colorize their number at all.
committed by Alexander Medvednikov
parent f1923d454c
commit ac5241b5bd
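For context, a minimal sketch of how the new option is meant to be used, assuming a hypothetical test file named math_test.v (the file name and test bodies are illustrative, not part of this commit):

// math_test.v -- hypothetical example, only to illustrate `v -stats`
fn test_addition() {
	assert 2 + 2 == 4
}

fn test_subtraction() {
	assert 7 - 3 == 4
}

Running `v -stats math_test.v` compiles the file and, because of the changes below, also pulls in vlib/benchmark/tests/always_imported.v, so each test_ function is timed and reported individually, together with a colorized OK/FAIL summary.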
@@ -8,6 +8,7 @@ import (
 	os
 	strings
+	benchmark
 	term
 )
 
 const (
@@ -95,6 +96,7 @@ mut:
 	sanitize bool // use Clang's new "-fsanitize" option
 	is_debuggable bool
 	is_debug bool // keep compiled C files
+	is_stats bool // `v -stats file_test.v` will produce more detailed statistics for the tests that were run
 	no_auto_free bool // `v -nofree` disable automatic `free()` insertion for better performance in some applications (e.g. compilers)
 	cflags string // Additional options which will be passed to the C compiler.
 	// For example, passing -cflags -Os will cause the C compiler to optimize the generated binaries for size.
@@ -108,7 +110,6 @@ mut:
 	// to increase compilation time.
 	// This is on by default, since a vast majority of users do not
 	// work on the builtin module itself.
-
 }
 
 fn main() {
@@ -181,7 +182,12 @@ fn main() {
 		return
 	}
 
-	v.compile()
+	mut tmark := benchmark.new_benchmark()
+	v.compile()
+	if v.pref.is_stats {
+		tmark.stop()
+		println( 'compilation took: ' + tmark.total_duration().str() + 'ms')
+	}
 
 	if v.pref.is_test {
 		v.run_compiled_executable_and_exit()
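The hunk above times the whole compilation with the vlib benchmark module. A standalone sketch of the same pattern, using only the calls that appear in the hunk (new_benchmark, stop, total_duration); the summing loop is just a stand-in workload:

import benchmark

fn main() {
	mut tmark := benchmark.new_benchmark()
	mut sum := 0
	for i in 0 .. 1000000 {
		sum += i // stand-in for the work being measured
	}
	tmark.stop()
	println('summing took: ' + tmark.total_duration().str() + 'ms (sum: $sum)')
}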
@@ -309,7 +315,8 @@ fn (v mut V) compile() {
 	//cgen.genln('i64 total_m = 0; // For counting total RAM allocated')
 	//if v.pref.is_test {
 	$if !js {
-		cgen.genln('int g_test_ok = 1; ')
+		cgen.genln('int g_test_oks = 0;')
+		cgen.genln('int g_test_fails = 0;')
 	}
 	if imports_json {
 		cgen.genln('
@@ -459,12 +466,18 @@ string _STR_TMP(const char *fmt, ...) {
 	}
 	// Generate a C `main`, which calls every single test function
 	v.gen_main_start(false)
+
+	if v.pref.is_stats { cgen.genln('BenchedTests bt = main__start_testing();') }
+
 	for _, f in v.table.fns {
 		if f.name.starts_with('main__test_') {
-			cgen.genln('$f.name();')
+			if v.pref.is_stats { cgen.genln('BenchedTests_testing_step_start(&bt, tos3("$f.name"));') }
+			cgen.genln('$f.name();')
+			if v.pref.is_stats { cgen.genln('BenchedTests_testing_step_end(&bt);') }
 		}
 	}
-	v.gen_main_end('return g_test_ok == 0')
+	if v.pref.is_stats { cgen.genln('BenchedTests_end_testing(&bt);') }
+	v.gen_main_end('return g_test_fails > 0')
 	}
 	else if v.table.main_exists() {
 		v.gen_main_start(true)
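The generated C above calls into a BenchedTests struct that lives in vlib/benchmark/tests/always_imported.v; main__start_testing, BenchedTests_testing_step_start and friends are the C-mangled names of module-main V functions and methods. The real contents of that file are not part of this hunk; the following is only a hedged sketch of the shape those calls imply, written in the same old-style V as the rest of the diff (the bench field and the printed messages are assumptions):

// Sketch only -- inferred from the generated calls above, not the real file.
import benchmark

struct BenchedTests {
mut:
	bench benchmark.Benchmark // assumed field holding the shared timer
}

// Corresponds to the generated `main__start_testing()` call.
fn start_testing() BenchedTests {
	return BenchedTests{ bench: benchmark.new_benchmark() }
}

// Corresponds to `BenchedTests_testing_step_start(&bt, tos3("..."))`.
fn (b mut BenchedTests) testing_step_start(tname string) {
	println('   testing: ' + tname)
}

// Corresponds to `BenchedTests_testing_step_end(&bt)`.
fn (b mut BenchedTests) testing_step_end() {
	b.bench.ok()
}

// Corresponds to `BenchedTests_end_testing(&bt)`.
fn (b mut BenchedTests) end_testing() {
	b.bench.stop()
	println('   running V tests took: ' + b.bench.total_duration().str() + 'ms')
}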
@@ -636,6 +649,11 @@ fn (v &V) get_user_files() []string {
 	// Need to store user files separately, because they have to be added after libs, but we dont know
 	// which libs need to be added yet
 	mut user_files := []string
+
+	if v.pref.is_test && v.pref.is_stats {
+		user_files << [v.vroot, 'vlib', 'benchmark', 'tests', 'always_imported.v'].join( os.PathSeparator )
+	}
+
 	// v volt/slack_test.v: compile all .v files to get the environment
 	// I need to implement user packages! TODO
 	is_test_with_imports := dir.ends_with('_test.v') &&
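The new block above builds the path to always_imported.v from its components instead of hard-coding a separator (the commit message mentions fixing a hardcoded path in compiler/main.v). A tiny standalone illustration of the same join idiom, assuming only the os.PathSeparator constant used in the hunk:

import os

fn main() {
	parts := ['vlib', 'benchmark', 'tests', 'always_imported.v']
	p := parts.join(os.PathSeparator)
	println(p) // vlib/benchmark/tests/always_imported.v on Unix, backslashes on Windows
}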
@@ -883,6 +901,7 @@ fn new_v(args[]string) &V {
 		is_verbose: '-verbose' in args || '--verbose' in args
 		is_debuggable: '-g' in args
 		is_debug: '-debug' in args || '-g' in args
+		is_stats: '-stats' in args
 		obfuscate: obfuscate
 		is_prof: '-prof' in args
 		is_live: '-live' in args
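As with the other boolean options in new_v, `-stats` is just a membership test over the raw argument list. A minimal sketch of that pattern outside the compiler (the Prefs struct below is illustrative, not the compiler's real type):

import os

struct Prefs {
	is_stats bool
}

fn main() {
	prefs := Prefs{
		is_stats: '-stats' in os.args // true when the flag is present anywhere on the command line
	}
	println('is_stats: $prefs.is_stats')
}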
@@ -1052,6 +1071,8 @@ fn (v &V) test_v() {
 	mut failed := false
 	test_files := os.walk_ext(parent_dir, '_test.v')
 
+	ok := term.ok_message('OK')
+	fail := term.fail_message('FAIL')
 	println('Testing...')
 	mut tmark := benchmark.new_benchmark()
 	for dot_relative_file in test_files {
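ok and fail above are precomputed colorized strings from vlib's term module, so the loops below can interpolate them into every step message. A small sketch using just the two calls introduced here:

import term

fn main() {
	ok := term.ok_message('OK') // rendered green when the output supports color
	fail := term.fail_message('FAIL') // rendered red when the output supports color
	println('example_test.v $ok')
	println('broken_test.v $fail')
}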
@@ -1066,16 +1087,16 @@ fn (v &V) test_v() {
 		r := os.exec(cmd) or {
 			tmark.fail()
 			failed = true
-			println(tmark.step_message('$relative_file FAIL'))
+			println(tmark.step_message('$relative_file $fail'))
 			continue
 		}
 		if r.exit_code != 0 {
 			failed = true
 			tmark.fail()
-			println(tmark.step_message('$relative_file FAIL \n`$file`\n (\n$r.output\n)'))
+			println(tmark.step_message('$relative_file $fail\n`$file`\n (\n$r.output\n)'))
 		} else {
 			tmark.ok()
-			println(tmark.step_message('$relative_file OK'))
+			println(tmark.step_message('$relative_file $ok'))
 		}
 		os.rm( tmpc_filepath )
 	}
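The same ok/fail strings are fed into benchmark.step_message so each test line carries both timing info and the colorized status. A standalone sketch of that reporting loop, using only calls that appear in this function (new_benchmark, ok, fail, step_message, stop); the item list is a stand-in for the _test.v files:

import benchmark
import term

fn main() {
	ok := term.ok_message('OK')
	fail := term.fail_message('FAIL')
	mut tmark := benchmark.new_benchmark()
	for file in ['a_test.v', 'b_test.v', 'c_test.v'] {
		if file == 'b_test.v' {
			// pretend this file failed
			tmark.fail()
			println(tmark.step_message('$file $fail'))
			continue
		}
		tmark.ok()
		println(tmark.step_message('$file $ok'))
	}
	tmark.stop()
}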
@@ -1097,16 +1118,16 @@ fn (v &V) test_v() {
 		r := os.exec(cmd) or {
 			failed = true
 			bmark.fail()
-			println(bmark.step_message('$relative_file FAIL'))
+			println(bmark.step_message('$relative_file $fail'))
 			continue
 		}
 		if r.exit_code != 0 {
 			failed = true
 			bmark.fail()
-			println(bmark.step_message('$relative_file FAIL \n`$file`\n (\n$r.output\n)'))
+			println(bmark.step_message('$relative_file $fail \n`$file`\n (\n$r.output\n)'))
 		} else {
 			bmark.ok()
-			println(bmark.step_message('$relative_file OK'))
+			println(bmark.step_message('$relative_file $ok'))
 		}
 		os.rm(tmpc_filepath)
 	}
@@ -3821,16 +3821,20 @@ fn (p mut Parser) assert_statement() {
 	p.check_types(p.bool_expression(), 'bool')
 	// TODO print "expected: got" for failed tests
 	filename := p.file_path.replace('\\', '\\\\')
-	p.genln(';\n
+	p.genln(';
+\n
+
 if (!$tmp) {
   println(tos2((byte *)"\\x1B[31mFAILED: $p.cur_fn.name() in $filename:$p.scanner.line_nr\\x1B[0m"));
-  g_test_ok = 0 ;
-  // TODO
-  // Maybe print all vars in a test function if it fails?
+  g_test_fails++;
+  // TODO
+  // Maybe print all vars in a test function if it fails?
+} else {
+  g_test_oks++;
+  //println(tos2((byte *)"\\x1B[32mPASSED: $p.cur_fn.name()\\x1B[0m"));
 }
-else {
-  //puts("\\x1B[32mPASSED: $p.cur_fn.name()\\x1B[0m");
-}')
+
+')
 }
 
 fn (p mut Parser) return_st() {
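The assert codegen above now increments g_test_oks / g_test_fails instead of flipping a single g_test_ok flag, which is what lets the test runner report per-file counts. For reference, a failing assert in an ordinary test file is what exercises the generated FAILED branch (hypothetical file, not part of this commit):

// fail_demo_test.v -- hypothetical; a run of this file hits the generated
// branch above: a red "FAILED: test_square in <file>:<line>" line is printed
// and g_test_fails is incremented.
fn test_square() {
	x := 4
	assert x * x == 17
}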