import os
import term
import v.util
import v.util.vtest
import time
import sync
import runtime
import benchmark
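
// skip_files lists tests that are skipped by default
// (compile this test with `-d noskip` to run them too).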
const (
	skip_files = [
		'vlib/v/checker/tests/return_missing_comp_if.vv',
		'vlib/v/checker/tests/return_missing_comp_if_nested.vv',
		'vlib/v/checker/tests/custom_comptime_define_if_flag.vv',
	]
)
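
// TaskDescription describes a single test: which .vv file to compile/run,
// with what compiler options, plus the expected and the actually produced output.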
struct TaskDescription {
	vexe             string
	dir              string
	voptions         string
	result_extension string
	path             string
mut:
	is_error   bool
	is_skipped bool
	is_module  bool
	expected   string
	found___   string
	took       time.Duration
}
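
// test_all collects the .vv tests from the parser/checker test folders,
// turns them into tasks, then processes those tasks in parallel.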
fn test_all() {
	vexe := os.getenv('VEXE')
	vroot := os.dir(vexe)
	os.chdir(vroot)
	checker_dir := 'vlib/v/checker/tests'
	parser_dir := 'vlib/v/parser/tests'
	module_dir := '$checker_dir/modules'
	global_dir := '$checker_dir/globals'
	run_dir := '$checker_dir/run'
	//
	checker_tests := get_tests_in_dir(checker_dir, false)
	parser_tests := get_tests_in_dir(parser_dir, false)
	global_tests := get_tests_in_dir(global_dir, false)
	module_tests := get_tests_in_dir(module_dir, true)
	run_tests := get_tests_in_dir(run_dir, false)
	// -prod is used for the parser and checker tests, so that warns are errors
	mut tasks := []TaskDescription{}
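	// Each task compiles/runs a `x.vv` file and compares the produced output
	// to the content of the corresponding `x<result_extension>` file.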
	tasks.add(vexe, parser_dir, '-prod', '.out', parser_tests, false)
	tasks.add(vexe, checker_dir, '-prod', '.out', checker_tests, false)
	tasks.add(vexe, checker_dir, '-d mysymbol run', '.mysymbol.run.out', ['custom_comptime_define_error.vv'], false)
	tasks.add(vexe, checker_dir, '-d mydebug run', '.mydebug.run.out', ['custom_comptime_define_if_flag.vv'], false)
	tasks.add(vexe, checker_dir, '-d nodebug run', '.nodebug.run.out', ['custom_comptime_define_if_flag.vv'], false)
	tasks.add(vexe, checker_dir, '--enable-globals run', '.run.out', ['globals_error.vv'], false)
	tasks.add(vexe, global_dir, '--enable-globals', '.out', global_tests, false)
	tasks.add(vexe, module_dir, '-prod run', '.out', module_tests, true)
	tasks.add(vexe, run_dir, 'run', '.run.out', run_tests, false)
	tasks.run()
}
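
// add appends a new TaskDescription for each of the given test paths.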
fn (mut tasks []TaskDescription) add(vexe string, dir string, voptions string, result_extension string, tests []string, is_module bool) {
	paths := vtest.filter_vtest_only(tests, {
		basepath: dir
	})
	for path in paths {
		tasks << TaskDescription{
			vexe: vexe
			dir: dir
			voptions: voptions
			result_extension: result_extension
			path: path
			is_module: is_module
		}
	}
}
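
// bstep_message formats a single benchmark step line, with its label, message and duration.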
fn bstep_message(mut bench benchmark.Benchmark, label string, msg string, sduration time.Duration) string {
	return bench.step_message_with_label_and_duration(label, msg, sduration)
}

// process an array of tasks in parallel, using no more than vjobs worker threads
fn (mut tasks []TaskDescription) run() {
	vjobs := runtime.nr_jobs()
	mut bench := benchmark.new_benchmark()
	bench.set_total_expected_steps(tasks.len)
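	// the `work` channel feeds tasks to the worker threads,
	// which push each processed task back through the `results` channel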
	mut work := sync.new_channel<TaskDescription>(tasks.len)
	mut results := sync.new_channel<TaskDescription>(tasks.len)
	mut m_skip_files := skip_files
	$if noskip ? {
		m_skip_files = []
	}
	$if tinyc {
		// NB: tcc does not support __has_include, so the detection mechanism
		// used for the other compilers does not work. It still provides a
		// cleaner error message than a generic C error, but without the explanation.
		m_skip_files << 'vlib/v/checker/tests/missing_c_lib_header_1.vv'
		m_skip_files << 'vlib/v/checker/tests/missing_c_lib_header_with_explanation_2.vv'
	}
	for i in 0 .. tasks.len {
		if tasks[i].path in m_skip_files {
			tasks[i].is_skipped = true
		}
		unsafe {work.push(&tasks[i])}
	}
	work.close()
	for _ in 0 .. vjobs {
		go work_processor(mut work, mut results)
	}
	mut total_errors := 0
	for _ in 0 .. tasks.len {
		mut task := TaskDescription{}
		results.pop(&task)
		bench.step()
		if task.is_skipped {
			bench.skip()
			eprintln(bstep_message(mut bench, benchmark.b_skip, task.path, task.took))
			continue
		}
		if task.is_error {
			total_errors++
			bench.fail()
			eprintln(bstep_message(mut bench, benchmark.b_fail, task.path, task.took))
			println('============')
			println('expected:')
			println(task.expected)
			println('============')
			println('found:')
			println(task.found___)
			println('============\n')
			diff_content(task.expected, task.found___)
		} else {
			bench.ok()
			eprintln(bstep_message(mut bench, benchmark.b_ok, task.path, task.took))
		}
	}
	bench.stop()
	eprintln(term.h_divider('-'))
	eprintln(bench.total_message('all tests'))
	assert total_errors == 0
}

// a single worker thread spends its time getting work from the `work` channel,
// processing the task, and then putting the task in the `results` channel
fn work_processor(mut work sync.Channel, mut results sync.Channel) {
	for {
		mut task := TaskDescription{}
		if !work.pop(&task) {
			break
		}
		sw := time.new_stopwatch({})
		task.execute()
		task.took = sw.elapsed()
		results.push(&task)
	}
}

// actual processing; NB: no output is done here at all
fn (mut task TaskDescription) execute() {
	if task.is_skipped {
		return
	}
	program := task.path
	cli_cmd := '$task.vexe $task.voptions $program'
	res := os.exec(cli_cmd) or {
		panic(err)
	}
	mut expected := os.read_file(program.replace('.vv', '') + task.result_extension) or {
		panic(err)
	}
	task.expected = clean_line_endings(expected)
	task.found___ = clean_line_endings(res.output)
	$if windows {
		if task.is_module {
			task.found___ = task.found___.replace_once('\\', '/')
		}
	}
	if task.expected != task.found___ {
		task.is_error = true
	}
}
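
// clean_line_endings normalises whitespace and line endings (CRLF vs LF),
// so that output comparisons are not affected by platform differences.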
fn clean_line_endings(s string) string {
	mut res := s.trim_space()
	res = res.replace(' \n', '\n')
	res = res.replace(' \r\n', '\n')
	res = res.replace('\r\n', '\n')
	res = res.trim('\n')
	return res
}
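
// diff_content prints a colorized diff between the expected and the found output,
// when a working external diff tool is available.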
fn diff_content(s1 string, s2 string) {
	diff_cmd := util.find_working_diff_command() or {
		return
	}
	println('diff: ')
	println(util.color_compare_strings(diff_cmd, s1, s2))
	println('============\n')
}
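
// get_tests_in_dir returns the sorted test entries in `dir`:
// only `.vv` files normally, or everything except `.out` files for module tests.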
fn get_tests_in_dir(dir string, is_module bool) []string {
	files := os.ls(dir) or {
		panic(err)
	}
	mut tests := files
	if !is_module {
		tests = files.filter(it.ends_with('.vv'))
	} else {
		tests = files.filter(!it.ends_with('.out'))
	}
	tests.sort()
	return tests
}