// Build and run files in ./prod/ folder, comparing their output to *.expected.txt files.
// (Similar to REPL tests, but in -prod mode.)
// import os
import compiler.tests.repl.runner
import benchmark
// Compiles every file under ./prod/ with -prod and checks its output
// against the matching *.expected.txt file, reporting per-file timing.
fn test_all_v_prod_files() {
	// TODO: Fix running this test on Windows:
	$if !windows {
		// Collect the prod test files, working dir and V executable path.
		opts := runner.new_prod_options()
		mut bench := benchmark.new_benchmark()
		for path in opts.files {
			bench.step()
			// Build & run the file in -prod mode; on failure the `or` block
			// records the error and moves on to the next file.
			res := runner.run_prod_file(opts.wd, opts.vexec, path) or {
				bench.fail()
				eprintln(bench.step_message_fail(err))
				// Mark the test as failed, but keep checking the remaining files.
				assert false
				continue
			}
			bench.ok()
			println(bench.step_message_ok(res))
			// Count the successful file as a passing assertion in the harness.
			assert true
		}
		bench.stop()
		println(bench.total_message('total time spent running PROD files'))
	}
}
|