2021-09-24 21:13:52 +03:00
|
|
|
import os
|
|
|
|
import toml
|
2021-11-17 18:25:50 +03:00
|
|
|
import toml.ast
|
|
|
|
import x.json2
|
2021-09-24 21:13:52 +03:00
|
|
|
|
2022-01-02 19:13:43 +03:00
|
|
|
// Set VTEST_HIDE_OK=1 in the environment to suppress the per-file 'OK [...]' progress lines.
const hide_oks = os.getenv('VTEST_HIDE_OK') == '1'
|
|
|
|
|
2021-09-24 21:13:52 +03:00
|
|
|
// Instructions for developers:
|
|
|
|
// The actual tests and data can be obtained by doing:
|
|
|
|
// `cd vlib/toml/tests/testdata`
|
|
|
|
// `git clone --depth 1 https://github.com/BurntSushi/toml-test.git burntsushi/toml-test`
|
|
|
|
// See also the CI toml tests
|
|
|
|
const (
	// Kept for easier handling of future updates to the tests
	// Valid TOML files that the parser is known to reject; skipped in the parse phase.
	valid_exceptions = [
		'comment/everywhere.toml',
	]
	// Invalid TOML files that the parser is known to accept; skipped in the invalid phase.
	invalid_exceptions = [
		'datetime/hour-over.toml',
		'datetime/mday-under.toml',
		'datetime/minute-over.toml',
		'datetime/month-under.toml',
		'datetime/second-over.toml',
	]
	// Valid TOML files whose *value output* is known to differ from the reference JSON;
	// skipped in the jq comparison phase only.
	valid_value_exceptions = []string{}
	// BUG with string interpolation of '${i64(-9223372036854775808)}') see below for workaround
	//'integer/long.toml', // TODO https://github.com/vlang/v/issues/9507
	// Absolute path of the `jq` executable, or '' when it is not installed
	// (the value-comparison phase is then skipped entirely).
	jq = os.find_abs_path_of_executable('jq') or { '' }
	// Scratch directory for the normalized JSON files produced during value comparison.
	compare_work_dir_root = os.join_path(os.vtmp_dir(), 'v', 'toml', 'burntsushi')
	// jq program that recursively sorts object keys and arrays, so that two JSON
	// documents can be compared for semantic equality as plain strings.
	// From: https://stackoverflow.com/a/38266731/1904615
	jq_normalize = r'# Apply f to composite entities recursively using keys[], and to atoms
def sorted_walk(f):
  . as $in
  | if type == "object" then
      reduce keys[] as $key
        ( {}; . + { ($key): ($in[$key] | sorted_walk(f)) } ) | f
  elif type == "array" then map( sorted_walk(f) ) | f
  else f
  end;

def normalize: sorted_walk(if type == "array" then sort else . end);

normalize'
)
|
|
|
|
|
2021-11-17 09:30:40 +03:00
|
|
|
// run joins `args` into a single shell command line, executes it, and returns
// the command's output. A non-zero exit code is turned into an error carrying
// the exit code and the captured output.
fn run(args []string) ?string {
	cmdline := args.join(' ')
	result := os.execute(cmdline)
	if result.exit_code == 0 {
		return result.output
	}
	return error('${args[0]} failed with return code ${result.exit_code}.\n${result.output}')
}
|
|
|
|
|
2021-09-24 21:13:52 +03:00
|
|
|
// test_burnt_sushi_tomltest runs through 'testdata/burntsushi/toml-test/*' if found.
// Phase 1 parses every valid TOML file, phase 2 (only when `jq` is installed)
// compares the parsed values against the suite's reference JSON, and phase 3
// checks that every invalid TOML file is rejected.
fn test_burnt_sushi_tomltest() {
	this_file := @FILE
	test_root := os.join_path(os.dir(this_file), 'testdata', 'burntsushi', 'toml-test',
		'tests')
	if !os.is_dir(test_root) {
		println('No test data directory found in "${test_root}"')
		assert true
		return
	}
	valid_folder := os.join_path('toml-test', 'tests', 'valid')
	invalid_folder := os.join_path('toml-test', 'tests', 'invalid')

	// Phase 1: every valid TOML file (minus known exceptions) must parse.
	valid_test_files := os.walk_ext(os.join_path(test_root, 'valid'), '.toml')
	println('Testing ${valid_test_files.len} valid TOML files...')
	mut valid := 0
	mut e := 0
	for i, valid_test_file in valid_test_files {
		mut relative := valid_test_file.all_after(valid_folder).trim_left(os.path_separator)
		$if windows {
			relative = relative.replace('/', '\\')
		}
		if relative in valid_exceptions {
			e++
			idx := valid_exceptions.index(relative) + 1
			println('SKIP [${i + 1}/${valid_test_files.len}] "${valid_test_file}" VALID EXCEPTION [${idx}/${valid_exceptions.len}]...')
			continue
		}
		if !hide_oks {
			println('OK [${i + 1}/${valid_test_files.len}] "${valid_test_file}"...')
		}
		// Only parse success matters in this phase; the document itself is not used.
		toml.parse_file(valid_test_file)?
		valid++
	}
	println('${valid}/${valid_test_files.len} TOML files were parsed correctly')
	if valid_exceptions.len > 0 {
		println('TODO Skipped parsing of ${valid_exceptions.len} valid TOML files...')
	}

	// Phase 2: if the command-line tool `jq` is installed, value tests can be run as well.
	if jq != '' {
		println('Testing value output of ${valid_test_files.len} valid TOML files using "${jq}"...')

		if os.exists(compare_work_dir_root) {
			os.rmdir_all(compare_work_dir_root)!
		}
		os.mkdir_all(compare_work_dir_root)!

		jq_normalize_path := os.join_path(compare_work_dir_root, 'normalize.jq')
		os.write_file(jq_normalize_path, jq_normalize)!

		valid = 0
		e = 0
		for i, valid_test_file in valid_test_files {
			mut relative := valid_test_file.all_after(valid_folder).trim_left(os.path_separator)
			$if windows {
				relative = relative.replace('/', '\\')
			}
			// Skip the file if we know it can't be parsed or we know that the value retrieval needs work.
			if relative in valid_exceptions {
				e++
				idx := valid_exceptions.index(relative) + 1
				println('SKIP [${i + 1}/${valid_test_files.len}] "${valid_test_file}" VALID EXCEPTION [${idx}/${valid_exceptions.len}]...')
				continue
			}
			if relative in valid_value_exceptions {
				e++
				idx := valid_value_exceptions.index(relative) + 1
				println('SKIP [${i + 1}/${valid_test_files.len}] "${valid_test_file}" VALID VALUE EXCEPTION [${idx}/${valid_value_exceptions.len}]...')
				continue
			}

			if !hide_oks {
				println('OK [${i + 1}/${valid_test_files.len}] "${valid_test_file}"...')
			}
			toml_doc := toml.parse_file(valid_test_file)!

			// <name>.v.json holds our encoding, <name>.json the BurntSushi reference.
			v_toml_json_path := os.join_path(compare_work_dir_root,
				os.file_name(valid_test_file).all_before_last('.') + '.v.json')
			bs_toml_json_path := os.join_path(compare_work_dir_root,
				os.file_name(valid_test_file).all_before_last('.') + '.json')

			os.write_file(v_toml_json_path, to_burntsushi(toml_doc.ast.table))!

			bs_json := os.read_file(valid_test_file.all_before_last('.') + '.json')!
			os.write_file(bs_toml_json_path, bs_json)!

			// Normalize both JSON documents with jq so that key/array order
			// differences do not cause spurious mismatches.
			v_normalized_json := run([jq, '-S', '-f "${jq_normalize_path}"', v_toml_json_path]) or {
				contents := os.read_file(v_toml_json_path)!
				panic(err.msg() + '\n${contents}')
			}
			bs_normalized_json := run([jq, '-S', '-f "${jq_normalize_path}"', bs_toml_json_path]) or {
				// Fixed: previously this read v_toml_json_path, dumping the wrong
				// file when jq failed on the BurntSushi reference JSON.
				contents := os.read_file(bs_toml_json_path)!
				panic(err.msg() + '\n${contents}')
			}

			assert bs_normalized_json == v_normalized_json

			valid++
		}
		println('${valid}/${valid_test_files.len} TOML files were parsed correctly and value checked')
		if valid_value_exceptions.len > 0 {
			println('TODO Skipped value checks of ${valid_value_exceptions.len} valid TOML files...')
		}
	}

	// Phase 3: every invalid TOML file (minus known exceptions) must FAIL to parse.
	invalid_test_files := os.walk_ext(os.join_path(test_root, 'invalid'), '.toml')
	println('Testing ${invalid_test_files.len} invalid TOML files...')
	mut invalid := 0
	e = 0
	for i, invalid_test_file in invalid_test_files {
		mut relative := invalid_test_file.all_after(invalid_folder).trim_left(os.path_separator)
		$if windows {
			relative = relative.replace('/', '\\')
		}
		if relative in invalid_exceptions {
			e++
			idx := invalid_exceptions.index(relative) + 1
			println('SKIP [${i + 1}/${invalid_test_files.len}] "${invalid_test_file}" INVALID EXCEPTION [${idx}/${invalid_exceptions.len}]...')
			continue
		}
		if !hide_oks {
			println('OK [${i + 1}/${invalid_test_files.len}] "${invalid_test_file}"...')
		}
		// The parsed document is never used; `_` avoids an unused-variable binding.
		if _ := toml.parse_file(invalid_test_file) {
			content_that_should_have_failed := os.read_file(invalid_test_file)!
			println(' This TOML should have failed:\n${'-'.repeat(40)}\n${content_that_should_have_failed}\n${'-'.repeat(40)}')
			assert false
		} else {
			if !hide_oks {
				println(' ${err.msg()}')
			}
			assert true
		}
		invalid++
	}
	println('${invalid}/${invalid_test_files.len} TOML files were parsed correctly')
	if invalid_exceptions.len > 0 {
		println('TODO Skipped parsing of ${invalid_exceptions.len} invalid TOML files...')
	}
}
|
2021-11-17 18:25:50 +03:00
|
|
|
|
|
|
|
// to_burntsushi returns a BurntSushi compatible json string converted from the `value` ast.Value.
// Every scalar is encoded as `{ "type": <toml-type>, "value": <json> }`, matching the
// decoder-output format expected by the BurntSushi toml-test suite; tables become JSON
// objects and arrays become JSON arrays of such entries.
fn to_burntsushi(value ast.Value) string {
	match value {
		ast.Quoted {
			// json_str() produces a quoted, escaped JSON string literal.
			json_text := json2.Any(value.text).json_str()
			return '{ "type": "string", "value": ${json_text} }'
		}
		ast.DateTime {
			// Normalization for json: uppercase and RFC 3339 'T' separator.
			json_text := json2.Any(value.text).json_str().to_upper().replace(' ', 'T')
			// Note: Since encoding strings in JSON now automatically includes quotes,
			// I added a somewhat a workaround by adding an ending quote in order to
			// recognize properly the date time type. - Ned
			// A trailing 'Z' or an explicit +/- offset after the time part marks an
			// offset datetime; everything else is a local datetime.
			typ := if json_text.ends_with('Z"') || json_text.all_after('T').contains('-')
				|| json_text.all_after('T').contains('+') {
				'datetime'
			} else {
				'datetime-local'
			}
			return '{ "type": "${typ}", "value": ${json_text} }'
		}
		ast.Date {
			json_text := json2.Any(value.text).json_str()
			return '{ "type": "date-local", "value": ${json_text} }'
		}
		ast.Time {
			json_text := json2.Any(value.text).json_str()
			return '{ "type": "time-local", "value": ${json_text} }'
		}
		ast.Bool {
			// value.text.bool() yields a V bool; json_str() renders it unquoted,
			// so the surrounding quotes here produce the string "true"/"false".
			json_text := json2.Any(value.text.bool()).json_str()
			return '{ "type": "bool", "value": "${json_text}" }'
		}
		ast.Null {
			json_text := json2.Any(value.text).json_str()
			return '{ "type": "null", "value": ${json_text} }'
		}
		ast.Number {
			// inf/nan are emitted verbatim from the TOML source text.
			if value.text.contains('inf') || value.text.contains('nan') {
				return '{ "type": "float", "value": "${value.text}" }'
			}
			// A '.' or exponent marks a float — unless the literal is hex,
			// where 'e' is a digit, not an exponent.
			if !value.text.starts_with('0x')
				&& (value.text.contains('.') || value.text.to_lower().contains('e')) {
				mut val := '${value.f64()}'.replace('.e+', '.0e') // JSON notation
				if !val.contains('.') && val != '0' { // JSON notation
					val += '.0'
				}
				// Since https://github.com/vlang/v/pull/16079 V's string conversion of a zero (0) will
				// output "0.0" for float types - the JSON test suite data, however, expects "0" for floats
				// The following is a correction for that inconsistency
				if val == '0.0' {
					val = '0'
				}
				return '{ "type": "float", "value": "${val}" }'
			}
			v := value.i64()
			// TODO workaround https://github.com/vlang/v/issues/9507
			// i64 min cannot be interpolated directly, so it is special-cased.
			if v == i64(-9223372036854775807 - 1) {
				return '{ "type": "integer", "value": "-9223372036854775808" }'
			}
			return '{ "type": "integer", "value": "${v}" }'
		}
		map[string]ast.Value {
			// Recursively encode a table; the trailing comma of the last entry
			// is trimmed afterwards.
			mut str := '{ '
			for key, val in value {
				json_key := json2.Any(key).json_str()
				str += ' ${json_key}: ${to_burntsushi(val)},'
			}
			str = str.trim_right(',')
			str += ' }'
			return str
		}
		[]ast.Value {
			// Recursively encode an array. The trailing '\n' differs from the
			// table branch; jq normalization makes the difference harmless.
			mut str := '[ '
			for val in value {
				str += ' ${to_burntsushi(val)},'
			}
			str = str.trim_right(',')
			str += ' ]\n'
			return str
		}
	}
	// Unreachable when the match above is exhaustive; kept as a safety net.
	return '<error>'
}
|