mirror of https://github.com/vlang/v.git

toml: add support for validation of structure and values (#12481)

parent 5a89c0a480
commit bd9ac598f7
.github/workflows/toml_ci.yml (vendored, 4 changes)
@@ -18,6 +18,10 @@ jobs:
     steps:
     - uses: actions/checkout@v2
+    - name: Install dependencies
+      run: |
+        sudo apt-get update
+        sudo apt-get install --quiet -y jq
     - name: Build V
       run: make -j2 && ./v -cc gcc -o v cmd/v
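The only workflow change is installing jq; the test code below looks the tool up at runtime and skips the value comparison when it is missing, so the suite still passes on machines without it. A minimal standalone sketch of that lookup (illustrative, not part of the commit):

import os

fn main() {
    // Mirrors the `jq` constant in the test: an empty string means the value checks are skipped.
    jq := os.find_abs_path_of_executable('jq') or { '' }
    if jq == '' {
        println('jq not installed, value comparison would be skipped')
        return
    }
    println('jq found at $jq')
}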
@@ -9,10 +9,77 @@ import toml
 // TODO Goal: make value retrieval of all of https://github.com/BurntSushi/toml-test/test/ pass
 const (
     // Kept for easier handling of future updates to the tests
     valid_exceptions = []string{}
     invalid_exceptions = []string{}
+
+    valid_value_exceptions = [
+        // String
+        'string/double-quote-escape.toml',
+        'string/unicode-escape.toml',
+        'string/raw-multiline.toml',
+        'string/escapes.toml',
+        'string/escaped-escape.toml',
+        'string/nl.toml',
+        'string/escape-tricky.toml',
+        'string/multiline.toml',
+        // Integer
+        'integer/literals.toml',
+        'integer/long.toml',
+        // Float
+        'float/exponent.toml',
+        'float/underscore.toml',
+        'float/inf-and-nan.toml',
+        // Comment
+        'comment/tricky.toml',
+        // Table
+        'table/empty.toml',
+        'table/array-implicit.toml',
+        'table/sub-empty.toml',
+        'table/without-super.toml',
+        'table/whitespace.toml',
+        'table/names.toml',
+        'table/no-eol.toml',
+        'table/keyword.toml',
+        // Array
+        'array/string-quote-comma.toml',
+        'array/string-quote-comma-2.toml',
+        'array/table-array-string-backslash.toml',
+        // Date-time
+        'datetime/milliseconds.toml',
+        // Inline-table
+        'inline-table/multiline.toml',
+        // Key
+        'key/numeric-dotted.toml',
+        'key/alphanum.toml',
+        'key/escapes.toml',
+    ]
+
+    jq = os.find_abs_path_of_executable('jq') or { '' }
+    compare_work_dir_root = os.join_path(os.temp_dir(), 'v', 'toml', 'burntsushi')
+    // From: https://stackoverflow.com/a/38266731/1904615
+    jq_normalize = r'# Apply f to composite entities recursively using keys[], and to atoms
+def sorted_walk(f):
+  . as $in
+  | if type == "object" then
+      reduce keys[] as $key
+        ( {}; . + { ($key): ($in[$key] | sorted_walk(f)) } ) | f
+  elif type == "array" then map( sorted_walk(f) ) | f
+  else f
+  end;
+
+def normalize: sorted_walk(if type == "array" then sort else . end);
+
+normalize'
 )
+
+fn run(args []string) ?string {
+    res := os.execute(args.join(' '))
+    if res.exit_code != 0 {
+        return error('${args[0]} failed with return code ${res.exit_code}.\n$res.output')
+    }
+    return res.output
+}
 
 // test_burnt_sushi_tomltest run though 'testdata/burntsushi/toml-test/*' if found.
 fn test_burnt_sushi_tomltest() {
     this_file := @FILE
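The jq_normalize filter added above recursively sorts object keys and sorts arrays, so two JSON encodings of the same data normalize to identical text regardless of key or element order. A standalone sketch of applying it outside the test (assumes jq is on PATH; the file names and input are made up for the example):

import os

const jq_normalize = r'def sorted_walk(f): . as $in
  | if type == "object" then
      reduce keys[] as $key ( {}; . + { ($key): ($in[$key] | sorted_walk(f)) } ) | f
  elif type == "array" then map( sorted_walk(f) ) | f
  else f end;
def normalize: sorted_walk(if type == "array" then sort else . end);
normalize'

fn main() {
    work := os.temp_dir()
    norm_path := os.join_path(work, 'normalize.jq')
    input_path := os.join_path(work, 'unordered.json')
    os.write_file(norm_path, jq_normalize) or { panic(err) }
    os.write_file(input_path, '{ "b": 2, "a": [ 3, 1 ] }') or { panic(err) }
    // -S sorts object keys while printing, -f loads the filter file.
    res := os.execute('jq -S -f "$norm_path" "$input_path"')
    println(res.output) // the keys and the array come back in sorted order
}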
@@ -32,12 +99,6 @@ fn test_burnt_sushi_tomltest() {
         if relative !in valid_exceptions {
             println('OK [${i + 1}/$valid_test_files.len] "$valid_test_file"...')
             toml_doc := toml.parse_file(valid_test_file) or { panic(err) }
-
-            // parsed_json := toml_doc.to_json().replace(' ','')
-            // mut test_suite_json := os.read_file(valid_test_file.all_before_last('.')+'.json') or { panic(err) }
-            // test_suite_json = test_suite_json.replace('\n ','').replace(' ','')
-            // println(test_suite_json.replace('\n ','').replace(' ',''))
-            // assert parsed_json == test_suite_json
             valid++
         } else {
             e++
@@ -49,6 +110,64 @@ fn test_burnt_sushi_tomltest() {
         println('TODO Skipped parsing of $valid_exceptions.len valid TOML files...')
     }
+
+    // If the command-line tool `jq` is installed, value tests can be run as well.
+    if jq != '' {
+        println('Testing value output of $valid_test_files.len valid TOML files using "$jq"...')
+
+        if os.exists(compare_work_dir_root) {
+            os.rmdir_all(compare_work_dir_root) or { panic(err) }
+        }
+        os.mkdir_all(compare_work_dir_root) or { panic(err) }
+
+        jq_normalize_path := os.join_path(compare_work_dir_root, 'normalize.jq')
+        os.write_file(jq_normalize_path, jq_normalize) or { panic(err) }
+
+        valid = 0
+        e = 0
+        for i, valid_test_file in valid_test_files {
+            mut relative := valid_test_file.all_after(os.join_path('toml-test', 'tests',
+                'valid')).trim_left(os.path_separator)
+            $if windows {
+                relative = relative.replace('/', '\\')
+            }
+            // Skip the file if we know it can't be parsed or we know that the value retrieval needs work.
+            if relative !in valid_exceptions && relative !in valid_value_exceptions {
+                println('OK [${i + 1}/$valid_test_files.len] "$valid_test_file"...')
+                toml_doc := toml.parse_file(valid_test_file) or { panic(err) }
+
+                v_toml_json_path := os.join_path(compare_work_dir_root,
+                    os.file_name(valid_test_file).all_before_last('.') + '.v.json')
+                bs_toml_json_path := os.join_path(compare_work_dir_root,
+                    os.file_name(valid_test_file).all_before_last('.') + '.json')
+
+                os.write_file(v_toml_json_path, toml_doc.to_burntsushi()) or { panic(err) }
+
+                bs_json := os.read_file(valid_test_file.all_before_last('.') + '.json') or {
+                    panic(err)
+                }
+                os.write_file(bs_toml_json_path, bs_json) or { panic(err) }
+
+                v_normalized_json := run([jq, '-S', '-f "$jq_normalize_path"', v_toml_json_path]) or {
+                    panic(err)
+                }
+                bs_normalized_json := run([jq, '-S', '-f "$jq_normalize_path"', bs_toml_json_path]) or {
+                    panic(err)
+                }
+
+                assert bs_normalized_json == v_normalized_json
+
+                valid++
+            } else {
+                e++
+                println('SKIP [${i + 1}/$valid_test_files.len] "$valid_test_file" EXCEPTION [$e/$valid_value_exceptions.len]...')
+            }
+        }
+        println('$valid/$valid_test_files.len TOML files was parsed correctly and value checked')
+        if valid_value_exceptions.len > 0 {
+            println('TODO Skipped value checks of $valid_value_exceptions.len valid TOML files...')
+        }
+    }
 
     // TODO test cases where the parser should fail
     invalid_test_files := os.walk_ext(os.join_path(test_root, 'invalid'), '.toml')
     println('Testing $invalid_test_files.len invalid TOML files...')
@@ -9,6 +9,8 @@ import toml.input
 import toml.scanner
 import toml.parser
 import time
+import x.json2
+import strconv
 
 // Null is used in sumtype checks as a "default" value when nothing else is possible.
 pub struct Null {
@@ -165,11 +167,11 @@ fn (d Doc) ast_to_any(value ast.Value) Any {
             return Any((value as ast.Quoted).text)
         }
         ast.Number {
-            str := (value as ast.Number).text
-            if str.contains('.') {
-                return Any(str.f64())
+            if value.text.contains('.') || value.text.to_lower().contains('e') {
+                return Any(value.text.f64())
             }
-            return Any(str.i64())
+            v := strconv.parse_int(value.text, 0, 0) or { i64(0) }
+            return Any(v)
         }
         ast.Bool {
             str := (value as ast.Bool).text
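The ast.Number handling now treats any literal containing '.' or an 'e'/'E' exponent as a float, and routes everything else through strconv.parse_int with base 0, falling back to i64(0) on failure. A small hedged sketch of that base-0 behaviour (the hex example assumes the prefix handling mirrors Go's ParseInt; standalone, not part of the commit):

import strconv

fn main() {
    // Base 0 infers the base from the literal itself: a 0x prefix means hex, plain digits mean decimal.
    hex := strconv.parse_int('0xff', 0, 0) or { i64(0) }
    dec := strconv.parse_int('1979', 0, 0) or { i64(0) }
    bad := strconv.parse_int('not-a-number', 0, 0) or { i64(0) } // falls back to 0, as in ast_to_any
    println(hex) // 255
    println(dec) // 1979
    println(bad) // 0
}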
@@ -205,3 +207,75 @@ fn (d Doc) ast_to_any(value ast.Value) Any {
     // panic(@MOD + '.' + @STRUCT + '.' + @FN + ' can\'t convert "$value"')
     // return Any('')
 }
+
+// to_burntsushi returns a BurntSushi compatible json string of the complete document.
+pub fn (d Doc) to_burntsushi() string {
+    return d.to_burntsushi_(d.ast.table)
+}
+
+// to_burntsushi returns a BurntSushi compatible json string of the complete document.
+fn (d Doc) to_burntsushi_(value ast.Value) string {
+    match value {
+        ast.Quoted {
+            // txt := .replace(r'\\','\\').replace(r'\"','"')
+            json_text := json2.Any(value.text).json_str()
+            return '{ "type": "string", "value": "$json_text" }'
+        }
+        ast.DateTime {
+            // Normalization for json
+            json_text := json2.Any(value.text).json_str().to_upper().replace(' ', 'T')
+            typ := if json_text.ends_with('Z') || json_text.all_after('T').contains('-')
+                || json_text.all_after('T').contains('+') {
+                'datetime'
+            } else {
+                'datetime-local'
+            }
+            return '{ "type": "$typ", "value": "$json_text" }'
+        }
+        ast.Date {
+            json_text := json2.Any(value.text).json_str()
+            return '{ "type": "date-local", "value": "$json_text" }'
+        }
+        ast.Time {
+            json_text := json2.Any(value.text).json_str()
+            return '{ "type": "time-local", "value": "$json_text" }'
+        }
+        ast.Bool {
+            json_text := json2.Any(value.text.bool()).json_str()
+            return '{ "type": "bool", "value": "$json_text" }'
+        }
+        ast.Null {
+            json_text := json2.Any(value.text).json_str()
+            return '{ "type": "null", "value": "$json_text" }'
+        }
+        ast.Number {
+            if value.text.contains('.') || value.text.to_lower().contains('e') {
+                json_text := value.text.f64()
+                return '{ "type": "float", "value": "$json_text" }'
+            }
+            i64_ := strconv.parse_int(value.text, 0, 0) or { i64(0) }
+            return '{ "type": "integer", "value": "$i64_" }'
+        }
+        map[string]ast.Value {
+            mut str := '{ '
+            for key, val in value {
+                json_key := json2.Any(key).json_str()
+                str += ' "$json_key": ${d.to_burntsushi_(val)},'
+                // str += d.to_burntsushi_(val, indent+1)
+            }
+            str = str.trim_right(',')
+            str += ' }'
+            return str
+        }
+        []ast.Value {
+            mut str := '[ '
+            for val in value {
+                str += ' ${d.to_burntsushi_(val)},'
+            }
+            str = str.trim_right(',')
+            str += ' ]\n'
+            return str
+        }
+    }
+    return '<error>'
+}
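A short usage sketch of the new encoder (standalone; the temp file name and the expected output in the comments are illustrative, not taken from the commit):

import os
import toml

fn main() {
    path := os.join_path(os.temp_dir(), 'burntsushi_example.toml')
    os.write_file(path, 'title = "TOML"\npi = 3.14\nok = true') or { panic(err) }
    doc := toml.parse_file(path) or { panic(err) }
    // Roughly, modulo whitespace and key order:
    // { "title": { "type": "string", "value": "TOML" },
    //   "pi": { "type": "float", "value": "3.14" },
    //   "ok": { "type": "bool", "value": "true" } }
    println(doc.to_burntsushi())
}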