Mirror of https://github.com/vlang/v.git (synced 2023-08-10 21:13:21 +03:00)
toml: fix a few string value tests (#12493)
commit 2f5fae06ee (parent 2733319879)

@@ -1,6 +1,7 @@
import os
import toml
import toml.ast
import toml.scanner
import x.json2
import strconv

@@ -17,12 +18,8 @@ const (

	valid_value_exceptions = [
		// String
		'string/double-quote-escape.toml',
		'string/unicode-escape.toml',
		'string/raw-multiline.toml',
		'string/raw-multiline.toml', // This test is not correct. Our parser *correctly* includes the newline at the start of the raw multiline.
		'string/escapes.toml',
		'string/escaped-escape.toml',
		'string/nl.toml',
		'string/escape-tricky.toml',
		'string/multiline.toml',
		// Integer
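
The paths above are BurntSushi toml-test cases whose decoded values are excluded from comparison; this hunk shrinks the string section (12 lines down to 8), apparently because basic-string escapes are now normalized by the toml_to_json_escapes helper added further below. A minimal sketch of how such an exception list is typically consulted inside the test loop, assuming a hypothetical relative_path variable for the current test file (the real loop in this test may differ):

if relative_path in valid_value_exceptions {
	// Hypothetical skip: value comparison is not performed for known exceptions.
	println('skipping value comparison for $relative_path')
	continue
}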

@@ -153,10 +150,12 @@ fn test_burnt_sushi_tomltest() {
	os.write_file(bs_toml_json_path, bs_json) or { panic(err) }

	v_normalized_json := run([jq, '-S', '-f "$jq_normalize_path"', v_toml_json_path]) or {
		panic(err)
		contents := os.read_file(v_toml_json_path) or { panic(err) }
		panic(err.msg + '\n$contents')
	}
	bs_normalized_json := run([jq, '-S', '-f "$jq_normalize_path"', bs_toml_json_path]) or {
		panic(err)
		contents := os.read_file(v_toml_json_path) or { panic(err) }
		panic(err.msg + '\n$contents')
	}

	assert bs_normalized_json == v_normalized_json
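
Both JSON outputs are normalized with jq before comparison: -S sorts object keys and -f applies the shared filter file referenced by jq_normalize_path, so the two files can be compared as plain strings in the final assert. The or-blocks now panic with the offending file's contents appended to the error message (previously a bare panic(err)). A hedged sketch of what a run([...]) helper along these lines could look like; the actual helper in this test file may differ:

import os

// Hypothetical run helper: join the arguments into a shell command,
// execute it, and return its output or an error carrying that output.
fn run(args []string) ?string {
	res := os.execute(args.join(' '))
	if res.exit_code != 0 {
		return error(res.output)
	}
	return res.output
}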

@@ -216,7 +215,13 @@ fn test_burnt_sushi_tomltest() {
fn to_burntsushi(value ast.Value) string {
	match value {
		ast.Quoted {
			json_text := json2.Any(value.text).json_str()
			mut json_text := ''
			if value.quote == `"` {
				json_text = toml_to_json_escapes(value) or { '<error>' }
			} else {
				json_text = json2.Any(value.text).json_str()
			}

			return '{ "type": "string", "value": "$json_text" }'
		}
		ast.DateTime {
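
The toml-test suite compares decoded values against tagged JSON objects of the form { "type": ..., "value": ... }, which is what the return statement above emits. With this change, basic (double-quoted) strings are escaped through the new toml_to_json_escapes helper below, while other quoting styles keep json2's escaping. For a hypothetical test document containing a basic string such as:

key = "line1\nline2"

the emitted toml-test style JSON for that value would look roughly like:

{ "type": "string", "value": "line1\nline2" }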

@@ -276,3 +281,49 @@ fn to_burntsushi(value ast.Value) string {
	}
	return '<error>'
}

// toml_to_json_escapes is a utility function for normalizing
// TOML basic string to JSON string
fn toml_to_json_escapes(q ast.Quoted) ?string {
	mut s := scanner.new_simple(q.text) ?
	mut r := ''
	for {
		ch := s.next()
		if ch == scanner.end_of_text {
			break
		}
		ch_byte := byte(ch)

		if ch == `"` {
			if byte(s.peek(-1)) != `\\` {
				r += '\\'
			}
		}

		if ch == `\\` {
			next_ch := byte(s.at())

			escape := ch_byte.ascii_str() + next_ch.ascii_str()
			if escape.to_lower() == '\\u' {
				mut b := s.next()
				mut unicode_point := ''
				for {
					b = s.next()
					if b != ` ` && b != scanner.end_of_text {
						unicode_point += byte(b).ascii_str()
					} else {
						break
					}
				}
				if unicode_point.len < 8 {
					unicode_point = '0'.repeat(8 - unicode_point.len) + unicode_point
				}
				rn := rune(strconv.parse_int(unicode_point, 16, 0) ?)
				r += '$rn'
				continue
			}
		}
		r += ch_byte.ascii_str()
	}
	return r
}
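
The helper walks the basic string with toml.scanner, escapes any double quote that is not already preceded by a backslash, and rewrites \u/\U escapes by zero-padding the collected hex digits to 8 characters, parsing them as base-16 with strconv.parse_int, and appending the resulting rune. A small standalone sketch of that unicode step, using a hypothetical code point and mirroring the parse_int/repeat calls above:

import strconv

fn main() {
	mut unicode_point := '1F600' // hypothetical hex digits taken from a \U escape
	if unicode_point.len < 8 {
		// zero-pad to 8 hex digits, as in toml_to_json_escapes above
		unicode_point = '0'.repeat(8 - unicode_point.len) + unicode_point
	}
	rn := rune(strconv.parse_int(unicode_point, 16, 0) or { 0 })
	println('decoded rune: $rn') // prints the decoded character
}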