toml: fix parsing of alphanumeric keys (#12517)
commit a894a6cf36
parent eec8788333

@@ -10,8 +10,10 @@ import toml.token
 import toml.scanner
 
 pub const (
 	all_formatting = [token.Kind.whitespace, .tab, .cr, .nl]
 	space_formatting = [token.Kind.whitespace, .tab]
+	keys_and_space_formatting = [token.Kind.whitespace, .tab, .minus, .bare, .quoted, .boolean,
+		.number, .underscore]
 )
 
 type DottedKey = []string

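For context: the new `keys_and_space_formatting` list collects every token kind that can occur inside a bare TOML key (digits, `-`, `_`, quoted fragments, and the bare words `true`/`false`) together with plain spacing, so later peeks can skip across a whole key such as `34-12` in one pass and see what follows it. A minimal, hypothetical stand-alone sketch (plain strings stand in for `token.Kind` values; the helper is mine, not from this commit):

const keys_and_space = ['whitespace', 'tab', 'minus', 'bare', 'quoted', 'boolean', 'number',
	'underscore']

// Return the index of the first token kind that is not part of the key/spacing run.
fn first_kind_outside(kinds []string, allowed []string) int {
	mut i := 0
	for i < kinds.len && kinds[i] in allowed {
		i++
	}
	return i
}

fn main() {
	// roughly the token kinds emitted for `34-12.2 = 42`, up to the value
	kinds := ['number', 'minus', 'number', 'period', 'number', 'whitespace', 'assign']
	idx := first_kind_outside(kinds, keys_and_space)
	println(kinds[idx]) // period -> the key is a dotted key
}
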
@@ -117,11 +119,13 @@ fn (mut p Parser) next() ? {
 	p.prev_tok = p.tok
 	p.tok = p.peek_tok
 	if p.tokens.len > 0 {
-		p.peek_tok = p.tokens.pop()
+		p.peek_tok = p.tokens.first()
+		p.tokens.delete(0)
 		p.peek(1) ?
 	} else {
 		p.peek(1) ?
-		p.peek_tok = p.tokens.pop()
+		p.peek_tok = p.tokens.first()
+		p.tokens.delete(0)
 	}
 }
 
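The queue handling change above is worth a note: V's `array.pop()` removes the *last* element, but the parser needs buffered tokens in source order, so the next token is now taken from the *front* with `first()` plus `delete(0)`. A tiny stand-alone illustration (not from the commit):

fn main() {
	mut stack_order := ['a', 'b', 'c']
	println(stack_order.pop()) // c -- LIFO, the wrong order for a token stream

	mut queue_order := ['a', 'b', 'c']
	next := queue_order.first() // a -- FIFO, preserves source order
	queue_order.delete(0)
	println(next)
	println(queue_order) // ['b', 'c']
}
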
@@ -402,10 +406,8 @@ pub fn (mut p Parser) root_table() ? {
 				continue
 			}
 			.bare, .quoted, .boolean, .number, .underscore { // NOTE .boolean allows for use of "true" and "false" as table keys
-				mut peek_tok := p.peek_tok
-
 				// Peek forward as far as we can skipping over space formatting tokens.
-				peek_tok, _ = p.peek_over(1, parser.space_formatting) ?
+				peek_tok, _ := p.peek_over(1, parser.keys_and_space_formatting) ?
 
 				if peek_tok.kind == .period {
 					p.ignore_while(parser.space_formatting)
@@ -482,7 +484,7 @@ pub fn (mut p Parser) root_table() ? {
 				p.ignore_while(parser.space_formatting)
 
 				// Peek forward as far as we can skipping over space formatting tokens.
-				peek_tok, _ = p.peek_over(1, parser.space_formatting) ?
+				peek_tok, _ = p.peek_over(1, parser.keys_and_space_formatting) ?
 
 				if p.tok.kind == .lsbr {
 					// Parse `[[table]]`
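With the wider peek list, `root_table` can look past an alphanumeric or minus-joined key and still spot the `.` that makes it a dotted key. The net effect on input like `34-12.2 = 42` (taken from the test data added below) is a nested table instead of a parse error. A sketch of the observable behaviour, assuming `toml.parse_text` accepts raw TOML text (the bundled test uses `toml.parse` on a file instead):

import toml
import toml.to

fn main() {
	doc := toml.parse_text('34-12.2 = 42') or { panic(err) }
	// dotted query: table "34-12", key "2" (same assertion as in key_test.v below)
	assert doc.value('34-12.2').int() == 42
	println(to.json(doc)) // { "34-12": { "2": 42 } }, see testdata/key_test.out
}
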
@@ -690,7 +692,7 @@ pub fn (mut p Parser) array_of_tables_contents() ?[]ast.Value {
 	p.ignore_while(parser.all_formatting)
 
 	match p.tok.kind {
-		.bare, .quoted, .boolean, .number {
+		.bare, .quoted, .boolean, .number, .underscore {
 			if p.peek_tok.kind == .period {
 				dotted_key := p.dotted_key() ?
 				p.ignore_while(parser.space_formatting)
@@ -829,7 +831,7 @@ pub fn (mut p Parser) double_array_of_tables_contents(target_key DottedKey) ?[]a
 		}
 
 		match p.tok.kind {
-			.bare, .quoted, .boolean, .number {
+			.bare, .quoted, .boolean, .number, .underscore {
 				if p.peek_tok.kind == .period {
 					mut dotted_key := p.dotted_key() ?
 					p.ignore_while(parser.space_formatting)
@@ -1001,9 +1003,11 @@ pub fn (mut p Parser) key() ?ast.Key {
 		if p.peek_tok.kind == .minus {
 			mut lits := p.tok.lit
 			pos := p.tok.position()
-			for p.peek_tok.kind != .assign {
+			for p.peek_tok.kind != .assign && p.peek_tok.kind != .period && p.peek_tok.kind != .rsbr {
 				p.next() ?
-				lits += p.tok.lit
+				if p.tok.kind !in parser.space_formatting {
+					lits += p.tok.lit
+				}
 			}
 			return ast.Key(ast.Bare{
 				text: lits
@@ -1036,7 +1040,7 @@ pub fn (mut p Parser) key() ?ast.Key {
 
 	if key is ast.Null {
 		return error(@MOD + '.' + @STRUCT + '.' + @FN +
-			' key expected .bare, .number, .quoted or .boolean but got "$p.tok.kind"')
+			' key expected .bare, .underscore, .number, .quoted or .boolean but got "$p.tok.kind"')
 	}
 
 	// A small exception that can't easily be done via `checker`

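Two things changed in `key()` for minus-joined bare keys: the accumulation loop now also stops at `.` and `]` (so a key before a dot or a closing table-header bracket no longer swallows those tokens), and spacing tokens are no longer appended to the literal (previously `34-11 = 23` produced the key "34-11 " with a trailing space, as the comments in the new test data note). A hypothetical stand-alone sketch of the new rule, with plain strings standing in for the parser's token kinds and literals:

// Stop at `=`, `.` or `]`, and keep spacing out of the key text.
fn join_bare_key(kinds []string, lits []string) string {
	mut text := ''
	for i, kind in kinds {
		if kind in ['assign', 'period', 'rsbr'] {
			break
		}
		if kind !in ['whitespace', 'tab'] {
			text += lits[i]
		}
	}
	return text
}

fn main() {
	// roughly the tokens for `34-11 = 23`, up to the value
	kinds := ['number', 'minus', 'number', 'whitespace', 'assign']
	lits := ['34', '-', '11', ' ', '=']
	println(join_bare_key(kinds, lits)) // 34-11 (no trailing space)
}
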
@@ -25,6 +25,8 @@ mut:
 	line_nr           int = 1 // current line number (y coordinate)
 	pos               int // current flat/index position in the `text` field
 	header_len        int // Length, how many bytes of header was found
+	// Quirks
+	is_left_of_assign bool = true // indicates if the scanner is on the *left* side of an assignment
 }
 
 // State is a read-only copy of the scanner's internal state.
@@ -165,6 +167,7 @@ pub fn (mut s Scanner) scan() ?token.Token {
 				return s.new_token(.plus, ascii, ascii.len)
 			}
 			`=` {
+				s.is_left_of_assign = false
 				util.printdbg(@MOD + '.' + @STRUCT + '.' + @FN, 'identified assignment "$ascii" ($ascii.len)')
 				return s.new_token(.assign, ascii, ascii.len)
 			}
@@ -345,6 +348,7 @@ fn (mut s Scanner) ignore_line() ?string {
 fn (mut s Scanner) inc_line_number() {
 	s.col = 0
 	s.line_nr++
+	s.is_left_of_assign = true
 }
 
 // extract_key parses and returns a TOML key as a string.
@@ -568,7 +572,8 @@ fn (mut s Scanner) extract_number() ?string {
 			s.col += 2
 		}
 		c = s.at()
-		if !(byte(c).is_hex_digit() || c in scanner.digit_extras) {
+		if !(byte(c).is_hex_digit() || c in scanner.digit_extras)
+			|| (c == `.` && s.is_left_of_assign) {
 			break
 		}
 		s.pos++

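Taken together, the scanner changes add one small piece of state: `is_left_of_assign` starts out true on every line, flips to false once `=` is scanned, and is consulted in `extract_number` so that a `.` terminates a number only on the key side of the assignment. That is what lets `1.2 = 3` be read as the dotted key `1.2` while a value such as `3.14` is still scanned as one float. A sketch of the observable behaviour (the `pi` key and the `f64()` call are my illustration, again assuming `toml.parse_text`):

import toml

fn main() {
	doc := toml.parse_text('1.2 = 3\npi = 3.14') or { panic(err) }
	assert doc.value('1.2').int() == 3 // key "1" -> key "2", as in testdata/key_test.out
	assert doc.value('pi').f64() == 3.14 // the right-hand `.` stays inside the number
	println('ok')
}
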
@@ -35,8 +35,6 @@ const (
 	// Inline-table
 	'inline-table/multiline.toml',
 	// Key
-	'key/numeric-dotted.toml',
-	'key/alphanum.toml',
 	'key/escapes.toml',
 ]
 

vlib/toml/tests/key_test.v (new file, 27 lines)
@@ -0,0 +1,27 @@
import os
import toml
import toml.to

fn test_keys() {
	toml_file :=
		os.real_path(os.join_path(os.dir(@FILE), 'testdata', os.file_name(@FILE).all_before_last('.'))) +
		'.toml'
	toml_doc := toml.parse(toml_file) or { panic(err) }

	mut value := toml_doc.value('34-11')
	assert value.int() == 23

	value = toml_doc.value('1.2')
	assert value.int() == 3

	value = toml_doc.value('34-12.2')
	assert value.int() == 42

	toml_json := to.json(toml_doc)
	out_file :=
		os.real_path(os.join_path(os.dir(@FILE), 'testdata', os.file_name(@FILE).all_before_last('.'))) +
		'.out'
	out_file_json := os.read_file(out_file) or { panic(err) }
	println(toml_json)
	assert toml_json == out_file_json
}

vlib/toml/tests/testdata/key_test.out (new file, vendored, 1 line)
@@ -0,0 +1 @@
{ "34-11": 23, "1": { "2": 3 }, "34-12": { "2": 42 }, "34-20": { }, "5": { "6": { } }, "34-13": { "2": { } } }

vlib/toml/tests/testdata/key_test.toml (new file, vendored, 9 lines)
@@ -0,0 +1,9 @@
34-11 = 23 # came out as "34-11 " = 23
1.2 = 3 # came out as "1.2" = 3
34-12.2 = 42 # came out as "34-12.2" = 42

[34-20]

[5.6]

[34-13.2]