
vmod: add v.mod parser

Ned Palacios 2020-05-18 20:35:28 +08:00 committed by GitHub
parent 53ffee1e02
commit ae3df002a2
5 changed files with 317 additions and 5 deletions
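For reference, a minimal sketch of how the new module is meant to be used (not part of the commit; it mirrors the test file added below, and the printed fields are only examples):

import vmod

fn main() {
	// read and decode ./v.mod into a Manifest
	manifest := vmod.from_file('./v.mod') or { panic(err) }
	println(manifest.name)
	println(manifest.version)
	println(manifest.dependencies)
}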

@@ -22,7 +22,6 @@ fn cerror(e string){
fn vmod_content(name, desc string) string {
return [
'#V Project#\n',
'Module {',
' name: \'${name}\',',
' description: \'${desc}\',',

v.mod (2 changed lines)

@@ -1,5 +1,3 @@
#V Project#
Module {
name: 'V',
description: 'The V programming language.',

@@ -1,5 +1,3 @@
#V Module#
Module {
name: 'mod1',
description: 'A simple module, containing C code.',

@@ -0,0 +1,42 @@
import vmod
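// test_from_file parses the repository's own v.mod and checks the decoded fields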
fn test_from_file() {
data := vmod.from_file('./v.mod') or {
panic(err)
}
assert data.name == 'V'
assert data.description == 'The V programming language.'
assert data.version == '0.1.27'
assert data.dependencies.len == 0
}
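// decode() should accept a raw manifest string, fill the known Manifest fields,
// collect unrecognized keys such as 'test' in .unknown, and reject empty input
// with the 'vmod: no content.' error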
fn test_decode() {
content := "
Module {
name: \'foobar\',
description: \'Just a sample module\'
version: \'0.2.0\',
repo_url: \'https://gitlab.com\',
author: \'Fooz Bar\',
license: \'GPL-2.0\',
dependencies: [\'hello\'],
test: \'foo\'
}
"
data := vmod.decode(content) or {
println(err)
exit(1)
}
assert data.name == 'foobar'
assert data.version == '0.2.0'
assert data.description == 'Just a sample module'
assert data.repo_url == 'https://gitlab.com'
assert data.author == 'Fooz Bar'
assert data.license == 'GPL-2.0'
assert data.dependencies[0] == 'hello'
assert data.unknown['test'][0] == 'foo'
_ := vmod.decode('') or {
assert err == 'vmod: no content.'
exit(0)
}
}

vlib/v/vmod/parser.v (new file, 275 lines)

@@ -0,0 +1,275 @@
module vmod
import os
enum TokenKind {
module_keyword
field_key
lcbr
rcbr
labr
rabr
comma
colon
eof
str
ident
unknown
}
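// Manifest holds the data parsed from a v.mod file; keys the parser does not
// recognize are collected in the unknown map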
pub struct Manifest {
pub mut:
name string
version string
description string
dependencies []string
license string
repo_url string
author string
unknown map[string][]string
}
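// Scanner turns the raw v.mod text into a flat token list; Parser then
// consumes those tokens to build a Manifest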
struct Scanner {
mut:
pos int
text string
inside_text bool
tokens []Token
}
struct Parser {
mut:
file_path string
scanner Scanner
}
struct Token {
typ TokenKind
val string
}
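// from_file reads the v.mod file at vmod_path and decodes its contents into a Manifest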
pub fn from_file(vmod_path string) ?Manifest {
if !os.exists(vmod_path) {
return error('v.mod: v.mod file not found.')
}
contents := os.read_file(vmod_path) or {
panic('v.mod: cannot parse v.mod')
}
return decode(contents)
}
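// decode parses the raw contents of a v.mod file into a Manifest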
pub fn decode(contents string) ?Manifest {
mut parser := Parser{
scanner: Scanner{
pos: 0
text: contents
}
}
return parser.parse()
}
fn (mut s Scanner) tokenize(t_type TokenKind, val string) {
s.tokens << Token{t_type, val}
}
fn (mut s Scanner) skip_whitespace() {
for s.pos < s.text.len && s.text[s.pos].is_space() {
s.pos++
}
}
fn is_name_alpha(chr byte) bool {
return chr.is_letter() || chr == `_`
}
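// create_string consumes characters up to (but not including) the closing quote q;
// a backslash before the quote keeps scanning so an escaped quote does not end the string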
fn (mut s Scanner) create_string(q byte) string {
mut str := ''
for s.text[s.pos] != q {
if s.text[s.pos] == `\\` && s.text[s.pos + 1] == q {
str += s.text[s.pos..s.pos + 1]
s.pos += 2
} else {
str += s.text[s.pos].str()
s.pos++
}
}
return str
}
fn (mut s Scanner) create_ident() string {
mut text := ''
for is_name_alpha(s.text[s.pos]) {
text += s.text[s.pos].str()
s.pos++
}
return text
}
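// peek_char reports whether the character just before the current scanner
// position is c (used to recognize escaped quote characters)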
fn (s Scanner) peek_char(c byte) bool {
return s.pos - 1 < s.text.len && s.text[s.pos - 1] == c
}
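// scan_all walks the raw text and emits tokens: the Module keyword, field keys
// (an identifier directly followed by ':'), bare identifiers, quoted strings,
// braces, brackets, colons and commas; anything else becomes .unknown, and an
// .eof token is appended at the end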
fn (mut s Scanner) scan_all() {
for s.pos < s.text.len {
c := s.text[s.pos]
if c.is_space() || c == `\\` {
s.pos++
continue
}
if is_name_alpha(c) {
name := s.create_ident()
if name == 'Module' {
s.tokenize(.module_keyword, name)
s.pos++
continue
} else if s.text[s.pos] == `:` {
s.tokenize(.field_key, name + ':')
s.pos += 2
continue
} else {
s.tokenize(.ident, name)
s.pos++
continue
}
}
if c in [`\'`, `\"`] && !s.peek_char(`\\`) {
s.pos++
str := s.create_string(c)
s.tokenize(.str, str)
s.pos++
continue
}
match c {
`{` { s.tokenize(.lcbr, c.str()) }
`}` { s.tokenize(.rcbr, c.str()) }
`[` { s.tokenize(.labr, c.str()) }
`]` { s.tokenize(.rabr, c.str()) }
`:` { s.tokenize(.colon, c.str()) }
`,` { s.tokenize(.comma, c.str()) }
else { s.tokenize(.unknown, c.str()) }
}
s.pos++
}
s.tokenize(.eof, 'eof')
}
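// get_array_content reads an array literal starting at tokens[st_idx] (which must
// be '[') and returns the collected string values together with the index of the
// token just past the closing ']'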
fn get_array_content(tokens []Token, st_idx int) ?([]string, int) {
mut vals := []string{}
mut idx := st_idx
if tokens[idx].typ != .labr {
return error('vmod: not a valid array')
}
idx++
for {
tok := tokens[idx]
match tok.typ {
.str {
vals << tok.val
if tokens[idx + 1].typ !in [.comma, .rabr] {
return error('vmod: invalid separator "${tokens[idx+1].val}"')
}
idx += if tokens[idx + 1].typ == .comma {
2
} else {
1
}
}
.rabr {
idx++
break
}
else {
return error('vmod: invalid token "$tok.val"')
}
}
}
return vals, idx
}
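// parse expects a leading Module keyword followed by a braced list of
// field_key/value pairs; known fields are stored on the Manifest, array values
// are read via get_array_content, and unrecognized keys go into mn.unknown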
fn (mut p Parser) parse() ?Manifest {
err_label := 'vmod:'
if p.scanner.text.len == 0 {
return error('$err_label no content.')
}
p.scanner.scan_all()
tokens := p.scanner.tokens
mut mn := Manifest{}
if tokens[0].typ != .module_keyword {
panic('not a valid v.mod')
}
mut i := 1
for i < tokens.len {
tok := tokens[i]
match tok.typ {
.lcbr {
if tokens[i + 1].typ !in [.field_key, .rcbr] {
return error('$err_label invalid content after opening brace')
}
i++
continue
}
.rcbr {
break
}
.field_key {
field_name := tok.val.trim_right(':')
if tokens[i + 1].typ !in [.str, .labr] {
return error('$err_label value of field "$field_name" must be either string or an array of strings')
}
field_value := tokens[i + 1].val
match field_name {
'name' {
mn.name = field_value
}
'version' {
mn.version = field_value
}
'license' {
mn.license = field_value
}
'repo_url' {
mn.repo_url = field_value
}
'description' {
mn.description = field_value
}
'author' {
mn.author = field_value
}
'dependencies' {
deps, idx := get_array_content(tokens, i + 1) or {
return error(err)
}
mn.dependencies = deps
i = idx
continue
}
else {
if tokens[i + 1].typ == .labr {
vals, idx := get_array_content(tokens, i + 1) or {
return error(err)
}
mn.unknown[field_name] = vals
i = idx
continue
}
mn.unknown[field_name] = [field_value]
}
}
i += 2
continue
}
.comma {
if tokens[i - 1].typ !in [.str, .rabr] || tokens[i + 1].typ != .field_key {
return error('$err_label invalid comma placement')
}
i++
continue
}
else {
return error('$err_label invalid token "$tok.val"')
}
}
}
return mn
}