
all: change optional to result of io (#16075)

Author: yuyi
Date: 2022-10-16 14:28:57 +08:00
Committed by: GitHub
Parent: 6e46933c55
Commit: f6844e9766
187 changed files with 1885 additions and 1874 deletions
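
In V, the old optional type `?T` doubled as the error-carrying return type; the newer result type `!T` takes over that role (a value or an error), leaving `?T` for plain option values. This commit migrates the io-related functions and their call sites from optionals to results, so propagation changes from a trailing `?` to a trailing `!`, while `or { ... }` handling and `err` stay the same. A minimal before/after sketch of a call site, assuming the post-commit `os` API; the `load` helper and the `config.toml` path are made up for illustration:

import os

// Before this commit: optional return, errors propagated with `?`
// fn load(path string) ?string {
// 	return os.read_file(path)?
// }

// After this commit: result return, errors propagated with `!`
fn load(path string) !string {
	return os.read_file(path)!
}

fn main() {
	// `main` cannot propagate, so handle the error explicitly with `or`.
	content := load('config.toml') or {
		eprintln('failed to read file: ${err.msg()}')
		return
	}
	println(content.len)
}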

View File

@@ -54,7 +54,7 @@ def normalize: sorted_walk(if type == "array" then sort else . end);
normalize'
)
-fn run(args []string) ?string {
+fn run(args []string) !string {
res := os.execute(args.join(' '))
if res.exit_code != 0 {
return error('${args[0]} failed with return code ${res.exit_code}.\n$res.output')
@@ -87,7 +87,7 @@ fn test_alexcrichton_toml_rs() {
if !hide_oks {
println('OK [${i + 1}/$valid_test_files.len] "$valid_test_file"...')
}
-toml_doc := toml.parse_file(valid_test_file)?
+toml_doc := toml.parse_file(valid_test_file)!
valid++
}
println('$valid/$valid_test_files.len TOML files were parsed correctly')
@@ -100,12 +100,12 @@ fn test_alexcrichton_toml_rs() {
println('Testing value output of $valid_test_files.len valid TOML files using "$jq"...')
if os.exists(compare_work_dir_root) {
-os.rmdir_all(compare_work_dir_root)?
+os.rmdir_all(compare_work_dir_root)!
}
-os.mkdir_all(compare_work_dir_root)?
+os.mkdir_all(compare_work_dir_root)!
jq_normalize_path := os.join_path(compare_work_dir_root, 'normalize.jq')
-os.write_file(jq_normalize_path, jq_normalize)?
+os.write_file(jq_normalize_path, jq_normalize)!
valid = 0
e = 0
@@ -147,19 +147,19 @@ fn test_alexcrichton_toml_rs() {
array_type = 2
}
-os.write_file(v_toml_json_path, to_alexcrichton(toml_doc.ast.table, array_type))?
+os.write_file(v_toml_json_path, to_alexcrichton(toml_doc.ast.table, array_type))!
-alexcrichton_json := os.read_file(valid_test_file.all_before_last('.') + '.json')?
+alexcrichton_json := os.read_file(valid_test_file.all_before_last('.') + '.json')!
-os.write_file(alexcrichton_toml_json_path, alexcrichton_json)?
+os.write_file(alexcrichton_toml_json_path, alexcrichton_json)!
v_normalized_json := run([jq, '-S', '-f "$jq_normalize_path"', v_toml_json_path]) or {
-contents := os.read_file(v_toml_json_path)?
+contents := os.read_file(v_toml_json_path)!
panic(err.msg() + '\n$contents')
}
alexcrichton_normalized_json := run([jq, '-S', '-f "$jq_normalize_path"',
alexcrichton_toml_json_path]) or {
-contents := os.read_file(v_toml_json_path)?
+contents := os.read_file(v_toml_json_path)!
panic(err.msg() + '\n$contents')
}
@@ -194,7 +194,7 @@ fn test_alexcrichton_toml_rs() {
println('OK [${i + 1}/$invalid_test_files.len] "$invalid_test_file"...')
}
if toml_doc := toml.parse_file(invalid_test_file) {
-content_that_should_have_failed := os.read_file(invalid_test_file)?
+content_that_should_have_failed := os.read_file(invalid_test_file)!
println(' This TOML should have failed:\n${'-'.repeat(40)}\n$content_that_should_have_failed\n${'-'.repeat(40)}')
assert false
} else {
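
The same mechanical substitution runs through the remaining test files: calls that previously ended in `?` now end in `!`, and existing `or { ... }` blocks (with `err` available inside) keep working unchanged. A small sketch of the two call styles against the result-returning `os.write_file`/`os.read_file`; the `check` function and the temp path are illustrative only:

import os

// Propagate: `!` after each call forwards any io error to the caller,
// so `check` itself returns a result.
fn check(path string) ! {
	os.write_file(path, 'hello')!
	content := os.read_file(path)!
	assert content == 'hello'
}

fn main() {
	// Handle: the `or` block catches the error and can inspect `err`.
	check('/tmp/result_example.txt') or { panic(err.msg()) }
}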

View File

@@ -25,10 +25,10 @@ name = "Born in the USA"
const fprefix = os.join_path(os.dir(@FILE), 'testdata', os.file_name(@FILE).all_before_last('.'))
fn test_nested_array_of_tables() {
-mut toml_doc := toml.parse_text(toml_text)?
+mut toml_doc := toml.parse_text(toml_text)!
toml_json := to.json(toml_doc)
eprintln(toml_json)
-assert toml_json == os.read_file(fprefix + '.out')?
+assert toml_json == os.read_file(fprefix + '.out')!
}

View File

@@ -5,11 +5,11 @@ import toml.to
const fprefix = os.join_path(os.dir(@FILE), 'testdata', os.file_name(@FILE).all_before_last('.'))
fn test_array_of_tables_edge_case_file() {
-toml_doc := toml.parse_file(os.real_path(fprefix + '.toml'))?
+toml_doc := toml.parse_file(os.real_path(fprefix + '.toml'))!
toml_json := to.json(toml_doc)
-out_file_json := os.read_file(os.real_path(fprefix + '.out'))?
+out_file_json := os.read_file(os.real_path(fprefix + '.out'))!
println(toml_json)
assert toml_json == out_file_json
}

View File

@@ -95,12 +95,12 @@ fn test_burnt_sushi_tomltest() {
println('Testing value output of $valid_test_files.len valid TOML files using "$jq"...')
if os.exists(compare_work_dir_root) {
-os.rmdir_all(compare_work_dir_root)?
+os.rmdir_all(compare_work_dir_root)!
}
-os.mkdir_all(compare_work_dir_root)?
+os.mkdir_all(compare_work_dir_root)!
jq_normalize_path := os.join_path(compare_work_dir_root, 'normalize.jq')
-os.write_file(jq_normalize_path, jq_normalize)?
+os.write_file(jq_normalize_path, jq_normalize)!
valid = 0
e = 0
@@ -126,25 +126,25 @@ fn test_burnt_sushi_tomltest() {
if !hide_oks {
println('OK [${i + 1}/$valid_test_files.len] "$valid_test_file"...')
}
-toml_doc := toml.parse_file(valid_test_file)?
+toml_doc := toml.parse_file(valid_test_file)!
v_toml_json_path := os.join_path(compare_work_dir_root,
os.file_name(valid_test_file).all_before_last('.') + '.v.json')
bs_toml_json_path := os.join_path(compare_work_dir_root,
os.file_name(valid_test_file).all_before_last('.') + '.json')
-os.write_file(v_toml_json_path, to_burntsushi(toml_doc.ast.table))?
+os.write_file(v_toml_json_path, to_burntsushi(toml_doc.ast.table))!
-bs_json := os.read_file(valid_test_file.all_before_last('.') + '.json')?
+bs_json := os.read_file(valid_test_file.all_before_last('.') + '.json')!
-os.write_file(bs_toml_json_path, bs_json)?
+os.write_file(bs_toml_json_path, bs_json)!
v_normalized_json := run([jq, '-S', '-f "$jq_normalize_path"', v_toml_json_path]) or {
-contents := os.read_file(v_toml_json_path)?
+contents := os.read_file(v_toml_json_path)!
panic(err.msg() + '\n$contents')
}
bs_normalized_json := run([jq, '-S', '-f "$jq_normalize_path"', bs_toml_json_path]) or {
-contents := os.read_file(v_toml_json_path)?
+contents := os.read_file(v_toml_json_path)!
panic(err.msg() + '\n$contents')
}
@@ -177,7 +177,7 @@ fn test_burnt_sushi_tomltest() {
println('OK [${i + 1}/$invalid_test_files.len] "$invalid_test_file"...')
}
if toml_doc := toml.parse_file(invalid_test_file) {
-content_that_should_have_failed := os.read_file(invalid_test_file)?
+content_that_should_have_failed := os.read_file(invalid_test_file)!
println(' This TOML should have failed:\n${'-'.repeat(40)}\n$content_that_should_have_failed\n${'-'.repeat(40)}')
assert false
} else {

View File

@@ -5,11 +5,11 @@ import toml.to
const fprefix = os.join_path(os.dir(@FILE), 'testdata', os.file_name(@FILE).all_before_last('.'))
fn test_parse() {
-toml_doc := toml.parse_file(os.real_path(fprefix + '.toml'))?
+toml_doc := toml.parse_file(os.real_path(fprefix + '.toml'))!
toml_json := to.json(toml_doc)
println(toml_json)
-out_file_json := os.read_file(os.real_path(fprefix + '.out'))?
+out_file_json := os.read_file(os.real_path(fprefix + '.out'))!
assert toml_json == out_file_json
}

View File

@@ -7,7 +7,7 @@ fn path_by_extension(ext string) string {
}
fn test_keys() {
-toml_doc := toml.parse_file(path_by_extension('toml'))?
+toml_doc := toml.parse_file(path_by_extension('toml'))!
mut value := toml_doc.value('34-11')
assert value.int() == 23
@@ -19,7 +19,7 @@ fn test_keys() {
assert value.int() == 42
toml_json := to.json(toml_doc)
-out_file_json := os.read_file(path_by_extension('out'))?
+out_file_json := os.read_file(path_by_extension('out'))!
println(toml_json)
assert toml_json == out_file_json
//
@@ -36,9 +36,9 @@ fn test_keys() {
}
fn test_parse_dotted_key() {
-assert toml.parse_dotted_key('')? == []
-assert toml.parse_dotted_key('abc')? == ['abc']
-assert toml.parse_dotted_key('tube.test."test.test".h."i.j."."k"')? == ['tube', 'test',
+assert toml.parse_dotted_key('')! == []
+assert toml.parse_dotted_key('abc')! == ['abc']
+assert toml.parse_dotted_key('tube.test."test.test".h."i.j."."k"')! == ['tube', 'test',
'test.test', 'h', 'i.j.', 'k']
if x := toml.parse_dotted_key("'some unclosed string") {
assert false
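
The invalid-input tests above rely on V's if-guard form, `if x := call() { ... } else { ... }`: on success the unwrapped value is bound to `x`, on failure control drops into `else` with `err` in scope, which is how the tests assert that bad TOML is rejected. The guard works the same whether the callee returns an optional or, after this commit, a result. A short sketch of the pattern with `toml.parse_dotted_key`; the example keys are made up:

import toml

fn main() {
	// Success branch: the unwrapped []string is bound to `keys`.
	if keys := toml.parse_dotted_key('a.b."c.d"') {
		println(keys) // ['a', 'b', 'c.d']
	} else {
		println('parse failed: ${err.msg()}')
	}

	// Failure branch: an unclosed quoted segment makes the call return an
	// error, so control goes to `else` instead.
	if keys := toml.parse_dotted_key("'some unclosed string") {
		println('unexpectedly parsed: ${keys}')
	} else {
		println('rejected as expected: ${err.msg()}')
	}
}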