Stars: 1 · Forks: 0 — mirror of https://github.com/vlang/v.git (synced 2023-08-10 21:13:21 +03:00)

fmt: remove space in front of ? and ! (#14366)

This commit is contained in:
Daniel Däschle
2022-05-13 05:56:21 +02:00
committed by GitHub
parent df029da942
commit d679146a80
324 changed files with 1865 additions and 1879 deletions

View File

@@ -10,10 +10,10 @@ import net.http
fn main() {
// Decoding
resp := http.get('https://example.com') ?
resp := http.get('https://example.com')?
// raw decode
raw_person := json2.raw_decode(resp.text) ?
raw_person := json2.raw_decode(resp.text)?
// Casting `Any` type / Navigating
person := raw_person.as_map()

View File

@@ -142,8 +142,8 @@ fn new_parser(srce string, convert_type bool) Parser {
fn (mut p Parser) decode() ?Any {
p.next()
p.next_with_err() ?
fi := p.decode_value() ?
p.next_with_err()?
fi := p.decode_value()?
if p.tok.kind != .eof {
return IError(InvalidTokenError{
token: p.tok
@@ -168,7 +168,7 @@ fn (mut p Parser) decode_value() ?Any {
.int_, .float {
tl := p.tok.lit.bytestr()
kind := p.tok.kind
p.next_with_err() ?
p.next_with_err()?
if p.convert_type {
$if !nofloat ? {
if kind == .float {
@@ -181,14 +181,14 @@ fn (mut p Parser) decode_value() ?Any {
}
.bool_ {
lit := p.tok.lit.bytestr()
p.next_with_err() ?
p.next_with_err()?
if p.convert_type {
return Any(lit.bool())
}
return Any(lit)
}
.null {
p.next_with_err() ?
p.next_with_err()?
if p.convert_type {
return Any(null)
}
@@ -196,7 +196,7 @@ fn (mut p Parser) decode_value() ?Any {
}
.str_ {
str := p.tok.lit.bytestr()
p.next_with_err() ?
p.next_with_err()?
return Any(str)
}
else {
@@ -211,13 +211,13 @@ fn (mut p Parser) decode_value() ?Any {
[manualfree]
fn (mut p Parser) decode_array() ?Any {
mut items := []Any{}
p.next_with_err() ?
p.next_with_err()?
p.n_level++
for p.tok.kind != .rsbr {
item := p.decode_value() ?
item := p.decode_value()?
items << item
if p.tok.kind == .comma {
p.next_with_err() ?
p.next_with_err()?
if p.tok.kind == .rsbr {
return IError(InvalidTokenError{
token: p.tok
@@ -230,14 +230,14 @@ fn (mut p Parser) decode_array() ?Any {
})
}
}
p.next_with_err() ?
p.next_with_err()?
p.n_level--
return Any(items)
}
fn (mut p Parser) decode_object() ?Any {
mut fields := map[string]Any{}
p.next_with_err() ?
p.next_with_err()?
p.n_level++
for p.tok.kind != .rcbr {
if p.tok.kind != .str_ {
@@ -248,7 +248,7 @@ fn (mut p Parser) decode_object() ?Any {
}
cur_key := p.tok.lit.bytestr()
p.next_with_err() ?
p.next_with_err()?
if p.tok.kind != .colon {
return IError(InvalidTokenError{
token: p.tok
@@ -256,18 +256,18 @@ fn (mut p Parser) decode_object() ?Any {
})
}
p.next_with_err() ?
fields[cur_key] = p.decode_value() ?
p.next_with_err()?
fields[cur_key] = p.decode_value()?
if p.tok.kind != .comma && p.tok.kind != .rcbr {
return IError(UnknownTokenError{
token: p.tok
kind: .object
})
} else if p.tok.kind == .comma {
p.next_with_err() ?
p.next_with_err()?
}
}
p.next_with_err() ?
p.next_with_err()?
p.n_level--
return Any(fields)
}

View File

@@ -1,43 +1,43 @@
module json2
fn test_raw_decode_string() ? {
str := raw_decode('"Hello!"') ?
str := raw_decode('"Hello!"')?
assert str.str() == 'Hello!'
}
fn test_raw_decode_string_escape() ? {
jstr := raw_decode('"\u001b"') ?
jstr := raw_decode('"\u001b"')?
str := jstr.str()
assert str.len == 1
assert str[0] == 27
}
fn test_raw_decode_number() ? {
num := raw_decode('123') ?
num := raw_decode('123')?
assert num.int() == 123
}
fn test_raw_decode_array() ? {
raw_arr := raw_decode('["Foo", 1]') ?
raw_arr := raw_decode('["Foo", 1]')?
arr := raw_arr.arr()
assert arr[0] or { 0 }.str() == 'Foo'
assert arr[1] or { 0 }.int() == 1
}
fn test_raw_decode_bool() ? {
bol := raw_decode('false') ?
bol := raw_decode('false')?
assert bol.bool() == false
}
fn test_raw_decode_map() ? {
raw_mp := raw_decode('{"name":"Bob","age":20}') ?
raw_mp := raw_decode('{"name":"Bob","age":20}')?
mp := raw_mp.as_map()
assert mp['name'] or { 0 }.str() == 'Bob'
assert mp['age'] or { 0 }.int() == 20
}
fn test_raw_decode_null() ? {
nul := raw_decode('null') ?
nul := raw_decode('null')?
assert nul is Null
}
@@ -50,12 +50,12 @@ fn test_raw_decode_invalid() ? {
}
fn test_raw_decode_string_with_dollarsign() ? {
str := raw_decode(r'"Hello $world"') ?
str := raw_decode(r'"Hello $world"')?
assert str.str() == r'Hello $world'
}
fn test_raw_decode_map_with_whitespaces() ? {
raw_mp := raw_decode(' \n\t{"name":"Bob","age":20}\n\t') ?
raw_mp := raw_decode(' \n\t{"name":"Bob","age":20}\n\t')?
mp := raw_mp.as_map()
assert mp['name'] or { 0 }.str() == 'Bob'
assert mp['age'] or { 0 }.int() == 20
@@ -63,7 +63,7 @@ fn test_raw_decode_map_with_whitespaces() ? {
fn test_nested_array_object() ? {
mut parser := new_parser(r'[[[[[],[],[]]]],{"Test":{}},[[]]]', false)
decoded := parser.decode() ?
decoded := parser.decode()?
assert parser.n_level == 0
}

View File

@@ -39,14 +39,14 @@ const escaped_chars = [(r'\b').bytes(), (r'\f').bytes(), (r'\n').bytes(),
// encode_value encodes an `Any` value to the specific writer.
pub fn (e &Encoder) encode_value(f Any, mut wr io.Writer) ? {
e.encode_value_with_level(f, 1, mut wr) ?
e.encode_value_with_level(f, 1, mut wr)?
}
fn (e &Encoder) encode_newline(level int, mut wr io.Writer) ? {
if e.newline != 0 {
wr.write([e.newline]) ?
wr.write([e.newline])?
for j := 0; j < level * e.newline_spaces_count; j++ {
wr.write(json2.space_bytes) ?
wr.write(json2.space_bytes)?
}
}
}
@@ -54,62 +54,62 @@ fn (e &Encoder) encode_newline(level int, mut wr io.Writer) ? {
fn (e &Encoder) encode_value_with_level(f Any, level int, mut wr io.Writer) ? {
match f {
string {
e.encode_string(f, mut wr) ?
e.encode_string(f, mut wr)?
}
bool {
if f == true {
wr.write(json2.true_in_bytes) ?
wr.write(json2.true_in_bytes)?
} else {
wr.write(json2.false_in_bytes) ?
wr.write(json2.false_in_bytes)?
}
}
int, u64, i64 {
wr.write(f.str().bytes()) ?
wr.write(f.str().bytes())?
}
f32, f64 {
$if !nofloat ? {
str_float := f.str().bytes()
wr.write(str_float) ?
wr.write(str_float)?
if str_float[str_float.len - 1] == `.` {
wr.write(json2.zero_in_bytes) ?
wr.write(json2.zero_in_bytes)?
}
return
}
wr.write(json2.zero_in_bytes) ?
wr.write(json2.zero_in_bytes)?
}
map[string]Any {
wr.write([u8(`{`)]) ?
wr.write([u8(`{`)])?
mut i := 0
for k, v in f {
e.encode_newline(level, mut wr) ?
e.encode_string(k, mut wr) ?
wr.write(json2.colon_bytes) ?
e.encode_newline(level, mut wr)?
e.encode_string(k, mut wr)?
wr.write(json2.colon_bytes)?
if e.newline != 0 {
wr.write(json2.space_bytes) ?
wr.write(json2.space_bytes)?
}
e.encode_value_with_level(v, level + 1, mut wr) ?
e.encode_value_with_level(v, level + 1, mut wr)?
if i < f.len - 1 {
wr.write(json2.comma_bytes) ?
wr.write(json2.comma_bytes)?
}
i++
}
e.encode_newline(level - 1, mut wr) ?
wr.write([u8(`}`)]) ?
e.encode_newline(level - 1, mut wr)?
wr.write([u8(`}`)])?
}
[]Any {
wr.write([u8(`[`)]) ?
wr.write([u8(`[`)])?
for i, v in f {
e.encode_newline(level, mut wr) ?
e.encode_value_with_level(v, level + 1, mut wr) ?
e.encode_newline(level, mut wr)?
e.encode_value_with_level(v, level + 1, mut wr)?
if i < f.len - 1 {
wr.write(json2.comma_bytes) ?
wr.write(json2.comma_bytes)?
}
}
e.encode_newline(level - 1, mut wr) ?
wr.write([u8(`]`)]) ?
e.encode_newline(level - 1, mut wr)?
wr.write([u8(`]`)])?
}
Null {
wr.write(json2.null_in_bytes) ?
wr.write(json2.null_in_bytes)?
}
}
}
@@ -195,42 +195,42 @@ fn (e &Encoder) encode_string(s string, mut wr io.Writer) ? {
text: s
}
mut i := 0
wr.write(json2.quote_bytes) ?
wr.write(json2.quote_bytes)?
for char_len in char_lens {
if char_len == 1 {
chr := s[i]
if chr in important_escapable_chars {
for j := 0; j < important_escapable_chars.len; j++ {
if chr == important_escapable_chars[j] {
wr.write(json2.escaped_chars[j]) ?
wr.write(json2.escaped_chars[j])?
break
}
}
} else if chr == `"` || chr == `/` || chr == `\\` {
wr.write([u8(`\\`), chr]) ?
wr.write([u8(`\\`), chr])?
} else if int(chr) < 0x20 {
hex_code := chr.hex().bytes()
wr.write(json2.unicode_escape_chars) ? // \u
wr.write(json2.zero_in_bytes) ? // \u0
wr.write(json2.zero_in_bytes) ? // \u00
wr.write(hex_code) ? // \u00xxxx
wr.write(json2.unicode_escape_chars)? // \u
wr.write(json2.zero_in_bytes)? // \u0
wr.write(json2.zero_in_bytes)? // \u00
wr.write(hex_code)? // \u00xxxx
} else {
wr.write([u8(chr)]) ?
wr.write([u8(chr)])?
}
} else {
slice := s[i..i + char_len]
hex_code := slice.utf32_code().hex().bytes()
if !e.escape_unicode || hex_code.len < 4 {
// unescaped non-ASCII char
wr.write(slice.bytes()) ?
wr.write(slice.bytes())?
} else if hex_code.len == 4 {
// a unicode endpoint
wr.write(json2.unicode_escape_chars) ?
wr.write(hex_code) ?
wr.write(json2.unicode_escape_chars)?
wr.write(hex_code)?
} else {
// TODO: still figuring out what
// to do with more than 4 chars
wr.write(json2.space_bytes) ?
wr.write(json2.space_bytes)?
}
unsafe {
slice.free()
@@ -240,5 +240,5 @@ fn (e &Encoder) encode_string(s string, mut wr io.Writer) ? {
i += char_len
}
wr.write(json2.quote_bytes) ?
wr.write(json2.quote_bytes)?
}

View File

@@ -39,7 +39,7 @@ fn test_json_string_non_ascii() {
fn test_utf8_strings_are_not_modified() ? {
original := '{"s":"Schilddrüsenerkrankungen"}'
// dump(original)
deresult := json2.raw_decode(original) ?
deresult := json2.raw_decode(original)?
// dump(deresult)
assert deresult.str() == original
}
@@ -51,13 +51,13 @@ fn test_encoder_unescaped_utf32() ? {
}
mut sb := strings.new_builder(20)
enc.encode_value(jap_text, mut sb) ?
enc.encode_value(jap_text, mut sb)?
assert sb.str() == '"$jap_text"'
sb.go_back_to(0)
emoji_text := json2.Any('🐈')
enc.encode_value(emoji_text, mut sb) ?
enc.encode_value(emoji_text, mut sb)?
assert sb.str() == '"$emoji_text"'
}
@@ -74,7 +74,7 @@ fn test_encoder_prettify() ? {
newline_spaces_count: 2
}
mut sb := strings.new_builder(20)
enc.encode_value(obj, mut sb) ?
enc.encode_value(obj, mut sb)?
assert sb.str() == '{
"hello": "world",
"arr": [

View File

@@ -26,7 +26,7 @@ pub fn fast_raw_decode(src string) ?Any {
// decode is a generic function that decodes a JSON string into the target type.
pub fn decode<T>(src string) ?T {
res := raw_decode(src) ?
res := raw_decode(src)?
mut typ := T{}
typ.from_json(res)
return typ