// Copyright (c) 2019-2022 Alexander Medvednikov. All rights reserved.
// Use of this source code is governed by an MIT license
// that can be found in the LICENSE file.
module json2

import io
import strings
import time

// Encoder encodes an `Any` type into its JSON representation.
// Its fields are parameters for customizing the resulting output.
pub struct Encoder {
	newline              u8
	newline_spaces_count int
	escape_unicode       bool = true
}

pub const default_encoder = Encoder{}
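
// Usage sketch (illustrative; the `Any('hi')` input and the output noted in the
// trailing comment are assumptions for demonstration, mirroring `prettify_json_str`
// below, which feeds a `strings.Builder` to `encode_value` in the same way):
//
//	mut sb := strings.new_builder(64)
//	e := Encoder{}
//	e.encode_value(Any('hi'), mut sb) or { panic(err) }
//	// sb.str() == '"hi"' -- strings are quoted (and escaped when needed)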

// byte array versions of the most common tokens/chars to avoid reallocations
const null_in_bytes = 'null'.bytes()

const true_in_bytes = 'true'.bytes()

const false_in_bytes = 'false'.bytes()

const zero_in_bytes = [u8(`0`)]

const comma_bytes = [u8(`,`)]

const colon_bytes = [u8(`:`)]

const space_bytes = [u8(` `)]

const unicode_escape_chars = [u8(`\\`), `u`]

const quote_bytes = [u8(`"`)]

const escaped_chars = [(r'\b').bytes(), (r'\f').bytes(), (r'\n').bytes(),
	(r'\r').bytes(), (r'\t').bytes()]

const curly_open = [u8(`{`)]

const curly_close = [u8(`}`)]

// encode_value encodes a value and writes it to the specified writer.
pub fn (e &Encoder) encode_value[T](val T, mut wr io.Writer) ! {
	e.encode_value_with_level[T](val, 1, mut wr)!
}

fn (e &Encoder) encode_newline(level int, mut wr io.Writer) ! {
	if e.newline != 0 {
		wr.write([e.newline])!
		for j := 0; j < level * e.newline_spaces_count; j++ {
			wr.write(json2.space_bytes)!
		}
	}
}
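
// For example (a sketch; the writer `wr` and the settings below are assumptions,
// mirroring the configuration used by `prettify_json_str` further down):
//
//	e := Encoder{
//		newline: `\n`
//		newline_spaces_count: 2
//	}
//	e.encode_newline(2, mut wr)! // writes '\n' followed by 2 * 2 = 4 spaces
//	// with the default Encoder{} (newline == 0) the call writes nothing at all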

fn (e &Encoder) encode_any(val Any, level int, mut wr io.Writer) ! {
	match val {
		string {
			e.encode_string(val, mut wr)!
		}
		bool {
			if val == true {
				wr.write(json2.true_in_bytes)!
			} else {
				wr.write(json2.false_in_bytes)!
			}
		}
		i8, i16, int, i64 {
			wr.write(val.str().bytes())!
		}
		u8, u16, u32, u64 {
			wr.write(val.str().bytes())!
		}
		f32, f64 {
			$if !nofloat ? {
				str_float := val.str().bytes()
				wr.write(str_float)!
				if str_float[str_float.len - 1] == `.` {
					wr.write(json2.zero_in_bytes)!
				}
				return
			}
			wr.write(json2.zero_in_bytes)!
		}
		map[string]Any {
			wr.write(json2.curly_open)!
			mut i := 0
			for k, v in val {
				e.encode_newline(level, mut wr)!
				e.encode_string(k, mut wr)!
				wr.write(json2.colon_bytes)!
				if e.newline != 0 {
					wr.write(json2.space_bytes)!
				}
				e.encode_value_with_level(v, level + 1, mut wr)!
				if i < val.len - 1 {
					wr.write(json2.comma_bytes)!
				}
				i++
			}
			e.encode_newline(level - 1, mut wr)!
			wr.write(json2.curly_close)!
		}
		[]Any {
			wr.write([u8(`[`)])!
			for i in 0 .. val.len {
				e.encode_newline(level, mut wr)!
				e.encode_value_with_level(val[i], level + 1, mut wr)!
				if i < val.len - 1 {
					wr.write(json2.comma_bytes)!
				}
			}
			e.encode_newline(level - 1, mut wr)!
			wr.write([u8(`]`)])!
		}
		time.Time {}
		Null {
			wr.write(json2.null_in_bytes)!
		}
	}
}
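
// With the compact `default_encoder` (newline == 0), `encode_newline` writes nothing
// and no space follows the colon, so a map is expected to serialize without any
// whitespace (the concrete values below are assumptions for illustration):
//
//	m := {'a': Any(1), 'b': Any(true)}
//	// encoding Any(m) writes: {"a":1,"b":true}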

fn (e &Encoder) encode_value_with_level[T](val T, level int, mut wr io.Writer) ! {
	$if T is string {
		e.encode_string(val, mut wr)!
	} $else $if T is Any {
		e.encode_any(val, level, mut wr)!
	} $else $if T is map[string]Any {
		// quirk: `val` is implicitly converted to `Any` here
		e.encode_any(val, level, mut wr)!
	} $else $if T is $map {
		// FIXME - `e.encode_struct` cannot encode the `map[string]map[string]int` type
	} $else $if T is []Any {
		e.encode_any(val, level, mut wr)!
	} $else $if T is Encodable {
		wr.write(val.json_str().bytes())!
	} $else $if T is $struct {
		e.encode_struct(val, level, mut wr)!
	} $else $if T is $enum {
		e.encode_any(Any(int(val)), level, mut wr)!
	} $else $if T in [Null, bool, $float, $int] {
		e.encode_any(val, level, mut wr)!
	} $else {
		// dump(val.str())
		return error('cannot encode value with ${typeof(val).name} type')
	}
}

fn (e &Encoder) encode_struct[U](val U, level int, mut wr io.Writer) ! {
	wr.write(json2.curly_open)!
	mut i := 0
	mut fields_len := 0
	$for field in U.fields {
		if val.$(field.name).str() != 'Option(error: none)' {
			fields_len++
		}
	}
	$for field in U.fields {
		mut ignore_field := false
		value := val.$(field.name)
		mut json_name := ''
		for attr in field.attrs {
			if attr.contains('json: ') {
				json_name = attr.replace('json: ', '')
				break
			}
		}

		$if field.is_option {
			is_none := value.str() == 'Option(error: none)'

			if !is_none {
				e.encode_newline(level, mut wr)!
				if json_name != '' {
					e.encode_string(json_name, mut wr)!
				} else {
					e.encode_string(field.name, mut wr)!
				}
				wr.write(json2.colon_bytes)!

				if e.newline != 0 {
					wr.write(json2.space_bytes)!
				}

				$if field.typ is ?string {
					e.encode_string(val.$(field.name) ?.str()#[8..-2], mut wr)!
				} $else $if field.typ is ?bool || field.typ is ?f32 || field.typ is ?f64
					|| field.typ is ?i8 || field.typ is ?i16 || field.typ is ?int
					|| field.typ is ?i64 || field.typ is ?u8 || field.typ is ?u16
					|| field.typ is ?u32 || field.typ is ?u64 {
					wr.write(val.$(field.name) ?.str()#[7..-1].bytes())!
				} $else $if field.typ is ?time.Time {
					option_value := val.$(field.name) as ?time.Time
					parsed_time := option_value as time.Time
					e.encode_string(parsed_time.format_rfc3339(), mut wr)!
				} $else $if field.is_array {
					e.encode_array(value, level + 1, mut wr)!
				} $else $if field.is_struct {
					e.encode_struct(value, level + 1, mut wr)!
				} $else $if field.is_enum {
					// FIXME - checker and cast error
					// wr.write(int(val.$(field.name)?).str().bytes())!
					return error('type ${typeof(val).name} cannot be encoded yet')
				} $else $if field.is_alias {
					match field.unaliased_typ {
						typeof[string]().idx {
							e.encode_string(value.str(), mut wr)!
						}
						typeof[bool]().idx, typeof[f32]().idx, typeof[f64]().idx, typeof[i8]().idx,
						typeof[i16]().idx, typeof[int]().idx, typeof[i64]().idx, typeof[u8]().idx,
						typeof[u16]().idx, typeof[u32]().idx, typeof[u64]().idx {
							wr.write(value.str().bytes())!
						}
						typeof[[]byte]().idx, typeof[[]int]().idx {
							// FIXME - error: could not infer generic type `U` in call to `encode_array`
							// e.encode_array(value, level, mut wr)!
						}
						else {
							// e.encode_value_with_level(value, level + 1, mut wr)!
						}
					}
				} $else {
					return error('type ${typeof(val).name} cannot be array encoded')
				}
			} else {
				ignore_field = true
			}
		} $else {
			is_none := val.$(field.name).str() == 'unknown sum type value'
			if !is_none {
				e.encode_newline(level, mut wr)!
				if json_name != '' {
					e.encode_string(json_name, mut wr)!
				} else {
					e.encode_string(field.name, mut wr)!
				}
				wr.write(json2.colon_bytes)!

				if e.newline != 0 {
					wr.write(json2.space_bytes)!
				}
			}

			$if field.typ is string {
				e.encode_string(val.$(field.name).str(), mut wr)!
			} $else $if field.typ is time.Time {
				wr.write(json2.quote_bytes)!
				wr.write(val.$(field.name).format_rfc3339().bytes())!
				wr.write(json2.quote_bytes)!
			} $else $if field.typ in [bool, $float, $int] {
				wr.write(val.$(field.name).str().bytes())!
			} $else $if field.is_array {
				// TODO - replace with `field.typ is $array`
				e.encode_array(value, level + 1, mut wr)!
			} $else $if field.typ is $array {
				// e.encode_array(value, level + 1, mut wr)! // FIXME - error: could not infer generic type `U` in call to `encode_array`
			} $else $if field.typ is $struct {
				e.encode_struct(value, level + 1, mut wr)!
			} $else $if field.is_map {
				wr.write(json2.curly_open)!
				mut idx := 0
				for k, v in value {
					e.encode_newline(level, mut wr)!
					e.encode_string(k.str(), mut wr)!
					wr.write(json2.colon_bytes)!
					if e.newline != 0 {
						wr.write(json2.space_bytes)!
					}
					e.encode_value_with_level(v, level + 1, mut wr)!
					if idx < value.len - 1 {
						wr.write(json2.comma_bytes)!
					}
					idx++
				}
				e.encode_newline(level, mut wr)!
				wr.write(json2.curly_close)!
			} $else $if field.is_enum {
				// TODO - replace with `field.typ is $enum`
				wr.write(int(val.$(field.name)).str().bytes())!
			} $else $if field.typ is $enum {
				// wr.write(int(val.$(field.name)).str().bytes())! // FIXME - error: cannot cast string to `int`, use `val.$field.name.int()` instead.
			} $else $if field.typ is $sumtype {
				// dump(val.$(field.name).str())
				// dump(is_none)
				sum_type_value := value.str()#[typeof(val.$(field.name)).name.len + 1..-1]

				is_string := sum_type_value[0] == "'"[0]

				// mut is_struct := false
				// mut is_sumtype := false
				// mut is_enum := false
				// mut is_array := false

				match sum_type_value[0] {
					`0`...`9` {
						if sum_type_value.contains_any(' /:-') {
							date_time_str := time.parse(sum_type_value)!
							wr.write(date_time_str.format_rfc3339().bytes())!
						} else {
							wr.write(sum_type_value.bytes())!
						}
					}
					`A`...`Z` {
						// SumTypes(0)
						if sum_type_value.contains('(') {
							if !sum_type_value.all_before('(').contains_any(' "\'[') {
								// is_sumtype = true
							}
						}
						// StructType{
						// StructType[int]{
						if sum_type_value.contains('{') {
							if !sum_type_value.all_before('{').contains_any(' "\'') {
								// is_struct = true
								// TODO
								// e.encode_struct_from_sumtype(value, level + 1, mut wr)!
							}
						}
					}
					`a`...`z` {
						if sum_type_value in ['true', 'false'] {
							wr.write(sum_type_value.bytes())!
						} else {
							// is_enum = true
						}
					}
					else {
						// dump('else')
					}
				}
				// dump(sum_type_value)

				// dump(is_none)
				// dump(is_string)
				// dump(is_struct)
				// dump(is_sumtype)
				// dump(is_enum)
				// dump(is_array)
				if is_string {
					e.encode_string(sum_type_value#[1..-1], mut wr)!
				}
			} $else $if field.typ is $alias {
				$if field.unaliased_typ is string {
					e.encode_string(val.$(field.name).str(), mut wr)!
				} $else $if field.unaliased_typ is time.Time {
					parsed_time := time.parse(val.$(field.name).str()) or { time.Time{} }
					e.encode_string(parsed_time.format_rfc3339(), mut wr)!
				} $else $if field.unaliased_typ in [bool, $float, $int] {
					wr.write(val.$(field.name).str().bytes())!
				} $else $if field.unaliased_typ is $array {
					// e.encode_array(val.$(field.name), level + 1, mut wr)! // FIXME - error: could not infer generic type `U` in call to `encode_array`
				} $else $if field.unaliased_typ is $struct {
					// e.encode_struct(val.$(field.name), level + 1, mut wr)! // FIXME - error: cannot use `BoolAlias` as `StringAlias` in argument 1 to `x.json2.Encoder.encode_struct`
					e.encode_struct(value, level + 1, mut wr)!
				} $else $if field.unaliased_typ is $enum {
					// enum_value := val.$(field.name)
					// dump(int(val.$(field.name))) // FIXME
					// dump(val.$(field.name).int()) // FIXME - error: unknown method or field: `BoolAlias.int`
					// dump(val.$(field.name).int()) // FIXME - error: cannot convert 'enum <anonymous>' to 'struct string'

					// wr.write(val.$(field.name).int().str().bytes())! // FIXME - error: unknown method or field: `BoolAlias.int`
				} $else $if field.unaliased_typ is $sumtype {
				} $else {
					return error('the alias ${typeof(val).name} cannot be encoded')
				}
			} $else {
				return error('type ${typeof(val).name} cannot be array encoded')
			}
		}

		if i < fields_len - 1 && !ignore_field {
			wr.write(json2.comma_bytes)!
		}
		if !ignore_field {
			i++
		}
	}
	e.encode_newline(level - 1, mut wr)!
	wr.write(json2.curly_close)!
}

fn (e &Encoder) encode_array[U](val []U, level int, mut wr io.Writer) ! {
	wr.write([u8(`[`)])!
	for i in 0 .. val.len {
		e.encode_newline(level, mut wr)!

		$if U is string {
			e.encode_any(val[i], level + 1, mut wr)!
		} $else $if U is bool {
			e.encode_any(bool(val[i]), level + 1, mut wr)!
		} $else $if U is f32 {
			e.encode_any(f32(val[i]), level + 1, mut wr)!
		} $else $if U is f64 {
			e.encode_any(f64(val[i]), level + 1, mut wr)!
		} $else $if U is i8 {
			e.encode_any(i8(val[i]), level + 1, mut wr)!
		} $else $if U is i16 {
			e.encode_any(i16(val[i]), level + 1, mut wr)!
		} $else $if U is int {
			e.encode_any(int(val[i]), level + 1, mut wr)!
		} $else $if U is i64 {
			e.encode_any(i64(val[i]), level + 1, mut wr)!
		} $else $if U is u8 {
			e.encode_any(u8(val[i]), level + 1, mut wr)!
		} $else $if U is byte {
			e.encode_any(u8(val[i]), level + 1, mut wr)!
		} $else $if U is u16 {
			e.encode_any(u16(val[i]), level + 1, mut wr)!
		} $else $if U is u32 {
			e.encode_any(u32(val[i]), level + 1, mut wr)!
		} $else $if U is u64 {
			e.encode_any(u64(val[i]), level + 1, mut wr)!
		} $else $if U is $array {
			// FIXME - error: could not infer generic type `U` in call to `encode_array`
			// e.encode_array(val[i], level + 1, mut wr)!
		} $else $if U is $struct {
			e.encode_struct(val[i], level + 1, mut wr)!
		} $else $if U is $sumtype {
			$if U is Any {
				e.encode_any(val[i], level + 1, mut wr)!
			} $else {
				// TODO
			}
		} $else $if U is $enum {
			e.encode_any(i64(val[i]), level + 1, mut wr)!
		} $else {
			return error('type ${typeof(val).name} cannot be array encoded')
		}

		if i < val.len - 1 {
			wr.write(json2.comma_bytes)!
		}
	}

	e.encode_newline(level - 1, mut wr)!
	wr.write([u8(`]`)])!
}

// str returns the JSON string representation of the `map[string]Any` type.
pub fn (f map[string]Any) str() string {
	return Any(f).json_str()
}

// str returns the JSON string representation of the `[]Any` type.
pub fn (f []Any) str() string {
	return Any(f).json_str()
}

// str returns the string representation of the `Any` type. Use the `json_str` method
// if you want the escaped, JSON-encoded version of the `Any` value.
pub fn (f Any) str() string {
	if f is string {
		return f
	} else {
		return f.json_str()
	}
}
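
// For example (a sketch; the sample value is an assumption for illustration):
//
//	println(Any('a"b').str()) // a"b
//	println(Any('a"b').json_str()) // "a\"b" -- quoted and escaped per the JSON spec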

// json_str returns the JSON string representation of the `Any` type.
[manualfree]
pub fn (f Any) json_str() string {
	return encode(f)
}

// prettify_json_str returns the pretty-formatted JSON string representation of the `Any` type.
[manualfree]
pub fn (f Any) prettify_json_str() string {
	mut sb := strings.new_builder(4096)
	defer {
		unsafe { sb.free() }
	}
	mut enc := Encoder{
		newline: `\n`
		newline_spaces_count: 2
	}
	enc.encode_value(f, mut sb) or {}
	return sb.str()
}
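
// For example (an illustrative sketch; the concrete map is an assumption):
//
//	m := {'name': Any('Bob'), 'age': Any(20)}
//	println(Any(m).prettify_json_str())
//	// {
//	//   "name": "Bob",
//	//   "age": 20
//	// }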

// CharLengthIterator is an iterator that, on every iteration, yields the byte
// length of the UTF-8 character at the current position in the given text.
// (e.g.: "t✔" => [t => 1, ✔ => 3])
struct CharLengthIterator {
	text string
mut:
	idx int
}

fn (mut iter CharLengthIterator) next() ?int {
	if iter.idx >= iter.text.len {
		return none
	}
	defer {
		iter.idx++
	}
	mut len := 1
	c := iter.text[iter.idx]
	if (c & (1 << 7)) != 0 {
		for t := u8(1 << 6); (c & t) != 0; t >>= 1 {
			len++
			iter.idx++
		}
	}
	return len
}
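
// A quick sketch of the iteration (the sample text is an assumption; `✔` takes
// three bytes in UTF-8, so the iterator is expected to yield 1, then 3):
//
//	mut it := CharLengthIterator{
//		text: 't✔'
//	}
//	for char_len in it {
//		print('${char_len} ') // 1 3
//	}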

// TODO: this needs a refactor. It is slow; the longer the string, the worse the performance.
// encode_string returns the JSON spec-compliant version of the string.
[manualfree]
fn (e &Encoder) encode_string(s string, mut wr io.Writer) ! {
	mut char_lens := CharLengthIterator{
		text: s
	}
	mut i := 0
	wr.write(json2.quote_bytes)!
	for char_len in char_lens {
		if char_len == 1 {
			chr := s[i]
			if chr in important_escapable_chars {
				for j := 0; j < important_escapable_chars.len; j++ {
					if chr == important_escapable_chars[j] {
						wr.write(json2.escaped_chars[j])!
						break
					}
				}
			} else if chr == `"` || chr == `/` || chr == `\\` {
				wr.write([u8(`\\`), chr])!
			} else if int(chr) < 0x20 {
				hex_code := chr.hex().bytes()
				wr.write(json2.unicode_escape_chars)! // \u
				wr.write(json2.zero_in_bytes)! // \u0
				wr.write(json2.zero_in_bytes)! // \u00
				wr.write(hex_code)! // \u00xx
			} else {
				wr.write([u8(chr)])!
			}
		} else {
			slice := s[i..i + char_len]
			hex_code := slice.utf32_code().hex().bytes()
			if !e.escape_unicode || hex_code.len < 4 {
				// unescaped non-ASCII char
				wr.write(slice.bytes())!
			} else if hex_code.len == 4 {
				// a unicode code point
				wr.write(json2.unicode_escape_chars)!
				wr.write(hex_code)!
			} else {
				// TODO: still figuring out what to do with code points
				// that need more than 4 hex digits (surrogate pairs)
				wr.write(json2.space_bytes)!
			}
			unsafe {
				slice.free()
				hex_code.free()
			}
		}
		i += char_len
	}

	wr.write(json2.quote_bytes)!
}
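
// Escaping sketch (the input is an assumption for illustration). With the default
// `escape_unicode: true`, multi-byte characters become `\uXXXX` escapes, while
// quotes and control characters are backslash-escaped:
//
//	mut sb := strings.new_builder(32)
//	default_encoder.encode_string('✔ "ok"\t', mut sb) or { panic(err) }
//	// sb.str() is expected to be: "\u2714 \"ok\"\t" (including the outer quotes)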
|