
vfmt: fix typename DecompressParams -> DezParams in function parameters, on import compress as z (fix #17216)

This commit is contained in:
Delyan Angelov 2023-02-04 23:37:31 +02:00
parent e8ca2e62a7
commit d349648cda
GPG Key ID: 66886C0F12D595ED
3 changed files with 195 additions and 15 deletions
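For context, the bug is reproducible with a plain substring replacement, which is effectively what the old formatter code did for aliased imports: with `import compress as z`, replacing `compress` with `z` in the stringified parameter list also rewrites the inside of type names. A minimal standalone V sketch (illustrative only, not part of this commit):

fn main() {
	// naive module-name -> alias replacement, as done before this fix
	params := '(data []u8, params DecompressParams)'
	println(params.replace('compress', 'z')) // prints: (data []u8, params DezParams)
}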


@@ -160,9 +160,7 @@ fn stringify_fn_after_name(node &FnDecl, mut f strings.Builder, t &Table, cur_mo
}
}
s = util.no_cur_mod(s, cur_mod)
for mod, alias in m2a {
s = s.replace(mod, alias)
}
s = shorten_full_name_based_on_aliases(s, m2a)
if should_add_type {
if !is_type_only {
f.write_string(' ')
@@ -181,7 +179,7 @@ fn stringify_fn_after_name(node &FnDecl, mut f strings.Builder, t &Table, cur_mo
if node.return_type != void_type {
sreturn_type := util.no_cur_mod(t.type_to_str(node.return_type), cur_mod)
short_sreturn_type := shorten_full_name_based_on_aliases(sreturn_type, m2a)
f.write_string(' ' + short_sreturn_type)
f.write_string(' ${short_sreturn_type}')
}
}
@@ -192,6 +190,11 @@ struct StringifyModReplacement {
}
fn shorten_full_name_based_on_aliases(input string, m2a map[string]string) string {
if m2a.len == 0 || -1 == input.index_u8(`.`) {
// a simple typename, like `string` or `[]bool`; no module aliasings apply,
// (or there just are not any mappings)
return input
}
// Shorten the full names to their aliases, but replace the longer mods first, so that:
// `import user.project`
// `import user.project.routes`
@@ -199,20 +202,41 @@ fn shorten_full_name_based_on_aliases(input string, m2a map[string]string) strin
// Also take into account the nesting level, so `a.e.c.d` will be shortened before `a.xyz.b`, even though they are the same length.
mut replacements := []StringifyModReplacement{cap: m2a.len}
for mod, alias in m2a {
if input.contains(mod) {
replacements << StringifyModReplacement{
mod: mod
alias: alias
weight: mod.count('.') * 100 + mod.len
}
if mod == alias {
// for vlib modules like `import strings` -> mod: `strings` | alias: `strings`
// ... which is the same, so no replacements are needed
continue
}
if !input.contains(mod) {
continue
}
replacements << StringifyModReplacement{
mod: mod
alias: alias
weight: mod.count('.') * 100 + mod.len
}
}
mut res := input.clone()
if replacements.len > 0 {
if replacements.len == 0 {
return input
}
//
mut res := input
if replacements.len > 1 {
replacements.sort(a.weight > b.weight)
for r in replacements {
res = res.replace(r.mod, r.alias)
}
for r in replacements {
if -1 == res.index_u8(`.`) {
// there are no remaining module parts left in the type name, it is a local one after all
break
}
if !res.contains(r.mod) {
// nothing to replace as well (just minimises modifications and string clonings)
continue
}
// r.mod: `v.token` | r.alias: `xyz` | res: `v.token.Abc` -> `xyz.Abc`
// r.mod: `v.ast` | r.alias: `ast` | res: `v.ast.AliasTypeDecl` -> `ast.AliasTypeDecl`
// r.mod: `v.ast` | r.alias: `ast` | res: `[]v.ast.InterfaceEmbedding` -> `[]ast.InterfaceEmbedding`
res = res.replace(r.mod, r.alias)
}
return res
}
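To illustrate why the replacements above are weighted by nesting level and length before being applied, here is a small standalone V sketch (the module names and the Replacement struct are hypothetical, chosen to mirror the comment in the diff):

struct Replacement {
	mod    string
	alias  string
	weight int
}

fn main() {
	m2a := {
		'user.project':        'p'
		'user.project.routes': 'r'
	}
	mut replacements := []Replacement{}
	for mod, alias in m2a {
		replacements << Replacement{
			mod: mod
			alias: alias
			weight: mod.count('.') * 100 + mod.len
		}
	}
	// deeper/longer module names get a higher weight, so they are replaced first
	replacements.sort(a.weight > b.weight)
	mut res := 'fn (h user.project.routes.Handler) user.project.Config'
	for r in replacements {
		res = res.replace(r.mod, r.alias)
	}
	println(res) // fn (h r.Handler) p.Config
	// replacing `user.project` first would instead leave `p.routes.Handler` behind
}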


@@ -98,8 +98,11 @@ fn prepare_bin2v_file(mut fmt_bench benchmark.Benchmark) {
}
fn write_bin2v_keep_content() ! {
// Note: do not put large files here; the goal of this particular test is
// just to guarantee that the output of `v bin2v` is invariant to vfmt, not
// to stress out bin2v or vfmt...
img0 := os.join_path('vlib', 'v', 'embed_file', 'tests', 'v.png')
img1 := os.join_path('tutorials', 'building_a_simple_web_blog_with_vweb', 'img', 'time.png')
img1 := os.join_path('examples', 'assets', 'logo.png')
os.rm(b2v_keep_path)!
res := os.execute('${os.quoted_path(vexe)} bin2v -w ${os.quoted_path(b2v_keep_path)} ${os.quoted_path(img0)} ${os.quoted_path(img1)}')
if res.exit_code != 0 {
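The test comment above describes an invariance check: the file generated by `v bin2v` must come out of vfmt unchanged. A hypothetical sketch of such a check in isolation (the real test harness in this file differs):

import os

fn check_vfmt_invariant(vexe string, path string) ! {
	original := os.read_file(path)!
	// `v fmt file.v` prints the formatted source to stdout
	res := os.execute('${os.quoted_path(vexe)} fmt ${os.quoted_path(path)}')
	if res.exit_code != 0 {
		return error('vfmt failed: ${res.output}')
	}
	if res.output != original {
		return error('${path} is not vfmt invariant')
	}
}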


@@ -0,0 +1,153 @@
// [rfc1952](https://datatracker.ietf.org/doc/html/rfc1952) compliant
// gzip compression/decompression
module gzip

import compress as z
import hash.crc32

// compresses an array of bytes using gzip and returns the compressed bytes in a new array
// Example: compressed := gzip.compress(b)?
pub fn compress(data []u8) ![]u8 {
	compressed := compress.compress(data, 0)!
	// header
	mut result := [
		u8(0x1f), // magic numbers (1F 8B)
		0x8b,
		0x08, // deflate
		0x00, // header flags
		0x00, // 4-byte timestamp, 0 = no timestamp (00 00 00 00)
		0x00,
		0x00,
		0x00,
		0x00, // extra flags
		0xff, // operating system id (0xff = unknown)
	] // 10 bytes
	result << compressed
	// trailer
	checksum := crc32.sum(data)
	length := data.len
	result << [
		u8(checksum >> 24),
		u8(checksum >> 16),
		u8(checksum >> 8),
		u8(checksum),
		u8(length >> 24),
		u8(length >> 16),
		u8(length >> 8),
		u8(length),
	] // 8 bytes
	return result
}

[params]
pub struct DecompressParams {
	verify_header_checksum bool = true
	verify_length          bool = true
	verify_checksum        bool = true
}

pub const (
	reserved_bits = 0b1110_0000
	ftext = 0b0000_0001
	fextra = 0b0000_0100
	fname = 0b0000_1000
	fcomment = 0b0001_0000
	fhcrc = 0b0000_0010
)

const min_header_length = 18

[noinit]
pub struct GzipHeader {
pub mut:
	length            int = 10
	extra             []u8
	filename          []u8
	comment           []u8
	modification_time u32
	operating_system  u8
}

// validate validates the header and returns its details if valid
pub fn validate(data []u8, params DecompressParams) !GzipHeader {
	if data.len < gzip.min_header_length {
		return error('data is too short, not gzip compressed?')
	} else if data[0] != 0x1f || data[1] != 0x8b {
		return error('wrong magic numbers, not gzip compressed?')
	} else if data[2] != 0x08 {
		return error('gzip data is not compressed with DEFLATE')
	}
	mut header := GzipHeader{}
	// parse flags; we ignore most of them, but we still need to parse them
	// correctly, so we don't accidentally decompress something that belongs
	// to the header
	if data[3] & gzip.reserved_bits > 0 {
		// rfc 1952 2.3.1.2 Compliance
		// A compliant decompressor must give an error indication if any
		// reserved bit is non-zero, since such a bit could indicate the
		// presence of a new field that would cause subsequent data to be
		// interpreted incorrectly.
		return error('reserved flags are set, unsupported field detected')
	}
	if data[3] & gzip.fextra > 0 {
		xlen := data[header.length]
		header.extra = data[header.length + 1..header.length + 1 + xlen]
		header.length += xlen + 1
	}
	if data[3] & gzip.fname > 0 {
		// filename is zero-terminated, so skip until we hit a zero byte
		for header.length < data.len && data[header.length] != 0x00 {
			header.filename << data[header.length]
			header.length++
		}
		header.length++
	}
	if data[3] & gzip.fcomment > 0 {
		// comment is zero-terminated, so skip until we hit a zero byte
		for header.length < data.len && data[header.length] != 0x00 {
			header.comment << data[header.length]
			header.length++
		}
		header.length++
	}
	if data[3] & gzip.fhcrc > 0 {
		if header.length + 12 > data.len {
			return error('data too short')
		}
		checksum_header := crc32.sum(data[..header.length])
		checksum_header_expected := (u32(data[header.length]) << 24) | (u32(data[header.length + 1]) << 16) | (u32(data[
			header.length + 2]) << 8) | data[header.length + 3]
		if params.verify_header_checksum && checksum_header != checksum_header_expected {
			return error('header checksum verification failed')
		}
		header.length += 4
	}
	if header.length + 8 > data.len {
		return error('data too short')
	}
	header.operating_system = data[9]
	return header
}

// decompresses an array of bytes using zlib and returns the decompressed bytes in a new array
// Example: decompressed := gzip.decompress(b)?
pub fn decompress(data []u8, params DecompressParams) ![]u8 {
	gzip_header := validate(data, params)!
	header_length := gzip_header.length
	decompressed := compress.decompress(data[header_length..data.len - 8], 0)!
	length_expected := (u32(data[data.len - 4]) << 24) | (u32(data[data.len - 3]) << 16) | (u32(data[data.len - 2]) << 8) | data[data.len - 1]
	if params.verify_length && decompressed.len != length_expected {
		return error('length verification failed, got ${decompressed.len}, expected ${length_expected}')
	}
	checksum := crc32.sum(decompressed)
	checksum_expected := (u32(data[data.len - 8]) << 24) | (u32(data[data.len - 7]) << 16) | (u32(data[data.len - 6]) << 8) | data[data.len - 5]
	if params.verify_checksum && checksum != checksum_expected {
		return error('checksum verification failed')
	}
	return decompressed
}
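For reference, a short roundtrip usage sketch of the compress/decompress pair shown above (assuming the module is importable as `compress.gzip`, as the real vlib module is; illustrative only, not part of this commit):

import compress.gzip

fn main() {
	data := 'hello hello hello gzip'.bytes()
	compressed := gzip.compress(data) or { panic(err) }
	decompressed := gzip.decompress(compressed) or { panic(err) }
	assert decompressed == data
	println('roundtrip ok: ${data.len} bytes -> ${compressed.len} compressed bytes')
}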