mirror of https://github.com/vlang/v.git synced 2023-08-10 21:13:21 +03:00

check unused and unmodified vars in all modules, not just main

Alexander Medvednikov 2019-12-06 15:24:53 +03:00
parent bdaa421e8a
commit c8d111924d
31 changed files with 133 additions and 119 deletions
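
For illustration, a minimal sketch (module and variable names are hypothetical, not part of this commit) of code that these checks now reject in any module, not only `main`:

module mymod

pub fn demo() {
	unused := 1    // now reported as an unused variable outside of `main` too
	mut count := 0 // declared mutable but never modified below, so the
	               // `was declared as mutable but was never changed` error fires
	println(count)
}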

View File

@@ -1 +1,2 @@
 println('Hello, World!')

View File

@@ -81,11 +81,11 @@ pub fn (b mut Benchmark) neither_fail_nor_ok() {
 b.step_end_time = benchmark.now()
 }
-pub fn (b mut Benchmark) step_message(msg string) string {
+pub fn (b &Benchmark) step_message(msg string) string {
 return b.tdiff_in_ms(msg, b.step_start_time, b.step_end_time)
 }
-pub fn (b mut Benchmark) total_message(msg string) string {
+pub fn (b &Benchmark) total_message(msg string) string {
 mut tmsg := '$msg \n ok, fail, total = ' +
 term.ok_message('${b.nok:5d}') + ', ' +
 if b.nfail > 0 { term.fail_message('${b.nfail:5d}') } else { '${b.nfail:5d}' } + ', ' +
@@ -96,12 +96,12 @@ pub fn (b mut Benchmark) total_message(msg string) string {
 return b.tdiff_in_ms(tmsg, b.bench_start_time, b.bench_end_time)
 }
-pub fn (b mut Benchmark) total_duration() i64 {
+pub fn (b &Benchmark) total_duration() i64 {
 return (b.bench_end_time - b.bench_start_time)
 }
 ////////////////////////////////////////////////////////////////////
-fn (b mut Benchmark) tdiff_in_ms(s string, sticks i64, eticks i64) string {
+fn (b &Benchmark) tdiff_in_ms(s string, sticks i64, eticks i64) string {
 if b.verbose {
 tdiff := (eticks - sticks)
 return '${tdiff:6lld} ms | $s'

View File

@@ -43,7 +43,7 @@ mut:
 fn new_cgen(out_name_c string) &CGen {
 path := out_name_c
-mut out := os.create(path) or {
+out := os.create(path) or {
 println('failed to create $path')
 return &CGen{}
 }
@@ -192,7 +192,7 @@ fn (g mut CGen) register_thread_fn(wrapper_name, wrapper_text, struct_text strin
 }
 fn (v &V) prof_counters() string {
-mut res := []string
+res := []string
 // Global fns
 //for f in c.table.fns {
 //res << 'double ${c.table.cgen_name(f)}_time;'
@@ -212,7 +212,7 @@ fn (v &V) prof_counters() string {
 }
 fn (p &Parser) print_prof_counters() string {
-mut res := []string
+res := []string
 // Global fns
 //for f in p.table.fns {
 //counter := '${p.table.cgen_name(f)}_time'
@@ -410,10 +410,10 @@ fn (v &V) interface_table() string {
 sb.writeln('// NR methods = $t.gen_types.len')
 for i, gen_type in t.gen_types {
 methods += '{'
-for i, method in t.methods {
+for j, method in t.methods {
 // Cat_speak
 methods += '${gen_type}_${method.name}'
-if i < t.methods.len - 1 {
+if j < t.methods.len - 1 {
 methods += ', '
 }
 }

View File

@@ -213,8 +213,8 @@ fn (s mut Scanner) goto_scanner_position(scp ScannerPos) {
 s.last_nl_pos = scp.last_nl_pos
 }
-fn (s mut Scanner) get_last_nl_from_pos(_pos int) int {
-mut pos := if _pos >= s.text.len { s.text.len-1 } else { _pos }
+fn (s &Scanner) get_last_nl_from_pos(_pos int) int {
+pos := if _pos >= s.text.len { s.text.len-1 } else { _pos }
 for i := pos; i >= 0; i-- {
 if s.text[i] == `\n` {
 return i
@@ -223,7 +223,7 @@ fn (s mut Scanner) get_last_nl_from_pos(_pos int) int {
 return 0
 }
-fn (s mut Scanner) get_scanner_pos_of_token(tok &Token) ScannerPos {
+fn (s &Scanner) get_scanner_pos_of_token(tok &Token) ScannerPos {
 return ScannerPos{
 pos: tok.pos
 line_nr: tok.line_nr

View File

@@ -265,7 +265,7 @@ fn (p mut Parser) comptime_method_call(typ Type) {
 p.check(.dollar)
 var := p.check_name()
 mut j := 0
-for i, method in typ.methods {
+for method in typ.methods {
 if method.typ != 'void' {
 continue

View File

@@ -392,7 +392,7 @@ fn (p mut Parser) expression() string {
 //p.print_tok()
 //}
 ph := p.cgen.add_placeholder()
-mut typ := p.indot_expr()
+typ := p.indot_expr()
 is_str := typ=='string'
 is_ustr := typ=='ustring'
 // `a << b` ==> `array_push(&a, b)`

View File

@@ -72,7 +72,7 @@ fn (a []TypeInst) str() string {
 return r.str()
 }
-fn (p mut Parser) find_var_or_const(name string) ?Var {
+fn (p &Parser) find_var_or_const(name string) ?Var {
 if p.known_var(name) {
 return p.find_var(name)
 }
@@ -150,14 +150,14 @@ fn (p mut Parser) mark_arg_moved(v Var) {
 p.table.fns[p.cur_fn.name] = p.cur_fn
 }
-fn (p mut Parser) known_var(name string) bool {
+fn (p &Parser) known_var(name string) bool {
 _ = p.find_var(name) or {
 return false
 }
 return true
 }
-fn (p mut Parser) known_var_check_new_var(name string) bool {
+fn (p &Parser) known_var_check_new_var(name string) bool {
 _ = p.find_var_check_new_var(name) or {
 return false
 }
@@ -561,15 +561,18 @@ fn (p mut Parser) fn_decl() {
 // p.error('unclosed {')
 }
 // Make sure all vars in this function are used (only in main for now)
+/*
 if p.mod != 'main' {
 p.genln('}')
 return
 }
+*/
 p.genln('}')
-p.check_unused_variables()
-p.set_current_fn( EmptyFn )
+if !p.builtin_mod && p.mod != 'os' {
+p.check_unused_and_mut_vars()
+}
+p.set_current_fn(EmptyFn)
 p.returns = false
 }
 [inline]
@@ -611,7 +614,7 @@ fn (p &Parser) get_linkage_prefix() string {
 }
 }
-fn (p mut Parser) check_unused_variables() {
+fn (p mut Parser) check_unused_and_mut_vars() {
 for var in p.local_vars {
 if var.name == '' {
 break
@@ -1365,7 +1368,7 @@ fn (p mut Parser) fn_call_vargs(f Fn) (string, []string) {
 return '', []string
 }
 last_arg := f.args.last()
-mut varg_def_type := last_arg.typ[3..]
+//varg_def_type := last_arg.typ[3..]
 mut types := []string
 mut values := []string
 for p.tok != .rpar {
@@ -1450,7 +1453,7 @@ fn (p mut Parser) dispatch_generic_fn_instance(f mut Fn, ti &TypeInst) {
 }
 if !new_inst {
 rename_generic_fn_instance(mut f, ti)
-_f := p.table.find_fn(f.name) or {
+_ = p.table.find_fn(f.name) or {
 p.error('function instance `$f.name` not found')
 return
 }

View File

@@ -12,7 +12,7 @@ fn (p mut Parser) get_type2() Type{
 mut mul := false
 mut nr_muls := 0
 mut typ := ''
-mut cat := TypeCategory.struct_
+cat := TypeCategory.struct_
 // multiple returns
 if p.tok == .lpar {
 //p.warn('`()` are no longer necessary in multiple returns' +

View File

@@ -133,7 +133,7 @@ pub mut:
 }
 // Should be called by main at the end of the compilation process, to cleanup
-pub fn (v mut V) finalize_compilation(){
+pub fn (v &V) finalize_compilation(){
 // TODO remove
 if v.pref.autofree {
 /*
@@ -1184,7 +1184,7 @@ pub fn set_vroot_folder(vroot_path string) {
 // Preparation for the compiler module:
 // VEXE env variable is needed so that compiler.vexe_path()
 // can return it later to whoever needs it:
-mut vname := if os.user_os() == 'windows' { 'v.exe' } else { 'v' }
+vname := if os.user_os() == 'windows' { 'v.exe' } else { 'v' }
 os.setenv('VEXE', os.realpath( [vroot_path, vname].join(os.path_separator) ), true)
 }

View File

@@ -45,7 +45,7 @@ fn generate_vh(mod string) {
 out.writeln('module $mod_def\n')
 // Consts
 println(full_mod_path)
-mut vfiles := os.walk_ext(full_mod_path, '.v')
+vfiles := os.walk_ext(full_mod_path, '.v')
 //mut vfiles := os.ls(full_mod_path) or {
 //exit(1)
 //}

View File

@@ -282,7 +282,7 @@ fn (p &Parser) log(s string) {
 */
 }
-pub fn (p mut Parser) save_state() ParserState {
+pub fn (p &Parser) save_state() ParserState {
 return ParserState{
 scanner_line_nr: p.scanner.line_nr
 scanner_text : p.scanner.text
@@ -1577,7 +1577,7 @@ fn (p mut Parser) var_decl() {
 p.gen('$var_name = ${p.var_decl_name}.var_$i')
 continue
 }
-// decleration
+// declaration
 p.gen('$var_type $var_name = ${p.var_decl_name}.var_$i')
 }
 p.register_var(Var {
@@ -1722,6 +1722,12 @@ fn (p mut Parser) var_expr(v Var) string {
 // println('var expr is_tmp=$p.cgen.is_tmp\n')
 if !v.is_const {
 p.mark_var_used(v)
+// `C.foo(&var)` means that `var` is changed. Mark it as changed
+// to avoid `var was declared as mutable but was never changed` errors.
+if p.calling_c && !v.is_changed {
+//println('marking C var changed: $v.name')
+p.mark_var_changed(v)
+}
 }
 fn_ph := p.cgen.add_placeholder()
 p.expr_var = v
@@ -1806,10 +1812,10 @@ fn (p mut Parser) dot(str_typ_ string, method_ph int) string {
 //if p.fileis('orm_test') {
 //println('ORM dot $str_typ')
 //}
-mut str_typ := str_typ_
+str_typ := str_typ_
 p.check(.dot)
 is_variadic_arg := str_typ.starts_with('varg_')
-mut typ := p.find_type(str_typ)
+typ := p.find_type(str_typ)
 if typ.name.len == 0 {
 p.error('dot(): cannot find type `$str_typ`')
 }
@@ -2513,7 +2519,6 @@ fn (p mut Parser) array_init() string {
 mut typ := ''
 new_arr_ph := p.cgen.add_placeholder()
 mut i := 0
-pos := p.cgen.cur_line.len// remember cur line to fetch first number in cgen for [0; 10]
 for p.tok != .rsbr {
 val_typ := p.bool_expression()
 // Get the type of the first expression
@@ -3095,7 +3100,7 @@ fn (p mut Parser) check_unused_imports() {
 p.production_error_with_token_index( 'the following imports were never used: $output', 0 )
 }
-fn (p mut Parser) is_expr_fn_call(start_tok_idx int) (bool, string) {
+fn (p &Parser) is_expr_fn_call(start_tok_idx int) (bool, string) {
 mut expr := p.tokens[start_tok_idx-1].str()
 mut is_fn_call := p.tokens[start_tok_idx].tok == .lpar
 if !is_fn_call {
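
The `var_expr` hunk above also teaches the new mutability check about C interop: passing a mutable local by reference to a C call now counts as a modification. A rough sketch under that assumption (the C function below is hypothetical, for illustration only):

fn read_value() int {
	mut n := 0
	// passing `&n` to a C call marks `n` as changed, so this no longer
	// triggers `n was declared as mutable but was never changed`
	C.some_getter(&n) // hypothetical extern C function
	return n
}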

View File

@@ -121,12 +121,12 @@ fn (p mut Parser) fnext() {
 if p.tok == .rcbr && !p.inside_if_expr && p.prev_tok != .lcbr {
 p.fmt_dec()
 }
-mut s := p.strtok()
+s := p.strtok()
 if p.tok != .eof {
 p.fgen(s)
 }
 // vfmt: increase indentation on `{` unless it's `{}`
-mut inc_indent := false
+inc_indent := false
 if p.tok == .lcbr && !p.inside_if_expr && p.peek() != .rcbr {
 p.fgen_nl()
 p.fmt_inc()
@@ -139,7 +139,7 @@ fn (p mut Parser) fnext() {
 p.fgen_nl()
 p.fgen_nl()
 }
-is_rcbr := p.tok == .rcbr
+//is_rcbr := p.tok == .rcbr
 for p.token_idx < p.tokens.len - 1 {
 i := p.token_idx
 tok := p.tokens[p.token_idx].tok

View File

@@ -162,7 +162,7 @@ fn (g mut Gen) mov64(reg Register, val i64) {
 fn (g mut Gen) call(addr int) {
 //rel := g.abs_to_rel_addr(addr)
-rel := 0xffffffff - int(abs(addr - g.buf.len))-1
+//rel := 0xffffffff - int(abs(addr - g.buf.len))-1
 println('call addr=$addr rel_addr=$addr pos=$g.buf.len')
 g.write8(0xe8)

View File

@@ -11,7 +11,7 @@ import (
 crypto.internal.subtle
 )
-const (
+pub const (
 // The AES block size in bytes.
 block_size = 16
 )

View File

@@ -48,7 +48,7 @@ pub fn new_cbc(b AesCipher, iv []byte) AesCbc {
 pub fn (x &AesCbc) block_size() int { return x.block_size }
-pub fn (x mut AesCbc) encrypt_blocks(dst mut []byte, src_ []byte) {
+pub fn (x &AesCbc) encrypt_blocks(dst mut []byte, src_ []byte) {
 mut src := src_
 if src.len%x.block_size != 0 {
 panic('crypto.cipher: input not full blocks')
@@ -124,7 +124,7 @@ pub fn (x mut AesCbc) decrypt_blocks(dst mut []byte, src []byte) {
 x.tmp = x.iv
 }
-fn (x mut AesCbc) set_iv(iv []byte) {
+fn (x &AesCbc) set_iv(iv []byte) {
 if iv.len != x.iv.len {
 panic('cipher: incorrect length IV')
 }

View File

@@ -20,7 +20,7 @@ fn test_crypto_aes() {
 if ciphertext.len%aes.block_size != 0 {
 panic('ciphertext is not a multiple of the block size')
 }
-mut mode := aes.new_cbc(block, iv)
+mode := aes.new_cbc(block, iv)
 mode.encrypt_blocks(mut ciphertext, ciphertext)
 assert ciphertext.hex() == 'c210459b514668ddc44674885e4979215265a6c44431a248421254ef357a8c2a308a8bddf5623af9df91737562041cf1'

View File

@@ -14,7 +14,7 @@ module md5
 import encoding.binary
-const (
+pub const (
 // The size of an MD5 checksum in bytes.
 size = 16
 // The blocksize of MD5 in bytes.

View File

@@ -14,7 +14,7 @@ module sha1
 import encoding.binary
-const(
+pub const(
 // The size of a SHA-1 checksum in bytes.
 size = 20
 // The blocksize of SHA-1 in bytes.

View File

@@ -12,7 +12,7 @@ module sha256
 import encoding.binary
-const (
+pub const (
 // The size of a SHA256 checksum in bytes.
 size = 32
 // The size of a SHA224 checksum in bytes.
@@ -194,7 +194,7 @@ pub fn sum224(data []byte) []byte {
 mut d := new224()
 d.write(data)
 sum := d.checksum()
-mut sum224 := [byte(0)].repeat(size224)
+sum224 := [byte(0)].repeat(size224)
 copy(sum224, sum[..size224])
 return sum224
 }

View File

@@ -15,7 +15,7 @@ import (
 encoding.binary
 )
-const (
+pub const (
 // size is the size, in bytes, of a SHA-512 checksum.
 size = 64
 // size224 is the size, in bytes, of a SHA-512/224 checksum.
@@ -183,7 +183,7 @@ fn (d mut Digest) write(p_ []byte) int {
 return nn
 }
-fn (d mut Digest) sum(b_in []byte) []byte {
+fn (d &Digest) sum(b_in []byte) []byte {
 // Make a copy of d so that caller can keep writing and summing.
 mut d0 := *d
 hash := d0.checksum()
@@ -264,7 +264,7 @@ pub fn sum384(data []byte) []byte {
 mut d := new_digest(.sha384)
 d.write(data)
 sum := d.checksum()
-mut sum384 := [byte(0)].repeat(size384)
+sum384 := [byte(0)].repeat(size384)
 copy(sum384, sum[..size384])
 return sum384
 }
@@ -274,7 +274,7 @@ pub fn sum512_224(data []byte) []byte {
 mut d := new_digest(.sha512_224)
 d.write(data)
 sum := d.checksum()
-mut sum224 := [byte(0)].repeat(size224)
+sum224 := [byte(0)].repeat(size224)
 copy(sum224, sum[..size224])
 return sum224
 }
@@ -284,7 +284,7 @@ pub fn sum512_256(data []byte) []byte {
 mut d := new_digest(.sha512_256)
 d.write(data)
 sum := d.checksum()
-mut sum256 := [byte(0)].repeat(size256)
+sum256 := [byte(0)].repeat(size256)
 copy(sum256, sum[..size256])
 return sum256
 }

View File

@@ -140,8 +140,8 @@ pub fn new_context(cfg gg.Cfg) &FreeType {
 if !cfg.use_ortho {
 return &FreeType{}
 }
-mut width := cfg.width * scale
-mut height := cfg.height * scale
+width := cfg.width * scale
+height := cfg.height * scale
 font_size := cfg.font_size * scale
 // exit('fs=$font_size')
 // if false {
@@ -221,7 +221,7 @@ pub fn new_context(cfg gg.Cfg) &FreeType {
 // # glVertexAttribPointer(0, 4, GL_FLOAT,false, 4 * sizeof(GLf32), 0);
 // gl.bind_buffer(GL_ARRAY_BUFFER, uint(0))
 // # glBindVertexArray(0);
-mut ctx := &FreeType {
+ctx := &FreeType {
 shader: shader
 width: width
 height: height

View File

@@ -275,8 +275,8 @@ fn todo_remove_me(cfg Cfg, scale int) {
 if !cfg.use_ortho {
 return
 }
-mut width := cfg.width * scale
-mut height := cfg.height * scale
+width := cfg.width * scale
+height := cfg.height * scale
 font_size := cfg.font_size * scale
 gl.enable(C.GL_BLEND)
 //# glBlendFunc(C.GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);

View File

@@ -92,8 +92,8 @@ pub fn new_shader(name string) Shader {
 if name.starts_with('/') {
 dir = ''
 }
-vertex_path := '${dir}${name}.vert'
-fragment_path := '${dir}${name}.frag'
+//vertex_path := '${dir}${name}.vert'
+//fragment_path := '${dir}${name}.frag'
 //println('shader path=$vertex_path,\n fpath="$fragment_path"')
 // vertex_src := os.read_file(vertex_path.trim_space())
 mut vertex_src := ''

View File

@@ -58,6 +58,6 @@ pub fn new(poly int) &Crc32 {
 // calculate crc32 using ieee
 pub fn sum(b []byte) u32 {
-mut c := new(ieee)
+c := new(ieee)
 return c.sum32(b)
 }

View File

@@ -84,7 +84,7 @@ fn (req &Request) ssl_do(port int, method, host_name, path string) ?Response {
 res = C.SSL_set_tlsext_host_name(ssl, host_name.str)
 res = C.BIO_do_connect(web)
 res = C.BIO_do_handshake(web)
-cert := C.SSL_get_peer_certificate(ssl)
+C.SSL_get_peer_certificate(ssl)
 res = C.SSL_get_verify_result(ssl)
 ///////
 s := req.build_request_headers(method, host_name, path)

View File

@@ -137,8 +137,8 @@ pub fn (req &Request) do() ?Response {
 fn (req &Request) method_and_url_to_response(method string, url net_dot_urllib.URL) ?Response {
 host_name := url.hostname()
 scheme := url.scheme
-mut p := url.path.trim_left('/')
-mut path := if url.query().size > 0 { '/$p?${url.query().encode()}' } else { '/$p' }
+p := url.path.trim_left('/')
+path := if url.query().size > 0 { '/$p?${url.query().encode()}' } else { '/$p' }
 mut nport := url.port().int()
 if nport == 0 {
 if scheme == 'http' { nport = 80 }

View File

@@ -80,7 +80,7 @@ pub fn (l mut Log) set_output_path(output_file_path string) {
 if l.ofile.is_opened() { l.ofile.close() }
 l.output_to_file = true
 l.output_file_name = filepath.join( os.realpath( output_file_path ) , l.output_label )
-mut ofile := os.open_append( l.output_file_name ) or {
+ofile := os.open_append( l.output_file_name ) or {
 panic('error while opening log file ${l.output_file_name} for appending')
 }
 l.ofile = ofile
@@ -96,7 +96,7 @@ fn (l mut Log) log_file(s string, level LogLevel) {
 l.ofile.writeln('$timestamp [$e] $s')
 }
-fn (l mut Log) log_cli(s string, level LogLevel) {
+fn (l &Log) log_cli(s string, level LogLevel) {
 f := tag(level)
 t := time.now()
 println('[$f ${t.format_ss()}] $s')

View File

@@ -134,10 +134,10 @@ pub fn listen(port int) ?Socket {
 s := new_socket(C.AF_INET, C.SOCK_STREAM, 0) or {
 return error(err)
 }
-bind_res := s.bind(port) or {
+_ = s.bind(port) or {
 return error(err)
 }
-listen_res := s.listen() or {
+_ = s.listen() or {
 return error(err)
 }
 return s
@@ -196,7 +196,7 @@ pub fn dial(address string, port int) ?Socket {
 s := new_socket(C.AF_INET, C.SOCK_STREAM, 0) or {
 return error(err)
 }
-res := s.connect(address, port) or {
+_ = s.connect(address, port) or {
 return error(err)
 }
 return s

View File

@@ -260,7 +260,7 @@ fn escape(s string, mode EncodingMode) string {
 return s
 }
-mut buf := [byte(0)].repeat(64)
+buf := [byte(0)].repeat(64)
 mut t := []byte
 required := s.len + 2*hex_count
@@ -677,7 +677,7 @@ fn (u mut URL) set_path(p string) ?bool {
 // reading u.raw_path directly.
 fn (u &URL) escaped_path() string {
 if u.raw_path != '' && valid_encoded_path(u.raw_path) {
-p := unescape(u.raw_path, .encode_path)
+unescape(u.raw_path, .encode_path) or { return '' }
 return u.raw_path
 }
 if u.path == '*' {

View File

@@ -15,7 +15,7 @@ const (
 pub fn compile_template(path string) string {
 //lines := os.read_lines(path)
-mut html := os.read_file(path) or {
+html := os.read_file(path) or {
 panic('html failed')
 }
 mut header := ''
@@ -27,7 +27,7 @@ pub fn compile_template(path string) string {
 }
 lines := html.split_into_lines()
 mut s := strings.new_builder(1000)
-base := path.all_after('/').replace('.html', '')
+//base := path.all_after('/').replace('.html', '')
 s.writeln('
 mut sb := strings.new_builder(${lines.len * 30})
 header := \'$header\'

View File

@@ -44,6 +44,7 @@ mut:
 }
 pub fn (ctx Context) html(html string) {
+//println('$html HTTP/1.1 200 OK\r\nContent-Type: text/html\r\n$ctx.headers\r\n\r\n$html')
 ctx.conn.write('HTTP/1.1 200 OK\r\nContent-Type: text/html\r\n$ctx.headers\r\n\r\n$html') or { panic(err) }
 }
@@ -64,10 +65,11 @@ pub fn (ctx Context) not_found(s string) {
 }
 pub fn (ctx mut Context) set_cookie(key, val string) { // TODO support directives, escape cookie value (https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Set-Cookie)
+//println('Set-Cookie $key=$val')
 ctx.add_header('Set-Cookie', '$key=$val')
 }
-pub fn (ctx mut Context) get_cookie(key string) ?string { // TODO refactor
+pub fn (ctx &Context) get_cookie(key string) ?string { // TODO refactor
 cookie_header := ctx.get_header('Cookie')
 cookie := if cookie_header.contains(';') {
 cookie_header.find_between('$key=', ';')
@@ -81,10 +83,13 @@ pub fn (ctx mut Context) get_cookie(key string) ?string { // TODO refactor
 }
 fn (ctx mut Context) add_header(key, val string) {
-ctx.headers = ctx.headers + if ctx.headers == '' { '$key: val' } else { '\r\n$key: val' }
+//println('add_header($key, $val)')
+ctx.headers = ctx.headers +
+if ctx.headers == '' { '$key: $val' } else { '\r\n$key: $val' }
+//println(ctx.headers)
 }
-fn (ctx mut Context) get_header(key string) string {
+fn (ctx &Context) get_header(key string) string {
 return ctx.headers.find_between('\r\n$key: ', '\r\n')
 }