
check unused and unmodified vars in all modules, not just main

Alexander Medvednikov 2019-12-06 15:24:53 +03:00
parent bdaa421e8a
commit c8d111924d
31 changed files with 133 additions and 119 deletions
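
As a rough sketch of what the new check reports (a hypothetical module, not part of this commit), the compiler would now flag both of the following declarations in any module, not only in `main`:

module mymath

// Hypothetical illustration: after this commit, compiling a library module like
// this would report `unused` as declared and not used, and `total` as declared
// mutable but never changed, since the checks no longer apply to `main` only
// (builtin and `os` are still skipped).
pub fn double(x int) int {
	unused := 42
	mut total := x
	return total * 2
}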

View File

@ -1 +1,2 @@
println('Hello, World!')

View File

@ -81,11 +81,11 @@ pub fn (b mut Benchmark) neither_fail_nor_ok() {
b.step_end_time = benchmark.now()
}
pub fn (b mut Benchmark) step_message(msg string) string {
pub fn (b &Benchmark) step_message(msg string) string {
return b.tdiff_in_ms(msg, b.step_start_time, b.step_end_time)
}
pub fn (b mut Benchmark) total_message(msg string) string {
pub fn (b &Benchmark) total_message(msg string) string {
mut tmsg := '$msg \n ok, fail, total = ' +
term.ok_message('${b.nok:5d}') + ', ' +
if b.nfail > 0 { term.fail_message('${b.nfail:5d}') } else { '${b.nfail:5d}' } + ', ' +
@ -96,12 +96,12 @@ pub fn (b mut Benchmark) total_message(msg string) string {
return b.tdiff_in_ms(tmsg, b.bench_start_time, b.bench_end_time)
}
pub fn (b mut Benchmark) total_duration() i64 {
pub fn (b &Benchmark) total_duration() i64 {
return (b.bench_end_time - b.bench_start_time)
}
////////////////////////////////////////////////////////////////////
fn (b mut Benchmark) tdiff_in_ms(s string, sticks i64, eticks i64) string {
fn (b &Benchmark) tdiff_in_ms(s string, sticks i64, eticks i64) string {
if b.verbose {
tdiff := (eticks - sticks)
return '${tdiff:6lld} ms | $s'

View File

@ -43,7 +43,7 @@ mut:
fn new_cgen(out_name_c string) &CGen {
path := out_name_c
mut out := os.create(path) or {
out := os.create(path) or {
println('failed to create $path')
return &CGen{}
}
@ -192,7 +192,7 @@ fn (g mut CGen) register_thread_fn(wrapper_name, wrapper_text, struct_text strin
}
fn (v &V) prof_counters() string {
mut res := []string
res := []string
// Global fns
//for f in c.table.fns {
//res << 'double ${c.table.cgen_name(f)}_time;'
@ -212,7 +212,7 @@ fn (v &V) prof_counters() string {
}
fn (p &Parser) print_prof_counters() string {
mut res := []string
res := []string
// Global fns
//for f in p.table.fns {
//counter := '${p.table.cgen_name(f)}_time'
@ -410,10 +410,10 @@ fn (v &V) interface_table() string {
sb.writeln('// NR methods = $t.gen_types.len')
for i, gen_type in t.gen_types {
methods += '{'
for i, method in t.methods {
for j, method in t.methods {
// Cat_speak
methods += '${gen_type}_${method.name}'
if i < t.methods.len - 1 {
if j < t.methods.len - 1 {
methods += ', '
}
}

View File

@ -213,8 +213,8 @@ fn (s mut Scanner) goto_scanner_position(scp ScannerPos) {
s.last_nl_pos = scp.last_nl_pos
}
fn (s mut Scanner) get_last_nl_from_pos(_pos int) int {
mut pos := if _pos >= s.text.len { s.text.len-1 } else { _pos }
fn (s &Scanner) get_last_nl_from_pos(_pos int) int {
pos := if _pos >= s.text.len { s.text.len-1 } else { _pos }
for i := pos; i >= 0; i-- {
if s.text[i] == `\n` {
return i
@ -223,7 +223,7 @@ fn (s mut Scanner) get_last_nl_from_pos(_pos int) int {
return 0
}
fn (s mut Scanner) get_scanner_pos_of_token(tok &Token) ScannerPos {
fn (s &Scanner) get_scanner_pos_of_token(tok &Token) ScannerPos {
return ScannerPos{
pos: tok.pos
line_nr: tok.line_nr

View File

@ -265,7 +265,7 @@ fn (p mut Parser) comptime_method_call(typ Type) {
p.check(.dollar)
var := p.check_name()
mut j := 0
for i, method in typ.methods {
for method in typ.methods {
if method.typ != 'void' {
continue

View File

@ -392,7 +392,7 @@ fn (p mut Parser) expression() string {
//p.print_tok()
//}
ph := p.cgen.add_placeholder()
mut typ := p.indot_expr()
typ := p.indot_expr()
is_str := typ=='string'
is_ustr := typ=='ustring'
// `a << b` ==> `array_push(&a, b)`

View File

@ -72,7 +72,7 @@ fn (a []TypeInst) str() string {
return r.str()
}
fn (p mut Parser) find_var_or_const(name string) ?Var {
fn (p &Parser) find_var_or_const(name string) ?Var {
if p.known_var(name) {
return p.find_var(name)
}
@ -150,14 +150,14 @@ fn (p mut Parser) mark_arg_moved(v Var) {
p.table.fns[p.cur_fn.name] = p.cur_fn
}
fn (p mut Parser) known_var(name string) bool {
fn (p &Parser) known_var(name string) bool {
_ = p.find_var(name) or {
return false
}
return true
}
fn (p mut Parser) known_var_check_new_var(name string) bool {
fn (p &Parser) known_var_check_new_var(name string) bool {
_ = p.find_var_check_new_var(name) or {
return false
}
@ -561,15 +561,18 @@ fn (p mut Parser) fn_decl() {
// p.error('unclosed {')
}
// Make sure all vars in this function are used (only in main for now)
/*
if p.mod != 'main' {
p.genln('}')
return
}
*/
p.genln('}')
p.check_unused_variables()
p.set_current_fn( EmptyFn )
if !p.builtin_mod && p.mod != 'os' {
p.check_unused_and_mut_vars()
}
p.set_current_fn(EmptyFn)
p.returns = false
}
[inline]
@ -611,7 +614,7 @@ fn (p &Parser) get_linkage_prefix() string {
}
}
fn (p mut Parser) check_unused_variables() {
fn (p mut Parser) check_unused_and_mut_vars() {
for var in p.local_vars {
if var.name == '' {
break
@ -1365,7 +1368,7 @@ fn (p mut Parser) fn_call_vargs(f Fn) (string, []string) {
return '', []string
}
last_arg := f.args.last()
mut varg_def_type := last_arg.typ[3..]
//varg_def_type := last_arg.typ[3..]
mut types := []string
mut values := []string
for p.tok != .rpar {
@ -1450,7 +1453,7 @@ fn (p mut Parser) dispatch_generic_fn_instance(f mut Fn, ti &TypeInst) {
}
if !new_inst {
rename_generic_fn_instance(mut f, ti)
_f := p.table.find_fn(f.name) or {
_ = p.table.find_fn(f.name) or {
p.error('function instance `$f.name` not found')
return
}

View File

@ -12,7 +12,7 @@ fn (p mut Parser) get_type2() Type{
mut mul := false
mut nr_muls := 0
mut typ := ''
mut cat := TypeCategory.struct_
cat := TypeCategory.struct_
// multiple returns
if p.tok == .lpar {
//p.warn('`()` are no longer necessary in multiple returns' +

View File

@ -133,7 +133,7 @@ pub mut:
}
// Should be called by main at the end of the compilation process, to cleanup
pub fn (v mut V) finalize_compilation(){
pub fn (v &V) finalize_compilation(){
// TODO remove
if v.pref.autofree {
/*
@ -1184,7 +1184,7 @@ pub fn set_vroot_folder(vroot_path string) {
// Preparation for the compiler module:
// VEXE env variable is needed so that compiler.vexe_path()
// can return it later to whoever needs it:
mut vname := if os.user_os() == 'windows' { 'v.exe' } else { 'v' }
vname := if os.user_os() == 'windows' { 'v.exe' } else { 'v' }
os.setenv('VEXE', os.realpath( [vroot_path, vname].join(os.path_separator) ), true)
}

View File

@ -23,8 +23,8 @@ mut:
fns strings.Builder
types strings.Builder
tokens []Token
}
}
// `mod` == "vlib/os"
fn generate_vh(mod string) {
@ -45,10 +45,10 @@ fn generate_vh(mod string) {
out.writeln('module $mod_def\n')
// Consts
println(full_mod_path)
mut vfiles := os.walk_ext(full_mod_path, '.v')
vfiles := os.walk_ext(full_mod_path, '.v')
//mut vfiles := os.ls(full_mod_path) or {
//exit(1)
//}
//}
filtered := vfiles.filter(it.ends_with('.v') && !it.ends_with('test.v') &&
!it.ends_with('_windows.v') && !it.ends_with('_win.v') &&
!it.ends_with('_lin.v') &&
@ -74,20 +74,20 @@ fn generate_vh(mod string) {
for ; g.i < p.tokens.len; g.i++ {
if !p.tokens[g.i].tok.is_decl() {
continue
}
}
match g.tokens[g.i].tok {
.key_fn { g.generate_fn() }
.key_const { g.generate_const() }
.key_struct { g.generate_type() }
.key_type { g.generate_alias() }
}
}
}
}
}
}
result :=
g.types.str() +
g.consts.str() +
g.fns.str().replace('\n\n\n', '\n').replace('\n\n', '\n')
out.writeln(result.replace('[ ] ', '[]').replace('? ', '?'))
out.close()
}
@ -95,22 +95,22 @@ fn generate_vh(mod string) {
fn (g mut VhGen) generate_fn() {
if g.i >= g.tokens.len - 2 {
return
}
}
mut next := g.tokens[g.i+1]
if g.i > 0 && g.tokens[g.i-1].tok != .key_pub {
// Skip private fns
//return ''
}
if next.tok == .name && next.lit == 'C' {
//println('skipping C')
return
}
}
//out.write('pub ')
mut tok := g.tokens[g.i]
for g.i < g.tokens.len - 1 && tok.tok != .lcbr {
next = g.tokens[g.i+1]
g.fns.write(tok.str())
if tok.tok != .lpar && !(next.tok in [.comma, .rpar]) {
// No space after (), [], etc
@ -118,10 +118,10 @@ fn (g mut VhGen) generate_fn() {
}
g.i++
tok = g.tokens[g.i]
}
}
g.fns.writeln('')
//g.i--
}
}
fn (g mut VhGen) generate_alias() {
mut tok := g.tokens[g.i]
@ -130,7 +130,7 @@ fn (g mut VhGen) generate_alias() {
g.types.write(' ')
if tok.line_nr != g.tokens[g.i+1].line_nr {
break
}
}
g.i++
tok = g.tokens[g.i]
}
@ -145,7 +145,7 @@ fn (g mut VhGen) generate_const() {
g.consts.write(' ')
if g.tokens[g.i+2].tok == .assign {
g.consts.write('\n\t')
}
}
g.i++
tok = g.tokens[g.i]
}
@ -161,7 +161,7 @@ fn (g mut VhGen) generate_type() {
g.types.write(' ')
if g.tokens[g.i+1].line_nr != g.tokens[g.i].line_nr {
g.types.write('\n\t')
}
}
g.i++
tok = g.tokens[g.i]
}

View File

@ -282,7 +282,7 @@ fn (p &Parser) log(s string) {
*/
}
pub fn (p mut Parser) save_state() ParserState {
pub fn (p &Parser) save_state() ParserState {
return ParserState{
scanner_line_nr: p.scanner.line_nr
scanner_text : p.scanner.text
@ -1577,7 +1577,7 @@ fn (p mut Parser) var_decl() {
p.gen('$var_name = ${p.var_decl_name}.var_$i')
continue
}
// decleration
// declaration
p.gen('$var_type $var_name = ${p.var_decl_name}.var_$i')
}
p.register_var(Var {
@ -1722,6 +1722,12 @@ fn (p mut Parser) var_expr(v Var) string {
// println('var expr is_tmp=$p.cgen.is_tmp\n')
if !v.is_const {
p.mark_var_used(v)
// `C.foo(&var)` means that `var` is changed. Mark it as changed
// to avoid `var was declared as mutable but was never changed` errors.
if p.calling_c && !v.is_changed {
//println('marking C var changed: $v.name')
p.mark_var_changed(v)
}
}
fn_ph := p.cgen.add_placeholder()
p.expr_var = v
@ -1806,10 +1812,10 @@ fn (p mut Parser) dot(str_typ_ string, method_ph int) string {
//if p.fileis('orm_test') {
//println('ORM dot $str_typ')
//}
mut str_typ := str_typ_
str_typ := str_typ_
p.check(.dot)
is_variadic_arg := str_typ.starts_with('varg_')
mut typ := p.find_type(str_typ)
typ := p.find_type(str_typ)
if typ.name.len == 0 {
p.error('dot(): cannot find type `$str_typ`')
}
@ -2513,7 +2519,6 @@ fn (p mut Parser) array_init() string {
mut typ := ''
new_arr_ph := p.cgen.add_placeholder()
mut i := 0
pos := p.cgen.cur_line.len// remember cur line to fetch first number in cgen for [0; 10]
for p.tok != .rsbr {
val_typ := p.bool_expression()
// Get the type of the first expression
@ -3095,7 +3100,7 @@ fn (p mut Parser) check_unused_imports() {
p.production_error_with_token_index( 'the following imports were never used: $output', 0 )
}
fn (p mut Parser) is_expr_fn_call(start_tok_idx int) (bool, string) {
fn (p &Parser) is_expr_fn_call(start_tok_idx int) (bool, string) {
mut expr := p.tokens[start_tok_idx-1].str()
mut is_fn_call := p.tokens[start_tok_idx].tok == .lpar
if !is_fn_call {

View File

@ -13,7 +13,7 @@ fn (scanner mut Scanner) fgen(s_ string) {
if scanner.fmt_line_empty {
s = strings.repeat(`\t`, scanner.fmt_indent) + s
}
//scanner.fmt_out << s
scanner.fmt_out.write(s)
scanner.fmt_line_empty = false
@ -42,7 +42,7 @@ fn (scanner mut Scanner) fgen_nl() {
fn (p mut Parser) fgen(s string) {
if p.pass != .main {
return
}
}
p.scanner.fgen(s)
}
@ -50,7 +50,7 @@ fn (p mut Parser) fgen(s string) {
fn (p mut Parser) fspace() {
if p.first_pass() {
return
}
}
p.fgen(' ')
}
@ -59,7 +59,7 @@ fn (p mut Parser) fspace() {
fn (p mut Parser) fgenln(s string) {
if p.pass != .main {
return
}
}
p.scanner.fgenln(s)
}
@ -67,11 +67,11 @@ fn (p mut Parser) fgenln(s string) {
fn (p mut Parser) fgen_nl() {
if p.pass != .main {
return
}
}
println(p.tok)
if p.prev_tok == .line_comment {
return
}
}
p.scanner.fgen_nl()
}
@ -92,7 +92,7 @@ fn (p mut Parser) peek() TokenKind {
fn (p mut Parser) fmt_inc() {
if p.pass != .main {
return
}
}
p.scanner.fmt_indent++
}
@ -100,7 +100,7 @@ fn (p mut Parser) fmt_inc() {
fn (p mut Parser) fmt_dec() {
if p.pass != .main {
return
}
}
p.scanner.fmt_indent--
}
@ -121,31 +121,31 @@ fn (p mut Parser) fnext() {
if p.tok == .rcbr && !p.inside_if_expr && p.prev_tok != .lcbr {
p.fmt_dec()
}
mut s := p.strtok()
s := p.strtok()
if p.tok != .eof {
p.fgen(s)
}
// vfmt: increase indentation on `{` unless it's `{}`
mut inc_indent := false
inc_indent := false
if p.tok == .lcbr && !p.inside_if_expr && p.peek() != .rcbr {
p.fgen_nl()
p.fmt_inc()
}
// Skip comments and add them to vfmt output
if p.tokens[p.token_idx].tok in [.line_comment, .mline_comment] {
// Newline before the comment and after consts and closing }
if p.inside_const {
p.fgen_nl()
p.fgen_nl()
}
is_rcbr := p.tok == .rcbr
}
//is_rcbr := p.tok == .rcbr
for p.token_idx < p.tokens.len - 1 {
i := p.token_idx
tok := p.tokens[p.token_idx].tok
if tok != .line_comment && tok != .mline_comment {
break
}
}
comment_token := p.tokens[p.token_idx]
next := p.tokens[p.token_idx+1]
comment_on_new_line := p.token_idx == 0 ||
@ -155,11 +155,11 @@ fn (p mut Parser) fnext() {
if i > 0 && p.tokens[i-1].tok != .line_comment &&
comment_token.line_nr > p.tokens[i-1].line_nr {
p.fgen_nl()
}
}
if tok == .line_comment {
if !comment_on_new_line { //prev_token.line_nr < comment_token.line_nr {
p.fgen(' ')
}
}
p.fgen('// ' + comment)
/*
if false && i > 0 {
@ -168,19 +168,19 @@ fn (p mut Parser) fnext() {
'line_nr=$comment_token.line_nr next=${next.str()} next_line_nr=$next.line_nr')
}
*/
} else {
p.fgen(comment)
}
}
if next.tok == .line_comment && comment_token.line_nr < next.line_nr {
p.fgen_nl()
}
p.token_idx++
}
}
if inc_indent {
p.fgen_nl()
}
}
}
}
@ -192,13 +192,13 @@ fn (p mut Parser) gen_fmt() {
}
if p.file_name == '' {
return
}
}
//s := p.scanner.fmt_out.str().replace('\n\n\n', '\n').trim_space()
s := p.scanner.fmt_out.str().trim_space()
//s := p.scanner.fmt_out.join('').trim_space()
if s == '' {
return
}
}
println('generating ${p.file_name}.v')
mut out := os.create('/var/tmp/fmt/' + p.file_name) or {
verror('failed to create fmt.v')

View File

@ -162,7 +162,7 @@ fn (g mut Gen) mov64(reg Register, val i64) {
fn (g mut Gen) call(addr int) {
//rel := g.abs_to_rel_addr(addr)
rel := 0xffffffff - int(abs(addr - g.buf.len))-1
//rel := 0xffffffff - int(abs(addr - g.buf.len))-1
println('call addr=$addr rel_addr=$addr pos=$g.buf.len')
g.write8(0xe8)

View File

@ -11,7 +11,7 @@ import (
crypto.internal.subtle
)
const (
pub const (
// The AES block size in bytes.
block_size = 16
)

View File

@ -48,7 +48,7 @@ pub fn new_cbc(b AesCipher, iv []byte) AesCbc {
pub fn (x &AesCbc) block_size() int { return x.block_size }
pub fn (x mut AesCbc) encrypt_blocks(dst mut []byte, src_ []byte) {
pub fn (x &AesCbc) encrypt_blocks(dst mut []byte, src_ []byte) {
mut src := src_
if src.len%x.block_size != 0 {
panic('crypto.cipher: input not full blocks')
@ -124,7 +124,7 @@ pub fn (x mut AesCbc) decrypt_blocks(dst mut []byte, src []byte) {
x.tmp = x.iv
}
fn (x mut AesCbc) set_iv(iv []byte) {
fn (x &AesCbc) set_iv(iv []byte) {
if iv.len != x.iv.len {
panic('cipher: incorrect length IV')
}

View File

@ -20,7 +20,7 @@ fn test_crypto_aes() {
if ciphertext.len%aes.block_size != 0 {
panic('ciphertext is not a multiple of the block size')
}
mut mode := aes.new_cbc(block, iv)
mode := aes.new_cbc(block, iv)
mode.encrypt_blocks(mut ciphertext, ciphertext)
assert ciphertext.hex() == 'c210459b514668ddc44674885e4979215265a6c44431a248421254ef357a8c2a308a8bddf5623af9df91737562041cf1'

View File

@ -14,7 +14,7 @@ module md5
import encoding.binary
const (
pub const (
// The size of an MD5 checksum in bytes.
size = 16
// The blocksize of MD5 in bytes.

View File

@ -14,7 +14,7 @@ module sha1
import encoding.binary
const(
pub const(
// The size of a SHA-1 checksum in bytes.
size = 20
// The blocksize of SHA-1 in bytes.

View File

@ -12,7 +12,7 @@ module sha256
import encoding.binary
const (
pub const (
// The size of a SHA256 checksum in bytes.
size = 32
// The size of a SHA224 checksum in bytes.
@ -194,7 +194,7 @@ pub fn sum224(data []byte) []byte {
mut d := new224()
d.write(data)
sum := d.checksum()
mut sum224 := [byte(0)].repeat(size224)
sum224 := [byte(0)].repeat(size224)
copy(sum224, sum[..size224])
return sum224
}

View File

@ -15,7 +15,7 @@ import (
encoding.binary
)
const (
pub const (
// size is the size, in bytes, of a SHA-512 checksum.
size = 64
// size224 is the size, in bytes, of a SHA-512/224 checksum.
@ -183,7 +183,7 @@ fn (d mut Digest) write(p_ []byte) int {
return nn
}
fn (d mut Digest) sum(b_in []byte) []byte {
fn (d &Digest) sum(b_in []byte) []byte {
// Make a copy of d so that caller can keep writing and summing.
mut d0 := *d
hash := d0.checksum()
@ -237,7 +237,7 @@ fn (d mut Digest) checksum() []byte {
}
mut digest := [byte(0)].repeat(size)
binary.big_endian_put_u64(mut digest, d.h[0])
binary.big_endian_put_u64(mut digest[8..], d.h[1])
binary.big_endian_put_u64(mut digest[16..], d.h[2])
@ -264,7 +264,7 @@ pub fn sum384(data []byte) []byte {
mut d := new_digest(.sha384)
d.write(data)
sum := d.checksum()
mut sum384 := [byte(0)].repeat(size384)
sum384 := [byte(0)].repeat(size384)
copy(sum384, sum[..size384])
return sum384
}
@ -274,7 +274,7 @@ pub fn sum512_224(data []byte) []byte {
mut d := new_digest(.sha512_224)
d.write(data)
sum := d.checksum()
mut sum224 := [byte(0)].repeat(size224)
sum224 := [byte(0)].repeat(size224)
copy(sum224, sum[..size224])
return sum224
}
@ -284,7 +284,7 @@ pub fn sum512_256(data []byte) []byte {
mut d := new_digest(.sha512_256)
d.write(data)
sum := d.checksum()
mut sum256 := [byte(0)].repeat(size256)
sum256 := [byte(0)].repeat(size256)
copy(sum256, sum[..size256])
return sum256
}

View File

@ -84,7 +84,7 @@ struct C.Bitmap {
struct C.Advance {
x int
}
struct C.Glyph {
bitmap Bitmap
bitmap_left int
@ -140,8 +140,8 @@ pub fn new_context(cfg gg.Cfg) &FreeType {
if !cfg.use_ortho {
return &FreeType{}
}
mut width := cfg.width * scale
mut height := cfg.height * scale
width := cfg.width * scale
height := cfg.height * scale
font_size := cfg.font_size * scale
// exit('fs=$font_size')
// if false {
@ -221,7 +221,7 @@ pub fn new_context(cfg gg.Cfg) &FreeType {
// # glVertexAttribPointer(0, 4, GL_FLOAT,false, 4 * sizeof(GLf32), 0);
// gl.bind_buffer(GL_ARRAY_BUFFER, uint(0))
// # glBindVertexArray(0);
mut ctx := &FreeType {
ctx := &FreeType {
shader: shader
width: width
height: height

View File

@ -275,8 +275,8 @@ fn todo_remove_me(cfg Cfg, scale int) {
if !cfg.use_ortho {
return
}
mut width := cfg.width * scale
mut height := cfg.height * scale
width := cfg.width * scale
height := cfg.height * scale
font_size := cfg.font_size * scale
gl.enable(C.GL_BLEND)
//# glBlendFunc(C.GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);

View File

@ -92,8 +92,8 @@ pub fn new_shader(name string) Shader {
if name.starts_with('/') {
dir = ''
}
vertex_path := '${dir}${name}.vert'
fragment_path := '${dir}${name}.frag'
//vertex_path := '${dir}${name}.vert'
//fragment_path := '${dir}${name}.frag'
//println('shader path=$vertex_path,\n fpath="$fragment_path"')
// vertex_src := os.read_file(vertex_path.trim_space())
mut vertex_src := ''

View File

@ -36,7 +36,7 @@ fn(c mut Crc32) generate_table(poly int) {
c.table << crc
}
}
fn(c &Crc32) sum32(b []byte) u32 {
mut crc := ~u32(0)
for i := 0; i < b.len; i++ {
@ -58,6 +58,6 @@ pub fn new(poly int) &Crc32 {
// calculate crc32 using ieee
pub fn sum(b []byte) u32 {
mut c := new(ieee)
c := new(ieee)
return c.sum32(b)
}

View File

@ -84,7 +84,7 @@ fn (req &Request) ssl_do(port int, method, host_name, path string) ?Response {
res = C.SSL_set_tlsext_host_name(ssl, host_name.str)
res = C.BIO_do_connect(web)
res = C.BIO_do_handshake(web)
cert := C.SSL_get_peer_certificate(ssl)
C.SSL_get_peer_certificate(ssl)
res = C.SSL_get_verify_result(ssl)
///////
s := req.build_request_headers(method, host_name, path)

View File

@ -137,8 +137,8 @@ pub fn (req &Request) do() ?Response {
fn (req &Request) method_and_url_to_response(method string, url net_dot_urllib.URL) ?Response {
host_name := url.hostname()
scheme := url.scheme
mut p := url.path.trim_left('/')
mut path := if url.query().size > 0 { '/$p?${url.query().encode()}' } else { '/$p' }
p := url.path.trim_left('/')
path := if url.query().size > 0 { '/$p?${url.query().encode()}' } else { '/$p' }
mut nport := url.port().int()
if nport == 0 {
if scheme == 'http' { nport = 80 }

View File

@ -44,7 +44,7 @@ pub struct Log {
mut:
level LogLevel
output_label string
ofile os.File
output_to_file bool
pub:
@ -70,7 +70,7 @@ pub fn (l mut Log) set_full_logpath(full_log_path string) {
rlog_file := os.realpath( full_log_path )
l.set_output_label( os.filename( rlog_file ) )
l.set_output_path( os.basedir( rlog_file ) )
}
}
pub fn (l mut Log) set_output_label(label string){
l.output_label = label
@ -80,10 +80,10 @@ pub fn (l mut Log) set_output_path(output_file_path string) {
if l.ofile.is_opened() { l.ofile.close() }
l.output_to_file = true
l.output_file_name = filepath.join( os.realpath( output_file_path ) , l.output_label )
mut ofile := os.open_append( l.output_file_name ) or {
ofile := os.open_append( l.output_file_name ) or {
panic('error while opening log file ${l.output_file_name} for appending')
}
l.ofile = ofile
l.ofile = ofile
}
pub fn (l mut Log) close(){
@ -96,7 +96,7 @@ fn (l mut Log) log_file(s string, level LogLevel) {
l.ofile.writeln('$timestamp [$e] $s')
}
fn (l mut Log) log_cli(s string, level LogLevel) {
fn (l &Log) log_cli(s string, level LogLevel) {
f := tag(level)
t := time.now()
println('[$f ${t.format_ss()}] $s')

View File

@ -134,10 +134,10 @@ pub fn listen(port int) ?Socket {
s := new_socket(C.AF_INET, C.SOCK_STREAM, 0) or {
return error(err)
}
bind_res := s.bind(port) or {
_ = s.bind(port) or {
return error(err)
}
listen_res := s.listen() or {
_ = s.listen() or {
return error(err)
}
return s
@ -196,7 +196,7 @@ pub fn dial(address string, port int) ?Socket {
s := new_socket(C.AF_INET, C.SOCK_STREAM, 0) or {
return error(err)
}
res := s.connect(address, port) or {
_ = s.connect(address, port) or {
return error(err)
}
return s

View File

@ -260,7 +260,7 @@ fn escape(s string, mode EncodingMode) string {
return s
}
mut buf := [byte(0)].repeat(64)
buf := [byte(0)].repeat(64)
mut t := []byte
required := s.len + 2*hex_count
@ -677,7 +677,7 @@ fn (u mut URL) set_path(p string) ?bool {
// reading u.raw_path directly.
fn (u &URL) escaped_path() string {
if u.raw_path != '' && valid_encoded_path(u.raw_path) {
p := unescape(u.raw_path, .encode_path)
unescape(u.raw_path, .encode_path) or { return '' }
return u.raw_path
}
if u.path == '*' {

View File

@ -15,7 +15,7 @@ const (
pub fn compile_template(path string) string {
//lines := os.read_lines(path)
mut html := os.read_file(path) or {
html := os.read_file(path) or {
panic('html failed')
}
mut header := ''
@ -27,7 +27,7 @@ pub fn compile_template(path string) string {
}
lines := html.split_into_lines()
mut s := strings.new_builder(1000)
base := path.all_after('/').replace('.html', '')
//base := path.all_after('/').replace('.html', '')
s.writeln('
mut sb := strings.new_builder(${lines.len * 30})
header := \'$header\'

View File

@ -44,6 +44,7 @@ mut:
}
pub fn (ctx Context) html(html string) {
//println('$html HTTP/1.1 200 OK\r\nContent-Type: text/html\r\n$ctx.headers\r\n\r\n$html')
ctx.conn.write('HTTP/1.1 200 OK\r\nContent-Type: text/html\r\n$ctx.headers\r\n\r\n$html') or { panic(err) }
}
@ -64,10 +65,11 @@ pub fn (ctx Context) not_found(s string) {
}
pub fn (ctx mut Context) set_cookie(key, val string) { // TODO support directives, escape cookie value (https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Set-Cookie)
//println('Set-Cookie $key=$val')
ctx.add_header('Set-Cookie', '$key=$val')
}
pub fn (ctx mut Context) get_cookie(key string) ?string { // TODO refactor
pub fn (ctx &Context) get_cookie(key string) ?string { // TODO refactor
cookie_header := ctx.get_header('Cookie')
cookie := if cookie_header.contains(';') {
cookie_header.find_between('$key=', ';')
@ -81,10 +83,13 @@ pub fn (ctx mut Context) get_cookie(key string) ?string { // TODO refactor
}
fn (ctx mut Context) add_header(key, val string) {
ctx.headers = ctx.headers + if ctx.headers == '' { '$key: val' } else { '\r\n$key: val' }
//println('add_header($key, $val)')
ctx.headers = ctx.headers +
if ctx.headers == '' { '$key: $val' } else { '\r\n$key: $val' }
//println(ctx.headers)
}
fn (ctx mut Context) get_header(key string) string {
fn (ctx &Context) get_header(key string) string {
return ctx.headers.find_between('\r\n$key: ', '\r\n')
}
@ -147,11 +152,11 @@ pub fn run<T>(app mut T, port int) {
line := conn.read_line()
if line == '' || line == '\r\n' {
break
}
}
//if line.contains('POST') || line == '' {
//break
//}
}
//}
}
line := conn.read_line()
app.vweb.parse_form(line)
}