mirror of https://github.com/vlang/v.git synced 2023-08-10 21:13:21 +03:00

ci: fix warnings/errors due to the vfmt change

Delyan Angelov, 2020-10-15 16:17:52 +03:00
parent 50a2b033b7
commit 31ef921ef2
33 changed files with 466 additions and 570 deletions
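
The whole commit is a mechanical sweep over vlib to satisfy the new vfmt rules, and the before/after pairs in the hunks below reduce to a handful of changes: every parameter in a multi-parameter declaration now carries its own explicit type, struct initializers lose their trailing commas, comments get a space after the // marker, binary operators and the trailing ? of optional calls get surrounding spaces, and string interpolation drops redundant braces around plain identifiers and field accesses. A hypothetical before/after sketch of those rules (the function and its body are illustrative, not taken from this diff):

// old style, rewritten by the new vfmt
fn save(url, out string) ?bool {
	println('saving $url to ${out}')
	return true
}

// what the new vfmt produces instead
fn save(url string, out string) ?bool {
	println('saving $url to $out')
	return true
}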


@@ -2,22 +2,19 @@ module flag
// data object storing information about a defined flag
pub struct Flag {
pub:
pub:
name string // name as it appears on command line
abbr byte // shortcut
abbr byte // shortcut
usage string // help message
val_desc string // something like '<arg>' that appears in usage,
// and also the default value, when the flag is not given
}
pub fn (f Flag) str() string {
return ''
+' flag:\n'
+' name: $f.name\n'
+' abbr: $f.abbr\n'
+' usag: $f.usage\n'
+' desc: $f.val_desc'
return '' + ' flag:\n' + ' name: $f.name\n' + ' abbr: $f.abbr\n' +
' usag: $f.usage\n' + ' desc: $f.val_desc'
}
pub fn (af []Flag) str() string {
mut res := []string{}
res << '\n []Flag = ['
@@ -27,32 +24,34 @@ pub fn (af []Flag) str() string {
res << ' ]'
return res.join('\n')
}
//
pub struct FlagParser {
pub mut:
args []string // the arguments to be parsed
max_free_args int
flags []Flag // registered flags
pub mut:
args []string // the arguments to be parsed
max_free_args int
flags []Flag // registered flags
application_name string
application_version string
application_description string
min_free_args int
min_free_args int
args_description string
}
pub const (
// used for formating usage message
space = ' '
underline = '-----------------------------------------------'
space = ' '
underline = '-----------------------------------------------'
max_args_number = 4048
)
// create a new flag set for parsing command line arguments
// TODO use INT_MAX some how
pub fn new_flag_parser(args []string) &FlagParser {
return &FlagParser{args: args.clone(), max_free_args: max_args_number}
return &FlagParser{
args: args.clone()
max_free_args: max_args_number
}
}
// change the application name to be used in 'usage' output
@@ -78,22 +77,22 @@ pub fn (mut fs FlagParser) skip_executable() {
// private helper to register a flag
fn (mut fs FlagParser) add_flag(name string, abbr byte, usage string, desc string) {
fs.flags << Flag{
name: name,
abbr: abbr,
usage: usage,
name: name
abbr: abbr
usage: usage
val_desc: desc
}
}
// private: general parsing a single argument
// - search args for existence
// if true
// extract the defined value as string
// else
// return an (dummy) error -> argument is not defined
// - search args for existence
// if true
// extract the defined value as string
// else
// return an (dummy) error -> argument is not defined
//
// - the name, usage are registered
// - found arguments and corresponding values are removed from args list
// - the name, usage are registered
// - found arguments and corresponding values are removed from args list
fn (mut fs FlagParser) parse_value(longhand string, shorthand byte) []string {
full := '--$longhand'
mut found_entries := []string{}
@@ -105,36 +104,36 @@ fn (mut fs FlagParser) parse_value(longhand string, shorthand byte) []string {
continue
}
if arg == '--' {
//End of input. We're done here.
// End of input. We're done here.
break
}
if arg[0] != `-` {
continue
}
if (arg.len == 2 && arg[0] == `-` && arg[1] == shorthand ) || arg == full {
if i+1 >= fs.args.len {
if (arg.len == 2 && arg[0] == `-` && arg[1] == shorthand) || arg == full {
if i + 1 >= fs.args.len {
return []
}
nextarg := fs.args[i+1]
nextarg := fs.args[i + 1]
if nextarg.len > 2 && nextarg[..2] == '--' {
//It could be end of input (--) or another argument (--abc).
//Both are invalid so die.
// It could be end of input (--) or another argument (--abc).
// Both are invalid so die.
return []
}
found_entries << fs.args[i+1]
found_entries << fs.args[i + 1]
to_delete << i
to_delete << i+1
to_delete << i + 1
should_skip_one = true
continue
}
if arg.len > full.len+1 && arg[..full.len+1] == '$full=' {
found_entries << arg[full.len+1..]
if arg.len > full.len + 1 && arg[..full.len + 1] == '$full=' {
found_entries << arg[full.len + 1..]
to_delete << i
continue
}
}
for i, del in to_delete {
//i entrys are deleted so it's shifted left i times.
// i entrys are deleted so it's shifted left i times.
fs.args.delete(del - i)
}
return found_entries
@@ -150,7 +149,7 @@ fn (mut fs FlagParser) parse_bool_value(longhand string, shorthand byte) ?string
full := '--$longhand'
for i, arg in fs.args {
if arg == '--' {
//End of input. We're done.
// End of input. We're done.
break
}
if arg.len == 0 {
@@ -159,10 +158,10 @@ fn (mut fs FlagParser) parse_bool_value(longhand string, shorthand byte) ?string
if arg[0] != `-` {
continue
}
if ( arg.len == 2 && arg[0] == `-` && arg[1] == shorthand ) || arg == full {
if fs.args.len > i+1 && (fs.args[i+1] in ['true', 'false']) {
val := fs.args[i+1]
fs.args.delete(i+1)
if (arg.len == 2 && arg[0] == `-` && arg[1] == shorthand) || arg == full {
if fs.args.len > i + 1 && (fs.args[i + 1] in ['true', 'false']) {
val := fs.args[i + 1]
fs.args.delete(i + 1)
fs.args.delete(i)
return val
} else {
@@ -170,9 +169,9 @@ fn (mut fs FlagParser) parse_bool_value(longhand string, shorthand byte) ?string
return 'true'
}
}
if arg.len > full.len+1 && arg[..full.len+1] == '$full=' {
if arg.len > full.len + 1 && arg[..full.len + 1] == '$full=' {
// Flag abc=true
val := arg[full.len+1..]
val := arg[full.len + 1..]
fs.args.delete(i)
return val
}
@@ -195,12 +194,12 @@ pub fn (mut fs FlagParser) bool_opt(name string, abbr byte, usage string) ?bool
}
// defining and parsing a bool flag
// if defined
// the value is returned (true/false)
// else
// the default value is returned
// if defined
// the value is returned (true/false)
// else
// the default value is returned
// version with abbr
//TODO error handling for invalid string to bool conversion
// TODO error handling for invalid string to bool conversion
pub fn (mut fs FlagParser) bool(name string, abbr byte, bdefault bool, usage string) bool {
value := fs.bool_opt(name, abbr, usage) or {
return bdefault
@@ -232,12 +231,12 @@ pub fn (mut fs FlagParser) int_opt(name string, abbr byte, usage string) ?int {
}
// defining and parsing an int flag
// if defined
// the value is returned (int)
// else
// the default value is returned
// if defined
// the value is returned (int)
// else
// the default value is returned
// version with abbr
//TODO error handling for invalid string to int conversion
// TODO error handling for invalid string to int conversion
pub fn (mut fs FlagParser) int(name string, abbr byte, idefault int, usage string) int {
value := fs.int_opt(name, abbr, usage) or {
return idefault
@@ -269,12 +268,12 @@ pub fn (mut fs FlagParser) float_opt(name string, abbr byte, usage string) ?f64
}
// defining and parsing a float flag
// if defined
// the value is returned (float)
// else
// the default value is returned
// if defined
// the value is returned (float)
// else
// the default value is returned
// version with abbr
//TODO error handling for invalid string to float conversion
// TODO error handling for invalid string to float conversion
pub fn (mut fs FlagParser) float(name string, abbr byte, fdefault f64, usage string) f64 {
value := fs.float_opt(name, abbr, usage) or {
return fdefault
@@ -301,10 +300,10 @@ pub fn (mut fs FlagParser) string_opt(name string, abbr byte, usage string) ?str
}
// defining and parsing a string flag
// if defined
// the value is returned (string)
// else
// the default value is returned
// if defined
// the value is returned (string)
// else
// the default value is returned
// version with abbr
pub fn (mut fs FlagParser) string(name string, abbr byte, sdefault string, usage string) string {
value := fs.string_opt(name, abbr, usage) or {
@@ -336,7 +335,7 @@ pub fn (mut fs FlagParser) limit_free_args_to_exactly(n int) {
// this will cause an error in finalize() if free args are out of range
// (min, ..., max)
pub fn (mut fs FlagParser) limit_free_args(min, max int) {
pub fn (mut fs FlagParser) limit_free_args(min int, max int) {
if min > max {
panic('flag.limit_free_args expect min < max, got $min >= $max')
}
@@ -344,42 +343,44 @@ pub fn (mut fs FlagParser) limit_free_args(min, max int) {
fs.max_free_args = max
}
pub fn (mut fs FlagParser) arguments_description(description string){
pub fn (mut fs FlagParser) arguments_description(description string) {
fs.args_description = description
}
// collect all given information and
pub fn (fs FlagParser) usage() string {
positive_min_arg := ( fs.min_free_args > 0 )
positive_max_arg := ( fs.max_free_args > 0 && fs.max_free_args != max_args_number )
no_arguments := ( fs.min_free_args == 0 && fs.max_free_args == 0 )
positive_min_arg := (fs.min_free_args > 0)
positive_max_arg := (fs.max_free_args > 0 && fs.max_free_args != max_args_number)
no_arguments := (fs.min_free_args == 0 && fs.max_free_args == 0)
mut adesc := if fs.args_description.len > 0 { fs.args_description } else { '[ARGS]' }
if no_arguments { adesc = '' }
if no_arguments {
adesc = ''
}
mut use := ''
if fs.application_version != '' {
use += '$fs.application_name $fs.application_version\n'
use += '$underline\n'
}
use += 'Usage: ${fs.application_name} [options] $adesc\n'
use += 'Usage: $fs.application_name [options] $adesc\n'
use += '\n'
if fs.application_description != '' {
use += 'Description:\n'
use += '$fs.application_description'
use += '\n\n'
}
// show a message about the [ARGS]:
if positive_min_arg || positive_max_arg || no_arguments {
if no_arguments {
use += 'This application does not expect any arguments\n\n'
goto end_of_arguments_handling
}
mut s:= []string{}
if positive_min_arg { s << 'at least $fs.min_free_args' }
if positive_max_arg { s << 'at most $fs.max_free_args' }
mut s := []string{}
if positive_min_arg {
s << 'at least $fs.min_free_args'
}
if positive_max_arg {
s << 'at most $fs.max_free_args'
}
if positive_min_arg && positive_max_arg && fs.min_free_args == fs.max_free_args {
s = ['exactly $fs.min_free_args']
}
@@ -387,32 +388,30 @@ pub fn (fs FlagParser) usage() string {
use += 'The arguments should be $sargs in number.\n\n'
}
end_of_arguments_handling:
if fs.flags.len > 0 {
use += 'Options:\n'
for f in fs.flags {
mut onames := []string{}
if f.abbr != 0 {
onames << '-${f.abbr.str()}'
onames << '-$f.abbr.str()'
}
if f.name != '' {
if !f.val_desc.contains('<bool>') {
onames << '--${f.name} $f.val_desc'
}else{
onames << '--${f.name}'
onames << '--$f.name $f.val_desc'
} else {
onames << '--$f.name'
}
}
option_names := ' ' + onames.join(', ')
mut xspace := ''
if option_names.len > space.len-2 {
xspace = '\n${space}'
if option_names.len > space.len - 2 {
xspace = '\n$space'
} else {
xspace = space[option_names.len..]
}
use += '${option_names}${xspace}${f.usage}\n'
use += '$option_names$xspace$f.usage\n'
}
}
return use
}
@@ -426,14 +425,14 @@ pub fn (fs FlagParser) usage() string {
pub fn (fs FlagParser) finalize() ?[]string {
for a in fs.args {
if a.len >= 2 && a[..2] == '--' {
return error('Unknown argument \'${a[2..]}\'')
return error("Unknown argument \'${a[2..]}\'")
}
}
if fs.args.len < fs.min_free_args && fs.min_free_args > 0 {
return error('Expected at least ${fs.min_free_args} arguments, but given $fs.args.len')
return error('Expected at least $fs.min_free_args arguments, but given $fs.args.len')
}
if fs.args.len > fs.max_free_args && fs.max_free_args > 0 {
return error('Expected at most ${fs.max_free_args} arguments, but given $fs.args.len')
return error('Expected at most $fs.max_free_args arguments, but given $fs.args.len')
}
if fs.args.len > 0 && fs.max_free_args == 0 && fs.min_free_args == 0 {
return error('Expected no arguments, but given $fs.args.len')


@@ -355,7 +355,7 @@ fn test_single_dash() {
fn test_optional_flags() {
mut fp := flag.new_flag_parser(['-a', '10', '-b'])
a := fp.int_opt('some-flag', `a`, '') or {
fp.int_opt('some-flag', `a`, '') or {
assert false
return
}
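
For orientation, here is a minimal usage sketch of the flag module whose formatting is changed above. It leans only on names visible in the hunks (new_flag_parser, skip_executable, string, int, finalize, usage); the application() call and the greeter program itself are assumptions made for illustration, not part of this commit.

import flag
import os

fn main() {
	mut fp := flag.new_flag_parser(os.args)
	fp.application('greeter') // assumed helper that sets the name printed by usage()
	fp.skip_executable() // drop the executable name before parsing
	name := fp.string('name', `n`, 'world', 'who to greet')
	times := fp.int('times', `t`, 1, 'how many times to greet')
	rest := fp.finalize() or {
		eprintln(err)
		println(fp.usage())
		return
	}
	for _ in 0 .. times {
		println('Hello, $name!')
	}
	if rest.len > 0 {
		println('extra arguments: $rest')
	}
}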


@@ -10,7 +10,7 @@ const (
is_used = openssl.is_used
)
fn (req &Request) ssl_do(port int, method Method, host_name, path string) ?Response {
fn (req &Request) ssl_do(port int, method Method, host_name string, path string) ?Response {
// ssl_method := C.SSLv23_method()
ssl_method := C.TLSv1_2_method()
ctx := C.SSL_CTX_new(ssl_method)


@@ -5,7 +5,7 @@ module http
import os
pub fn download_file(url, out string) bool {
pub fn download_file(url string, out string) bool {
$if debug_http? {
println('download file url=$url out=$out')
}


@@ -13,7 +13,7 @@ mut:
cb DownloadFn
}
*/
fn download_cb(ptr voidptr, size, nmemb size_t, userp voidptr) {
fn download_cb(ptr voidptr, size size_t, nmemb size_t, userp voidptr) {
/*
mut data := &DownloadStruct(userp)
written := C.fwrite(ptr, size, nmemb, data.stream)
@@ -24,7 +24,7 @@ fn download_cb(ptr voidptr, size, nmemb size_t, userp voidptr) {
*/
}
pub fn download_file_with_progress(url, out string, cb DownloadFn, cb_finished fn()) {
pub fn download_file_with_progress(url string, out string, cb DownloadFn, cb_finished fn()) {
/*
curl := C.curl_easy_init()
if isnil(curl) {


@@ -11,7 +11,7 @@ import net
const (
max_redirects = 4
content_type_default = 'text/plain'
bufsize = 1536
bufsize = 1536
)
pub struct Request {
@@ -47,18 +47,17 @@ pub:
status_code int
}
pub fn new_request(method Method, url_, data string) ?Request {
pub fn new_request(method Method, url_ string, data string) ?Request {
url := if method == .get { url_ + '?' + data } else { url_ }
//println('new req() method=$method url="$url" dta="$data"')
// println('new req() method=$method url="$url" dta="$data"')
return Request{
method: method
url: url
data: data
/*
headers: {
data: data /*
headers: {
'Accept-Encoding': 'compress'
}
*/
*/
}
}
@@ -75,7 +74,7 @@ pub fn get(url string) ?Response {
return fetch_with_method(.get, url, FetchConfig{})
}
pub fn post(url, data string) ?Response {
pub fn post(url string, data string) ?Response {
return fetch_with_method(.post, url, {
data: data
headers: {
@@ -84,7 +83,7 @@ pub fn post(url, data string) ?Response {
})
}
pub fn post_json(url, data string) ?Response {
pub fn post_json(url string, data string) ?Response {
return fetch_with_method(.post, url, {
data: data
headers: {
@@ -102,7 +101,7 @@ pub fn post_form(url string, data map[string]string) ?Response {
})
}
pub fn put(url, data string) ?Response {
pub fn put(url string, data string) ?Response {
return fetch_with_method(.put, url, {
data: data
headers: {
@@ -111,7 +110,7 @@ pub fn put(url, data string) ?Response {
})
}
pub fn patch(url, data string) ?Response {
pub fn patch(url string, data string) ?Response {
return fetch_with_method(.patch, url, {
data: data
headers: {
@@ -133,7 +132,7 @@ pub fn fetch(_url string, config FetchConfig) ?Response {
return error('http.fetch: empty url')
}
url := build_url_from_fetch(_url, config) or {
return error('http.fetch: invalid url ${_url}')
return error('http.fetch: invalid url $_url')
}
data := config.data
req := Request{
@@ -147,7 +146,7 @@ pub fn fetch(_url string, config FetchConfig) ?Response {
user_ptr: 0
verbose: config.verbose
}
res := req.do()?
res := req.do() ?
return res
}
@@ -177,14 +176,14 @@ fn fetch_with_method(method Method, url string, _config FetchConfig) ?Response {
}
fn build_url_from_fetch(_url string, config FetchConfig) ?string {
mut url := urllib.parse(_url)?
mut url := urllib.parse(_url) ?
params := config.params
if params.keys().len == 0 {
return url.str()
}
mut pieces := []string{}
for key in params.keys() {
pieces << '${key}=${params[key]}'
pieces << '$key=${params[key]}'
}
mut query := pieces.join('&')
if url.raw_query.len > 1 {
@@ -195,19 +194,15 @@ fn build_url_from_fetch(_url string, config FetchConfig) ?string {
}
fn (mut req Request) free() {
unsafe {
req.headers.free()
}
unsafe {req.headers.free()}
}
fn (mut resp Response) free() {
unsafe {
resp.headers.free()
}
unsafe {resp.headers.free()}
}
// add_header adds the key and value of an HTTP request header
pub fn (mut req Request) add_header(key, val string) {
pub fn (mut req Request) add_header(key string, val string) {
req.headers[key] = val
}
@@ -229,7 +224,7 @@ pub fn parse_headers(lines []string) map[string]string {
// do will send the HTTP request and returns `http.Response` as soon as the response is recevied
pub fn (req &Request) do() ?Response {
mut url := urllib.parse(req.url) or {
return error('http.Request.do: invalid url ${req.url}')
return error('http.Request.do: invalid url $req.url')
}
mut rurl := url
mut resp := Response{}
@@ -238,7 +233,7 @@ pub fn (req &Request) do() ?Response {
if no_redirects == max_redirects {
return error('http.request.do: maximum number of redirects reached ($max_redirects)')
}
qresp := req.method_and_url_to_response(req.method, rurl)?
qresp := req.method_and_url_to_response(req.method, rurl) ?
resp = qresp
if resp.status_code !in [301, 302, 303, 307, 308] {
break
@@ -264,7 +259,7 @@ fn (req &Request) method_and_url_to_response(method Method, url urllib.URL) ?Res
host_name := url.hostname()
scheme := url.scheme
p := url.path.trim_left('/')
path := if url.query().len > 0 { '/$p?${url.query().encode()}' } else { '/$p' }
path := if url.query().len > 0 { '/$p?$url.query().encode()' } else { '/$p' }
mut nport := url.port().int()
if nport == 0 {
if scheme == 'http' {
@@ -277,11 +272,11 @@ fn (req &Request) method_and_url_to_response(method Method, url urllib.URL) ?Res
// println('fetch $method, $scheme, $host_name, $nport, $path ')
if scheme == 'https' {
// println('ssl_do( $nport, $method, $host_name, $path )')
res := req.ssl_do(nport, method, host_name, path)?
res := req.ssl_do(nport, method, host_name, path) ?
return res
} else if scheme == 'http' {
// println('http_do( $nport, $method, $host_name, $path )')
res := req.http_do(nport, method, host_name, path)?
res := req.http_do(nport, method, host_name, path) ?
return res
}
return error('http.request.method_and_url_to_response: unsupported scheme: "$scheme"')
@@ -322,7 +317,6 @@ fn parse_response(resp string) Response {
// if h.contains('Content-Type') {
// continue
// }
mut key := h[..pos]
lkey := key.to_lower()
val := h[pos + 2..]
@@ -346,7 +340,7 @@ fn parse_response(resp string) Response {
}
}
fn (req &Request) build_request_headers(method Method, host_name, path string) string {
fn (req &Request) build_request_headers(method Method, host_name string, path string) string {
ua := req.user_agent
mut uheaders := []string{}
if 'Host' !in req.headers {
@@ -356,17 +350,16 @@ fn (req &Request) build_request_headers(method Method, host_name, path string) s
uheaders << 'User-Agent: $ua\r\n'
}
if req.data.len > 0 && 'Content-Length' !in req.headers {
uheaders << 'Content-Length: ${req.data.len}\r\n'
uheaders << 'Content-Length: $req.data.len\r\n'
}
for key, val in req.headers {
if key == 'Cookie' {
continue
}
uheaders << '${key}: ${val}\r\n'
uheaders << '$key: $val\r\n'
}
uheaders << req.build_request_cookies_header()
return '$method $path HTTP/1.1\r\n' + uheaders.join('') + 'Connection: close\r\n\r\n' +
req.data
return '$method $path HTTP/1.1\r\n' + uheaders.join('') + 'Connection: close\r\n\r\n' + req.data
}
fn (req &Request) build_request_cookies_header() string {
@@ -399,13 +392,12 @@ pub fn escape(s string) string {
panic('http.escape() was replaced with http.escape_url()')
}
fn (req &Request) http_do(port int, method Method, host_name, path string) ?Response {
fn (req &Request) http_do(port int, method Method, host_name string, path string) ?Response {
rbuffer := [bufsize]byte{}
mut sb := strings.new_builder(100)
s := req.build_request_headers(method, host_name, path)
client := net.dial(host_name, port)?
client.send(s.str, s.len) or {
}
client := net.dial(host_name, port) ?
client.send(s.str, s.len) or { }
for {
readbytes := client.crecv(rbuffer, bufsize)
if readbytes < 0 {
@@ -416,8 +408,7 @@ fn (req &Request) http_do(port int, method Method, host_name, path string) ?Resp
}
sb.write(tos(rbuffer, readbytes))
}
client.close() or {
}
client.close() or { }
return parse_response(sb.str())
}


@@ -1,48 +1,62 @@
import net.http
fn test_http_get() {
$if !network ? { return }
$if !network ? {
return
}
assert http.get_text('https://vlang.io/version') == '0.1.5'
println('http ok')
}
fn test_http_get_from_vlang_utc_now() {
$if !network ? { return }
$if !network ? {
return
}
urls := ['http://vlang.io/utc_now', 'https://vlang.io/utc_now']
for url in urls {
println('Test getting current time from $url by http.get')
res := http.get(url) or { panic(err) }
res := http.get(url) or {
panic(err)
}
assert 200 == res.status_code
assert res.text.len > 0
assert res.text.int() > 1566403696
println('Current time is: ${res.text.int()}')
println('Current time is: $res.text.int()')
}
}
fn test_public_servers() {
$if !network ? { return }
$if !network ? {
return
}
urls := [
'http://github.com/robots.txt',
'http://google.com/robots.txt',
'https://github.com/robots.txt',
'https://google.com/robots.txt',
// 'http://yahoo.com/robots.txt',
// 'https://yahoo.com/robots.txt',
// 'http://yahoo.com/robots.txt',
// 'https://yahoo.com/robots.txt',
]
for url in urls {
println('Testing http.get on public url: $url ')
res := http.get( url ) or { panic(err) }
res := http.get(url) or {
panic(err)
}
assert 200 == res.status_code
assert res.text.len > 0
}
}
fn test_relative_redirects() {
$if !network ? { return }
$else { return } // tempfix periodic: httpbin relative redirects are broken
res := http.get('https://httpbin.org/relative-redirect/3?abc=xyz') or { panic(err) }
$if !network ? {
return
} $else {
return
} // tempfix periodic: httpbin relative redirects are broken
res := http.get('https://httpbin.org/relative-redirect/3?abc=xyz') or {
panic(err)
}
assert 200 == res.status_code
assert res.text.len > 0
assert res.text.contains('"abc": "xyz"')
}


@@ -4,10 +4,10 @@ import os
pub struct Socket {
pub:
sockfd int
family int
typ int
proto int
sockfd int
family int
typ int
proto int
pub mut:
max_single_send_size int = 64000
}
@@ -75,12 +75,12 @@ fn C.inet_ntop(af int, src voidptr, dst charptr, dst_size int) charptr
fn C.getpeername(sockfd int, addr &C.sockaddr_in, addrsize &int) int
// create socket
pub fn new_socket(family, typ, proto int) ?Socket {
pub fn new_socket(family int, typ int, proto int) ?Socket {
sockfd := C.socket(family, typ, proto)
one := 1
// This is needed so that there are no problems with reusing the
// same port after the application exits.
C.setsockopt(sockfd, C.SOL_SOCKET, C.SO_REUSEADDR, &one, sizeof(int))
C.setsockopt(sockfd, C.SOL_SOCKET, C.SO_REUSEADDR, &one, sizeof(voidptr))
if sockfd == -1 {
return error('net.socket: failed')
}
@@ -98,8 +98,8 @@ pub fn socket_udp() ?Socket {
}
// set socket options
pub fn (s Socket) setsockopt(level, optname int, optvalue &int) ?int {
res := C.setsockopt(s.sockfd, level, optname, optvalue, sizeof(&int))
pub fn (s Socket) setsockopt(level int, optname int, optvalue &int) ?int {
res := C.setsockopt(s.sockfd, level, optname, optvalue, sizeof(int))
if res < 0 {
return error('net.setsocketopt: failed with $res')
}
@@ -153,9 +153,9 @@ pub fn listen(port int) ?Socket {
$if debug {
println('net.listen($port)')
}
s := new_socket(C.AF_INET, C.SOCK_STREAM, 0)?
s.bind(port)?
s.listen()?
s := new_socket(C.AF_INET, C.SOCK_STREAM, 0) ?
s.bind(port) ?
s.listen() ?
return s
}
@@ -190,7 +190,7 @@ pub fn (s Socket) peer_ip() ?string {
cstr := C.inet_ntop(C.AF_INET, &peeraddr.sin_addr, buf, sizeof(buf))
if cstr == 0 {
return error('net.peer_ip: inet_ntop failed')
}
}
return cstring_to_vstring(cstr)
}
@@ -222,8 +222,8 @@ pub fn (s Socket) connect(address string, port int) ?int {
// helper method to create socket and connect
pub fn dial(address string, port int) ?Socket {
s := new_socket(C.AF_INET, C.SOCK_STREAM, 0)?
s.connect(address, port)?
s := new_socket(C.AF_INET, C.SOCK_STREAM, 0) ?
s.connect(address, port) ?
return s
}
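
The Socket calls that http_do relies on in the http module hunk above (net.dial, send, crecv, close, together with tos and strings.new_builder) can be strung into a standalone sketch of the 2020-era API. Treat it as an assumption-laden illustration rather than a recipe: the request text, buffer size and error message are made up.

import net
import strings

fn fetch_raw(host string, port int) ?string {
	req := 'GET / HTTP/1.1\r\nHost: $host\r\nConnection: close\r\n\r\n'
	client := net.dial(host, port) ? // TCP connect, as in http_do
	client.send(req.str, req.len) or { } // write the raw request bytes
	mut sb := strings.new_builder(100)
	rbuffer := [1536]byte{} // fixed read buffer, mirroring bufsize above
	for {
		readbytes := client.crecv(rbuffer, 1536)
		if readbytes < 0 {
			return error('fetch_raw: error reading response')
		}
		if readbytes == 0 {
			break
		}
		sb.write(tos(rbuffer, readbytes))
	}
	client.close() or { }
	return sb.str()
}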


@@ -22,10 +22,10 @@ enum EncodingMode {
const (
err_msg_escape = 'unescape: invalid URL escape'
err_msg_parse = 'parse: failed parsing url'
err_msg_parse = 'parse: failed parsing url'
)
fn error_msg(message, val string) string {
fn error_msg(message string, val string) string {
mut msg := 'net.urllib.$message'
if val != '' {
msg = '$msg ($val)'
@@ -53,7 +53,8 @@ fn should_escape(c byte, mode EncodingMode) bool {
// we could possibly allow, and parse will reject them if we
// escape them (because hosts can`t use %-encoding for
// ASCII bytes).
if c in [`!`, `$`, `&`, `\\`, `(`, `)`, `*`, `+`, `,`, `;`, `=`, `:`, `[`, `]`, `<`, `>`, `"`] {
if c in
[`!`, `$`, `&`, `\\`, `(`, `)`, `*`, `+`, `,`, `;`, `=`, `:`, `[`, `]`, `<`, `>`, `"`] {
return false
}
}
@@ -100,11 +101,11 @@ fn should_escape(c byte, mode EncodingMode) bool {
// everything, so escape nothing.
return false
}
else {
}}
else {}
}
}
else {
}}
else {}
}
if mode == .encode_fragment {
// RFC 3986 §2.2 allows not escaping sub-delims. A subset of sub-delims are
// included in reserved from RFC 2396 §2.2. The remaining sub-delims do not
@@ -113,11 +114,9 @@ fn should_escape(c byte, mode EncodingMode) bool {
// escape single quote to avoid breaking callers that had previously assumed that
// single quotes would be escaped. See issue #19917.
match c {
`!`, `(`, `)`, `*` {
return false
}
else {
}}
`!`, `(`, `)`, `*` { return false }
else {}
}
}
// Everything else must be escaped.
return true
@@ -150,55 +149,58 @@ fn unescape(s_ string, mode EncodingMode) ?string {
// Count %, check that they're well-formed.
mut n := 0
mut has_plus := false
for i := 0; i < s.len; {
for i := 0; i < s.len; {
x := s[i]
match x {
`%` {
if s == '' {
break
}
n++
if i + 2 >= s.len || !ishex(s[i + 1]) || !ishex(s[i + 2]) {
s = s[i..]
if s.len > 3 {
s = s[..3]
`%` {
if s == '' {
break
}
return error(error_msg(err_msg_escape, s))
}
// Per https://tools.ietf.org/html/rfc3986#page-21
// in the host component %-encoding can only be used
// for non-ASCII bytes.
// But https://tools.ietf.org/html/rfc6874#section-2
// introduces %25 being allowed to escape a percent sign
// in IPv6 scoped-address literals. Yay.
if mode == .encode_host && unhex(s[i + 1]) < 8 && s[i..i + 3] != '%25' {
return error(error_msg(err_msg_escape, s[i..i + 3]))
}
if mode == .encode_zone {
// RFC 6874 says basically 'anything goes' for zone identifiers
// and that even non-ASCII can be redundantly escaped,
// but it seems prudent to restrict %-escaped bytes here to those
// that are valid host name bytes in their unescaped form.
// That is, you can use escaping in the zone identifier but not
// to introduce bytes you couldn't just write directly.
// But Windows puts spaces here! Yay.
v := ( (unhex(s[i + 1])<<byte(4)) | unhex(s[i + 2]))
if s[i..i + 3] != '%25' && v != ` ` && should_escape(v, .encode_host) {
error(error_msg(err_msg_escape, s[i..i + 3]))
n++
if i + 2 >= s.len || !ishex(s[i + 1]) || !ishex(s[i + 2]) {
s = s[i..]
if s.len > 3 {
s = s[..3]
}
return error(error_msg(err_msg_escape, s))
}
// Per https://tools.ietf.org/html/rfc3986#page-21
// in the host component %-encoding can only be used
// for non-ASCII bytes.
// But https://tools.ietf.org/html/rfc6874#section-2
// introduces %25 being allowed to escape a percent sign
// in IPv6 scoped-address literals. Yay.
if mode == .encode_host && unhex(s[i + 1]) < 8 && s[i..i + 3] != '%25' {
return error(error_msg(err_msg_escape, s[i..i + 3]))
}
if mode == .encode_zone {
// RFC 6874 says basically 'anything goes' for zone identifiers
// and that even non-ASCII can be redundantly escaped,
// but it seems prudent to restrict %-escaped bytes here to those
// that are valid host name bytes in their unescaped form.
// That is, you can use escaping in the zone identifier but not
// to introduce bytes you couldn't just write directly.
// But Windows puts spaces here! Yay.
v := ((unhex(s[i + 1]) << byte(4)) | unhex(s[i + 2]))
if s[i..i + 3] != '%25' && v != ` ` && should_escape(v, .encode_host) {
error(error_msg(err_msg_escape, s[i..i + 3]))
}
}
i += 3
}
i += 3
}
`+` {
has_plus = mode == .encode_query_component
i++
}
else {
if (mode == .encode_host || mode == .encode_zone) && s[i] < 0x80 && should_escape(s[i], mode) {
error(error_msg('unescape: invalid character in host name', s[i..i + 1]))
`+` {
has_plus = mode == .encode_query_component
i++
}
i++
}}
else {
if (mode == .encode_host ||
mode == .encode_zone) &&
s[i] < 0x80 && should_escape(s[i], mode) {
error(error_msg('unescape: invalid character in host name', s[i..i + 1]))
}
i++
}
}
}
if n == 0 && !has_plus {
return s
@@ -208,20 +210,20 @@ fn unescape(s_ string, mode EncodingMode) ?string {
x := s[i]
match x {
`%` {
t.write( ((unhex(s[i + 1])<<byte(4)) | unhex(s[i + 2])).str() )
t.write(((unhex(s[i + 1]) << byte(4)) | unhex(s[i + 2])).str())
i += 2
}
`+` {
if mode == .encode_query_component {
t.write(' ')
}
else {
} else {
t.write('+')
}
}
else {
t.write(s[i].str())
}}
}
}
}
return t.str()
}
@@ -242,13 +244,12 @@ fn escape(s string, mode EncodingMode) string {
mut space_count := 0
mut hex_count := 0
mut c := byte(0)
for i in 0..s.len {
for i in 0 .. s.len {
c = s[i]
if should_escape(c, mode) {
if c == ` ` && mode == .encode_query_component {
space_count++
}
else {
} else {
hex_count++
}
}
@@ -256,18 +257,17 @@ fn escape(s string, mode EncodingMode) string {
if space_count == 0 && hex_count == 0 {
return s
}
buf := []byte{len:(64)}
buf := []byte{len: (64)}
mut t := []byte{}
required := s.len + 2 * hex_count
if required <= buf.len {
t = buf[..required]
}
else {
t = []byte{len:(required)}
} else {
t = []byte{len: (required)}
}
if hex_count == 0 {
copy(t, s.bytes())
for i in 0..s.len {
for i in 0 .. s.len {
if s[i] == ` ` {
t[i] = `+`
}
@@ -276,19 +276,17 @@ fn escape(s string, mode EncodingMode) string {
}
upperhex := '0123456789ABCDEF'
mut j := 0
for i in 0..s.len {
for i in 0 .. s.len {
c1 := s[i]
if c1 == ` ` && mode == .encode_query_component {
t[j] = `+`
j++
}
else if should_escape(c1, mode) {
} else if should_escape(c1, mode) {
t[j] = `%`
t[j + 1] = upperhex[c1>>4]
t[j + 1] = upperhex[c1 >> 4]
t[j + 2] = upperhex[c1 & 15]
j += 3
}
else {
} else {
t[j] = s[i]
j++
}
@@ -345,9 +343,8 @@ pub fn user(username string) &Userinfo {
// ``is NOT RECOMMENDED, because the passing of authentication
// information in clear text (such as URI) has proven to be a
// security risk in almost every case where it has been used.''
fn user_password(username, password string) &Userinfo {
return &Userinfo{
username,password,true}
fn user_password(username string, password string) &Userinfo {
return &Userinfo{username, password, true}
}
// The Userinfo type is an immutable encapsulation of username and
@@ -382,23 +379,20 @@ fn (u &Userinfo) str() string {
// (scheme must be [a-zA-Z][a-zA-Z0-9+-.]*)
// If so, return [scheme, path]; else return ['', rawurl]
fn split_by_scheme(rawurl string) ?[]string {
for i in 0..rawurl.len {
for i in 0 .. rawurl.len {
c := rawurl[i]
if (`a` <= c && c <= `z`) || (`A` <= c && c <= `Z`) {
// do nothing
}
else if (`0` <= c && c <= `9`) || (c == `+` || c == `-` || c == `.`) {
} else if (`0` <= c && c <= `9`) || (c == `+` || c == `-` || c == `.`) {
if i == 0 {
return ['', rawurl]
}
}
else if c == `:` {
} else if c == `:` {
if i == 0 {
return error(error_msg('split_by_scheme: missing protocol scheme', ''))
}
return [rawurl[..i], rawurl[i + 1..]]
}
else {
} else {
// we have encountered an invalid character,
// so there is no valid scheme
return ['', rawurl]
@@ -417,15 +411,15 @@ fn get_scheme(rawurl string) ?string {
// split slices s into two substrings separated by the first occurence of
// sep. If cutc is true then sep is included with the second substring.
// If sep does not occur in s then s and the empty string is returned.
fn split(s string, sep byte, cutc bool) (string,string) {
fn split(s string, sep byte, cutc bool) (string, string) {
i := s.index_byte(sep)
if i < 0 {
return s,''
return s, ''
}
if cutc {
return s[..i],s[i + 1..]
return s[..i], s[i + 1..]
}
return s[..i],s[i..]
return s[..i], s[i..]
}
// parse parses rawurl into a URL structure.
@@ -436,7 +430,7 @@ fn split(s string, sep byte, cutc bool) (string,string) {
// error, due to parsing ambiguities.
pub fn parse(rawurl string) ?URL {
// Cut off #frag
u,frag := split(rawurl, `#`, true)
u, frag := split(rawurl, `#`, true)
mut url := parse_url(u, false) or {
return error(error_msg(err_msg_parse, u))
}
@@ -479,7 +473,7 @@ fn parse_url(rawurl string, via_request bool) ?URL {
}
// Split off possible leading 'http:', 'mailto:', etc.
// Cannot contain escaped characters.
p := split_by_scheme(rawurl)?
p := split_by_scheme(rawurl) ?
url.scheme = p[0]
mut rest := p[1]
url.scheme = url.scheme.to_lower()
@@ -487,9 +481,8 @@ fn parse_url(rawurl string, via_request bool) ?URL {
if rest.ends_with('?') && !rest[..1].contains('?') {
url.force_query = true
rest = rest[..rest.len - 1]
}
else {
r,raw_query := split(rest, `?`, true)
} else {
r, raw_query := split(rest, `?`, true)
rest = r
url.raw_query = raw_query
}
@@ -516,13 +509,14 @@ fn parse_url(rawurl string, via_request bool) ?URL {
}
if colon >= 0 && (slash < 0 || colon < slash) {
// First path segment has colon. Not allowed in relative URL.
return error(error_msg('parse_url: first path segment in URL cannot contain colon', ''))
return error(error_msg('parse_url: first path segment in URL cannot contain colon',
''))
}
}
if ((url.scheme != '' || !via_request) && !rest.starts_with('///')) && rest.starts_with('//') {
authority,r := split(rest[2..], `/`, false)
authority, r := split(rest[2..], `/`, false)
rest = r
a := parse_authority(authority)?
a := parse_authority(authority) ?
url.user = a.user
url.host = a.host
}
@@ -530,7 +524,7 @@ fn parse_url(rawurl string, via_request bool) ?URL {
// raw_path is a hint of the encoding of path. We don't want to set it if
// the default escaping of path is equivalent, to help make sure that people
// don't rely on it in general.
url.set_path(rest)?
url.set_path(rest) ?
return url
}
@@ -546,11 +540,10 @@ fn parse_authority(authority string) ?ParseAuthorityRes {
mut host := ''
mut zuser := user('')
if i < 0 {
h := parse_host(authority)?
h := parse_host(authority) ?
host = h
}
else {
h := parse_host(authority[i + 1..])?
} else {
h := parse_host(authority[i + 1..]) ?
host = h
}
if i < 0 {
@@ -564,15 +557,14 @@ fn parse_authority(authority string) ?ParseAuthorityRes {
return error(error_msg('parse_authority: invalid userinfo', ''))
}
if !userinfo.contains(':') {
u := unescape(userinfo, .encode_user_password)?
u := unescape(userinfo, .encode_user_password) ?
userinfo = u
zuser = user(userinfo)
}
else {
mut username,mut password := split(userinfo, `:`, true)
u := unescape(username, .encode_user_password)?
} else {
mut username, mut password := split(userinfo, `:`, true)
u := unescape(username, .encode_user_password) ?
username = u
p := unescape(password, .encode_user_password)?
p := unescape(password, .encode_user_password) ?
password = p
zuser = user_password(username, password)
}
@@ -593,7 +585,8 @@ fn parse_host(host string) ?string {
}
mut colon_port := host[i + 1..]
if !valid_optional_port(colon_port) {
return error(error_msg('parse_host: invalid port $colon_port after host ', ''))
return error(error_msg('parse_host: invalid port $colon_port after host ',
''))
}
// RFC 6874 defines that %25 (%-encoded percent) introduces
// the zone identifier, and the zone identifier can use basically
@@ -601,7 +594,7 @@ fn parse_host(host string) ?string {
// can only %-encode non-ASCII bytes.
// We do impose some restrictions on the zone, to avoid stupidity
// like newlines.
if zone:=host[..i].index('%25'){
if zone := host[..i].index('%25') {
host1 := unescape(host[..zone], .encode_host) or {
return err
}
@@ -613,10 +606,11 @@ fn parse_host(host string) ?string {
}
return host1 + host2 + host3
}
if idx:=host.last_index(':'){
if idx := host.last_index(':') {
colon_port = host[idx..]
if !valid_optional_port(colon_port) {
return error(error_msg('parse_host: invalid port $colon_port after host ', ''))
return error(error_msg('parse_host: invalid port $colon_port after host ',
''))
}
}
}
@@ -627,6 +621,7 @@ fn parse_host(host string) ?string {
// host = h
// return host
}
// set_path sets the path and raw_path fields of the URL based on the provided
// escaped path p. It maintains the invariant that raw_path is only specified
// when it differs from the default encoding of the path.
@@ -636,14 +631,13 @@ fn parse_host(host string) ?string {
// set_path will return an error only if the provided path contains an invalid
// escaping.
pub fn (mut u URL) set_path(p string) ?bool {
path := unescape(p, .encode_path)?
path := unescape(p, .encode_path) ?
u.path = path
escp := escape(path, .encode_path)
if p == escp {
// Default encoding is fine.
u.raw_path = ''
}
else {
} else {
u.raw_path = p
}
return true
@@ -674,7 +668,7 @@ fn (u &URL) escaped_path() string {
// valid_encoded_path reports whether s is a valid encoded path.
// It must not contain any bytes that require escaping during path encoding.
fn valid_encoded_path(s string) bool {
for i in 0..s.len {
for i in 0 .. s.len {
// RFC 3986, Appendix A.
// pchar = unreserved / pct-encoded / sub-delims / ':' / '@'.
// should_escape is not quite compliant with the RFC,
@@ -695,7 +689,8 @@ fn valid_encoded_path(s string) bool {
if should_escape(s[i], .encode_path) {
return false
}
}}
}
}
}
return true
}
@@ -746,8 +741,7 @@ pub fn (u URL) str() string {
}
if u.opaque != '' {
buf.write(u.opaque)
}
else {
} else {
if u.scheme != '' || u.host != '' || (u.user != 0 && !u.user.empty()) {
if u.host != '' || u.path != '' || !u.user.empty() {
buf.write('//')
@@ -804,7 +798,7 @@ pub fn (u URL) str() string {
// interpreted as a key set to an empty value.
pub fn parse_query(query string) ?Values {
mut m := new_values()
parse_query_values(mut m, query)?
parse_query_values(mut m, query) ?
return m
}
@@ -825,15 +819,14 @@ fn parse_query_values(mut m Values, query string) ?bool {
if i >= 0 {
q = key[i + 1..]
key = key[..i]
}
else {
} else {
q = ''
}
if key == '' {
continue
}
mut value := ''
if idx:=key.index('='){
if idx := key.index('=') {
i = idx
value = key[i + 1..]
key = key[..i]
@@ -885,18 +878,16 @@ pub fn (v Values) encode() string {
// resolve_path applies special path segments from refs and applies
// them to base, per RFC 3986.
fn resolve_path(base, ref string) string {
fn resolve_path(base string, ref string) string {
mut full := ''
if ref == '' {
full = base
}
else if ref[0] != `/` {
} else if ref[0] != `/` {
i := base.last_index('/') or {
-1
}
full = base[..i + 1] + ref
}
else {
} else {
full = ref
}
if full == '' {
@@ -916,7 +907,8 @@ fn resolve_path(base, ref string) string {
}
else {
dst << elem
}}
}
}
}
last := src[src.len - 1]
if last == '.' || last == '..' {
@@ -936,7 +928,7 @@ pub fn (u &URL) is_abs() bool {
// may be relative or absolute. parse returns nil, err on parse
// failure, otherwise its return value is the same as resolve_reference.
pub fn (u &URL) parse(ref string) ?URL {
refurl := parse(ref)?
refurl := parse(ref) ?
return u.resolve_reference(refurl)
}
@@ -955,7 +947,7 @@ pub fn (u &URL) resolve_reference(ref &URL) ?URL {
// The 'absoluteURI' or 'net_path' cases.
// We can ignore the error from set_path since we know we provided a
// validly-escaped path.
url.set_path(resolve_path(ref.escaped_path(), ''))?
url.set_path(resolve_path(ref.escaped_path(), '')) ?
return url
}
if ref.opaque != '' {
@@ -973,7 +965,7 @@ pub fn (u &URL) resolve_reference(ref &URL) ?URL {
// The 'abs_path' or 'rel_path' cases.
url.host = u.host
url.user = u.user
url.set_path(resolve_path(u.escaped_path(), ref.escaped_path()))?
url.set_path(resolve_path(u.escaped_path(), ref.escaped_path())) ?
return url
}
@@ -994,8 +986,7 @@ pub fn (u &URL) request_uri() string {
if result == '' {
result = '/'
}
}
else {
} else {
if result.starts_with('//') {
result = u.scheme + ':' + result
}
@@ -1011,21 +1002,21 @@ pub fn (u &URL) request_uri() string {
// If the result is enclosed in square brackets, as literal IPv6 addresses are,
// the square brackets are removed from the result.
pub fn (u &URL) hostname() string {
host,_ := split_host_port(u.host)
host, _ := split_host_port(u.host)
return host
}
// port returns the port part of u.host, without the leading colon.
// If u.host doesn't contain a port, port returns an empty string.
pub fn (u &URL) port() string {
_,port := split_host_port(u.host)
_, port := split_host_port(u.host)
return port
}
// split_host_port separates host and port. If the port is not valid, it returns
// the entire input as host, and it doesn't check the validity of the host.
// Per RFC 3986, it requires ports to be numeric.
fn split_host_port(hostport string) (string,string) {
fn split_host_port(hostport string) (string, string) {
mut host := hostport
mut port := ''
colon := host.last_index_byte(`:`)
@@ -1036,7 +1027,7 @@ fn split_host_port(hostport string) (string,string) {
if host.starts_with('[') && host.ends_with(']') {
host = host[1..host.len - 1]
}
return host,port
return host, port
}
// valid_userinfo reports whether s is a valid userinfo string per RFC 3986
@@ -1059,19 +1050,16 @@ pub fn valid_userinfo(s string) bool {
continue
}
match r {
`-`, `.`, `_`, `:`, `~`, `!`, `$`, `&`, `\\`, `(`, `)`, `*`, `+`, `,`, `;`, `=`, `%`, `@` {
continue
}
else {
return false
}}
`-`, `.`, `_`, `:`, `~`, `!`, `$`, `&`, `\\`, `(`, `)`, `*`, `+`, `,`, `;`, `=`, `%`, `@` { continue }
else { return false }
}
}
return true
}
// string_contains_ctl_byte reports whether s contains any ASCII control character.
fn string_contains_ctl_byte(s string) bool {
for i in 0..s.len {
for i in 0 .. s.len {
b := s[i]
if b < ` ` || b == 0x7f {
return true
@@ -1083,11 +1071,9 @@ fn string_contains_ctl_byte(s string) bool {
pub fn ishex(c byte) bool {
if `0` <= c && c <= `9` {
return true
}
else if `a` <= c && c <= `f` {
} else if `a` <= c && c <= `f` {
return true
}
else if `A` <= c && c <= `F` {
} else if `A` <= c && c <= `F` {
return true
}
return false
@@ -1096,11 +1082,9 @@ pub fn ishex(c byte) bool {
fn unhex(c byte) byte {
if `0` <= c && c <= `9` {
return c - `0`
}
else if `a` <= c && c <= `f` {
} else if `a` <= c && c <= `f` {
return c - `a` + 10
}
else if `A` <= c && c <= `F` {
} else if `A` <= c && c <= `F` {
return c - `A` + 10
}
return 0
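
As a quick reference for the API being reformatted here, a minimal parse-and-inspect sketch that uses only names appearing in the hunks above (urllib.parse, scheme, path, raw_query, hostname(), port(), query(), get_all()); the URL itself is an arbitrary example:

import net.urllib

fn main() {
	u := urllib.parse('https://vlang.io:8080/utc_now?abc=xyz') or {
		eprintln(err)
		return
	}
	println(u.scheme) // 'https'
	println(u.hostname()) // 'vlang.io'
	println(u.port()) // '8080'
	println(u.path) // '/utc_now'
	println(u.raw_query) // 'abc=xyz'
	println(u.query().get_all('abc')) // ['xyz']
}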


@@ -11,7 +11,7 @@ pub mut:
struct Values {
pub mut:
data map[string]Value
len int
len int
}
// new_values returns a new Values struct for creating
@@ -20,7 +20,7 @@ pub mut:
// values.encode() will return the encoded data
pub fn new_values() Values {
return Values{
data: map[string]Value
data: map[string]Value{}
}
}
@@ -61,7 +61,7 @@ pub fn (v &Values) get_all(key string) []string {
// set sets the key to value. It replaces any existing
// values.
pub fn (mut v Values) set(key, value string) {
pub fn (mut v Values) set(key string, value string) {
mut a := v.data[key]
a.data = [value]
v.data[key] = a
@@ -70,7 +70,7 @@ pub fn (mut v Values) set(key, value string) {
// add adds the value to key. It appends to any existing
// values associated with key.
pub fn (mut v Values) add(key, value string) {
pub fn (mut v Values) add(key string, value string) {
mut a := v.data[key]
if a.data.len == 0 {
a.data = []


@@ -191,13 +191,13 @@ pub fn (mut nodes []DocNode) sort_by_category() {
nodes.sort_with_compare(compare_nodes_by_category)
}
fn compare_nodes_by_name(a, b &DocNode) int {
fn compare_nodes_by_name(a &DocNode, b &DocNode) int {
al := a.name.to_lower()
bl := b.name.to_lower()
return compare_strings(al, bl)
}
fn compare_nodes_by_category(a, b &DocNode) int {
fn compare_nodes_by_category(a &DocNode, b &DocNode) int {
al := a.attrs['category']
bl := b.attrs['category']
return compare_strings(al, bl)
@@ -217,7 +217,7 @@ pub fn (nodes []DocNode) find_children_of(parent string) []DocNode {
return nodes.find_nodes_with_attr('parent', parent)
}
pub fn (nodes []DocNode) find_nodes_with_attr(attr_name, value string) []DocNode {
pub fn (nodes []DocNode) find_nodes_with_attr(attr_name string, value string) []DocNode {
mut subgroup := []DocNode{}
if attr_name.len == 0 {
return subgroup
@@ -509,7 +509,7 @@ fn (mut d Doc) generate() ?Doc {
return *d
}
pub fn generate_from_pos(input_path, filename string, pos int) ?Doc {
pub fn generate_from_pos(input_path string, filename string, pos int) ?Doc {
mut doc := new(input_path)
doc.pub_only = false
doc.with_comments = true
@@ -519,7 +519,7 @@ pub fn generate_from_pos(input_path, filename string, pos int) ?Doc {
return doc.generate()
}
pub fn generate(input_path string, pub_only, with_comments bool) ?Doc {
pub fn generate(input_path string, pub_only bool, with_comments bool) ?Doc {
mut doc := new(input_path)
doc.pub_only = pub_only
doc.with_comments = with_comments