// Copyright (c) 2019-2020 Alexander Medvednikov. All rights reserved.
// Use of this source code is governed by an MIT license
// that can be found in the LICENSE file.
module http

import net.urllib
import net.http.chunked
import strings
import net

const (
	max_redirects        = 4
	content_type_default = 'text/plain'
	bufsize              = 1536
)

pub struct Request {
pub mut:
	method     string
	headers    map[string]string
	cookies    map[string]string
	data       string
	url        string
	user_agent string = 'v.http'
	verbose    bool
	user_ptr   voidptr
	ws_func    voidptr
}

pub struct FetchConfig {
pub mut:
	method     string
	data       string
	params     map[string]string
	headers    map[string]string
	cookies    map[string]string
	user_agent string = 'v.http'
	verbose    bool
}

pub struct Response {
pub:
	text        string
	headers     map[string]string // original response headers, e.g. 'Set-Cookie' or 'set-Cookie'
	lheaders    map[string]string // same as headers, but with normalized lowercased keys, like 'set-cookie'
	cookies     map[string]string
	status_code int
}

pub fn new_request(method, url_, data string) ?Request {
	url := if method == 'GET' { url_ + '?' + data } else { url_ }
	// println('new req() method=$method url="$url" data="$data"')
	return Request{
		method: method.to_upper()
		url: url
		data: data
		/*
		headers: {
			'Accept-Encoding': 'compress'
		}
		*/
	}
}

pub fn get(url string) ?Response {
	return fetch_with_method('GET', url, FetchConfig{})
}
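
// Usage sketch (illustrative only, not part of this module): fetching a page
// with `http.get` and reading the parsed `Response` fields. The URL below is
// just a placeholder.
//
//     import net.http
//
//     fn main() {
//         resp := http.get('http://example.com/') or {
//             eprintln('request failed: $err')
//             return
//         }
//         println(resp.status_code)
//         println(resp.lheaders['content-type'])
//         println(resp.text)
//         // or, when errors can simply be ignored, grab just the body:
//         println(http.get_text('http://example.com/'))
//     }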

pub fn post(url, data string) ?Response {
	return fetch_with_method('POST', url, {
		data: data
		headers: {
			'Content-Type': content_type_default
		}
	})
}

pub fn post_json(url, data string) ?Response {
	return fetch_with_method('POST', url, {
		data: data
		headers: {
			'Content-Type': 'application/json'
		}
	})
}

pub fn post_form(url string, data map[string]string) ?Response {
	return fetch_with_method('POST', url, {
		headers: {
			'Content-Type': 'application/x-www-form-urlencoded'
		}
		data: url_encode_form_data(data)
	})
}

pub fn put(url, data string) ?Response {
	return fetch_with_method('PUT', url, {
		data: data
		headers: {
			'Content-Type': content_type_default
		}
	})
}

pub fn patch(url, data string) ?Response {
	return fetch_with_method('PATCH', url, {
		data: data
		headers: {
			'Content-Type': content_type_default
		}
	})
}

pub fn head(url string) ?Response {
	return fetch_with_method('HEAD', url, FetchConfig{})
}

pub fn delete(url string) ?Response {
	return fetch_with_method('DELETE', url, FetchConfig{})
}
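
// Usage sketch for the convenience wrappers above (illustrative only; the
// endpoints and payloads are placeholders):
//
//     import net.http
//
//     fn main() {
//         // raw body with the default 'text/plain' Content-Type
//         http.post('http://example.com/log', 'hello') or { panic(err) }
//         // JSON body
//         http.post_json('http://example.com/api', '{"name":"v"}') or { panic(err) }
//         // form fields are escaped and joined into a single 'a=1&b=...' body
//         http.post_form('http://example.com/form', {'a': '1', 'b': 'two words'}) or { panic(err) }
//     }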

pub fn fetch(_url string, config FetchConfig) ?Response {
	if _url == '' {
		return error('http.fetch: empty url')
	}
	url := build_url_from_fetch(_url, config) or {
		return error('http.fetch: invalid url ${_url}')
	}
	data := config.data
	method := config.method.to_upper()
	req := Request{
		method: method
		url: url
		data: data
		headers: config.headers
		cookies: config.cookies
		user_agent: config.user_agent
		ws_func: 0
		user_ptr: 0
		verbose: config.verbose
	}
	res := req.do() or {
		return error(err)
	}
	return res
}
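
// Usage sketch for `fetch` with a full `FetchConfig` (illustrative only; the
// URL, params and header values are placeholders). `params` are appended to
// the query string by build_url_from_fetch; `headers` and `cookies` are sent
// with the request.
//
//     import net.http
//
//     fn main() {
//         config := http.FetchConfig{
//             method: 'GET'
//             params: {'q': 'vlang', 'page': '2'}
//             headers: {'Accept': 'application/json'}
//             cookies: {'session': 'abc123'}
//         }
//         resp := http.fetch('http://example.com/search', config) or {
//             eprintln('fetch failed: $err')
//             return
//         }
//         println(resp.status_code)
//     }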

pub fn get_text(url string) string {
	resp := fetch(url, {
		method: 'GET'
	}) or {
		return ''
	}
	return resp.text
}

pub fn url_encode_form_data(data map[string]string) string {
	mut pieces := []string{}
	for key_, value_ in data {
		key := urllib.query_escape(key_)
		value := urllib.query_escape(value_)
		pieces << '$key=$value'
	}
	return pieces.join('&')
}
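
// Sketch of what url_encode_form_data produces (the exact escaping of spaces
// and reserved characters is delegated to urllib.query_escape, so the output
// shown here is an approximation):
//
//     body := http.url_encode_form_data({'name': 'V lang', 'version': '0.1'})
//     // both keys and values are escaped, then joined as 'key=value' pairs
//     // separated by '&', e.g. something like 'name=V+lang&version=0.1'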

fn fetch_with_method(method, url string, _config FetchConfig) ?Response {
	mut config := _config
	config.method = method
	return fetch(url, config)
}

fn build_url_from_fetch(_url string, config FetchConfig) ?string {
	mut url := urllib.parse(_url) or {
		return error(err)
	}
	params := config.params
	if params.keys().len == 0 {
		return url.str()
	}
	mut pieces := []string{}
	for key in params.keys() {
		pieces << '${key}=${params[key]}'
	}
	mut query := pieces.join('&')
	if url.raw_query.len > 1 {
		query = url.raw_query + '&' + query
	}
	url.raw_query = query
	return url.str()
}
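
// Sketch of how build_url_from_fetch merges `FetchConfig.params` into a URL
// that already has a query string (hypothetical values):
//
//     // given _url = 'http://example.com/search?lang=v'
//     // and config.params = {'page': '2'}
//     // the resulting raw query is 'lang=v&page=2', i.e.
//     // 'http://example.com/search?lang=v&page=2'
//
// Note that params are concatenated as-is; callers that need escaping can
// pre-encode them with urllib.query_escape.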

fn (mut req Request) free() {
	req.headers.free()
}

fn (mut resp Response) free() {
	resp.headers.free()
}

// add_header adds the key and value of an HTTP request header
pub fn (mut req Request) add_header(key, val string) {
	req.headers[key] = val
}
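
// Sketch of building a request by hand instead of going through fetch()
// (illustrative only; the URL and header values are placeholders):
//
//     import net.http
//
//     fn main() {
//         mut req := http.new_request('GET', 'http://example.com/', '') or {
//             panic(err)
//         }
//         req.add_header('Accept', 'text/html')
//         resp := req.do() or { panic(err) }
//         println(resp.status_code)
//     }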

pub fn parse_headers(lines []string) map[string]string {
	mut headers := map[string]string{}
	for i, line in lines {
		if i == 0 {
			continue
		}
		words := line.split(': ')
		if words.len != 2 {
			continue
		}
		headers[words[0]] = words[1]
	}
	return headers
}
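
// Sketch of parse_headers input/output: the first line (the status line) is
// skipped, and each remaining 'Key: Value' line becomes a map entry.
//
//     h := http.parse_headers([
//         'HTTP/1.1 200 OK',
//         'Content-Type: text/html',
//         'Content-Length: 42',
//     ])
//     // h == {'Content-Type': 'text/html', 'Content-Length': '42'}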

// do will send the HTTP request and return an `http.Response` as soon as the response is received
pub fn (req &Request) do() ?Response {
	mut url := urllib.parse(req.url) or {
		return error('http.Request.do: invalid url ${req.url}')
	}
	mut rurl := url
	mut resp := Response{}
	mut no_redirects := 0
	for {
		if no_redirects == max_redirects {
			return error('http.request.do: maximum number of redirects reached ($max_redirects)')
		}
		qresp := req.method_and_url_to_response(req.method, rurl) or {
			return error(err)
		}
		resp = qresp
		if resp.status_code !in [301, 302, 303, 307, 308] {
			break
		}
		// follow any redirects
		mut redirect_url := resp.lheaders['location']
		if redirect_url.len > 0 && redirect_url[0] == `/` {
			// relative redirect: keep the current host and replace only the path
			url.set_path(redirect_url) or {
				return error('http.request.do: invalid path in redirect: "$redirect_url"')
			}
			redirect_url = url.str()
		}
		qrurl := urllib.parse(redirect_url) or {
			return error('http.request.do: invalid URL in redirect "$redirect_url"')
		}
		rurl = qrurl
		no_redirects++
	}
	return resp
}
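
// Redirect behavior sketch: `do` transparently follows 301/302/303/307/308
// responses via the 'location' header, up to max_redirects (4) hops; once the
// limit is hit it returns an error, so callers only ever see the final hop:
//
//     resp := req.do() or {
//         // e.g. 'http.request.do: maximum number of redirects reached (4)'
//         eprintln(err)
//         return
//     }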

fn (req &Request) method_and_url_to_response(method string, url urllib.URL) ?Response {
	host_name := url.hostname()
	scheme := url.scheme
	p := url.path.trim_left('/')
	path := if url.query().len > 0 { '/$p?${url.query().encode()}' } else { '/$p' }
	mut nport := url.port().int()
	if nport == 0 {
		if scheme == 'http' {
			nport = 80
		}
		if scheme == 'https' {
			nport = 443
		}
	}
	// println('fetch $method, $scheme, $host_name, $nport, $path ')
	if scheme == 'https' {
		// println('ssl_do( $nport, $method, $host_name, $path )')
		res := req.ssl_do(nport, method, host_name, path) or {
			return error(err)
		}
		return res
	} else if scheme == 'http' {
		// println('http_do( $nport, $method, $host_name, $path )')
		res := req.http_do(nport, method, host_name, path) or {
			return error(err)
		}
		return res
	}
	return error('http.request.method_and_url_to_response: unsupported scheme: "$scheme"')
}

fn parse_response(resp string) Response {
	// TODO: Header data type
	mut headers := map[string]string{}
	mut lheaders := map[string]string{}
	// TODO: Cookie data type
	mut cookies := map[string]string{}
	first_header := resp.all_before('\n')
	mut status_code := 0
	if first_header.contains('HTTP/') {
		val := first_header.find_between(' ', ' ')
		status_code = val.int()
	}
	mut text := ''
	// Build resp headers map and separate the body
	mut nl_pos := 3
	mut i := 1
	for {
		old_pos := nl_pos
		nl_pos = resp.index_after('\n', nl_pos + 1)
		if nl_pos == -1 {
			break
		}
		h := resp[old_pos + 1..nl_pos]
		// End of headers
		if h.len <= 1 {
			text = resp[nl_pos + 1..]
			break
		}
		i++
		pos := h.index(':') or {
			continue
		}
		// if h.contains('Content-Type') {
		// continue
		// }
		mut key := h[..pos]
		lkey := key.to_lower()
		val := h[pos + 2..]
		if lkey == 'set-cookie' {
			parts := val.trim_space().split('=')
			cookies[parts[0]] = parts[1]
		}
		tval := val.trim_space()
		headers[key] = tval
		lheaders[lkey] = tval
	}
	if lheaders['transfer-encoding'] == 'chunked' || lheaders['content-length'] == '' {
		text = chunked.decode(text)
	}
	return Response{
		status_code: status_code
		headers: headers
		lheaders: lheaders
		cookies: cookies
		text: text
	}
}
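
// Sketch of what parse_response extracts from a raw response (hypothetical
// wire data; CRLFs written out explicitly):
//
//     raw := 'HTTP/1.1 200 OK\r\n' + 'Content-Type: text/html\r\n' +
//         'Content-Length: 13\r\n' + 'Set-Cookie: id=42\r\n' + '\r\n' + '<html></html>'
//     // parse_response(raw) yields roughly:
//     //   status_code == 200
//     //   headers['Content-Type'] == 'text/html'
//     //   lheaders['content-type'] == 'text/html'
//     //   cookies['id'] == '42'
//     //   text == '<html></html>'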

fn (req &Request) build_request_headers(method, host_name, path string) string {
	ua := req.user_agent
	mut uheaders := []string{}
	if 'Host' !in req.headers {
		uheaders << 'Host: $host_name\r\n'
	}
	if 'User-Agent' !in req.headers {
		uheaders << 'User-Agent: $ua\r\n'
	}
	if req.data.len > 0 && 'Content-Length' !in req.headers {
		uheaders << 'Content-Length: ${req.data.len}\r\n'
	}
	for key, val in req.headers {
		if key == 'Cookie' {
			continue
		}
		uheaders << '${key}: ${val}\r\n'
	}
	uheaders << req.build_request_cookies_header()
	return '$method $path HTTP/1.1\r\n' + uheaders.join('') + 'Connection: close\r\n\r\n' + req.data
}
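
// Sketch of the request text this produces for a simple GET (hypothetical
// values; line breaks are \r\n on the wire):
//
//     GET /index.html HTTP/1.1
//     Host: example.com
//     User-Agent: v.http
//     Connection: close
//
// A POST with data additionally gets a Content-Length header, and any
// user-supplied headers/cookies are appended before the Connection line.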

fn (req &Request) build_request_cookies_header() string {
	if req.cookies.keys().len < 1 {
		return ''
	}
	mut cookie := []string{}
	for key, val in req.cookies {
		// cookie pairs use the 'name=value' form; they are joined with '; ' below
		cookie << '$key=$val'
	}
	if 'Cookie' in req.headers && req.headers['Cookie'] != '' {
		cookie << req.headers['Cookie']
	}
	return 'Cookie: ' + cookie.join('; ') + '\r\n'
}

pub fn unescape_url(s string) string {
	panic('http.unescape_url() was replaced with urllib.query_unescape()')
}

pub fn escape_url(s string) string {
	panic('http.escape_url() was replaced with urllib.query_escape()')
}

pub fn unescape(s string) string {
	panic('http.unescape() was replaced with http.unescape_url()')
}

pub fn escape(s string) string {
	panic('http.escape() was replaced with http.escape_url()')
}

fn (req &Request) http_do(port int, method, host_name, path string) ?Response {
	rbuffer := [bufsize]byte
	mut sb := strings.new_builder(100)
	s := req.build_request_headers(method, host_name, path)
	client := net.dial(host_name, port) or {
		return error(err)
	}
	// send the whole request, then read the response back in bufsize chunks
	client.send(s.str, s.len) or {
	}
	for {
		readbytes := client.crecv(rbuffer, bufsize)
		if readbytes < 0 {
			return error('http.request.http_do: error reading response. readbytes=$readbytes')
		}
		if readbytes == 0 {
			// the server closed the connection; the full response has been read
			break
		}
		sb.write(tos(rbuffer, readbytes))
	}
	client.close() or {
	}
	return parse_response(sb.str())
}