v/vlib/net/http/http.v

// Copyright (c) 2019-2020 Alexander Medvednikov. All rights reserved.
// Use of this source code is governed by an MIT license
// that can be found in the LICENSE file.
module http

import net.urllib
import net.http.chunked

const (
	max_redirects        = 4
	content_type_default = 'text/plain'
)

pub struct Request {
pub mut:
	method     string
	headers    map[string]string
	cookies    map[string]string
	data       string
	url        string
	user_agent string
	verbose    bool
	// mut:
	user_ptr   voidptr
	ws_func    voidptr
}

pub struct FetchConfig {
pub mut:
	method     string
	data       string
	params     map[string]string
	headers    map[string]string
	cookies    map[string]string
	user_agent string // = 'v' QTODO
	verbose    bool = false
}

pub struct Response {
pub:
	text        string
	headers     map[string]string
	cookies     map[string]string
	status_code int
}

pub fn new_request(method, url, data string) ?Request {
	return Request{
		method: method
		url: url
		data: data
	}
}
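
// Sketch of building a request by hand and sending it with `do()`
// (the URL is only an illustration):
//	req := http.new_request('GET', 'http://example.com', '') or { return }
//	resp := req.do() or { return }
//	println(resp.status_code)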

pub fn get(url string) ?Response {
	return fetch_with_method('GET', url, FetchConfig{})
}

pub fn post(url, data string) ?Response {
	return fetch_with_method('POST', url, {
		data: data
		headers: {
			'Content-Type': content_type_default
		}
	})
}
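
// Usage sketch for `get` (the URL is invented for illustration; `post`
// works the same way, with the body as its second argument):
//	resp := http.get('http://example.com/') or {
//		eprintln('failed to fetch: $err')
//		return
//	}
//	println(resp.text)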

pub fn post_form(url string, data map[string]string) ?Response {
	return fetch_with_method('POST', url, {
		headers: {
			'Content-Type': 'application/x-www-form-urlencoded'
		}
		data: url_encode_form_data(data)
	})
}
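
// Sketch for `post_form` (the URL and fields are made up); the map is sent
// url-encoded, e.g. as `name=V&kind=language`, with the form content type:
//	mut form := map[string]string
//	form['name'] = 'V'
//	form['kind'] = 'language'
//	resp := http.post_form('http://example.com/submit', form) or { return }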

pub fn put(url, data string) ?Response {
	return fetch_with_method('PUT', url, {
		data: data
		headers: {
			'Content-Type': content_type_default
		}
	})
}

pub fn patch(url, data string) ?Response {
	return fetch_with_method('PATCH', url, {
		data: data
		headers: {
			'Content-Type': content_type_default
		}
	})
}

pub fn head(url string) ?Response {
	return fetch_with_method('HEAD', url, FetchConfig{})
}

pub fn delete(url string) ?Response {
	return fetch_with_method('DELETE', url, FetchConfig{})
}

pub fn fetch(_url string, config FetchConfig) ?Response {
	if _url == '' {
		return error('http.fetch: empty url')
	}
	url := build_url_from_fetch(_url, config) or {
		return error('http.fetch: invalid url ${_url}')
	}
	data := config.data
	method := config.method.to_upper()
	req := Request{
		method: method
		url: url
		data: data
		headers: config.headers
		cookies: config.cookies
		user_agent: config.user_agent
		ws_func: 0
		user_ptr: 0
		verbose: config.verbose
	}
	res := req.do() or {
		return error(err)
	}
	return res
}
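
// Sketch of a custom request through `fetch` (URL, body and header values
// are invented for illustration):
//	resp := http.fetch('http://example.com/api', {
//		method: 'POST'
//		data: '{"answer": 42}'
//		headers: {
//			'Content-Type': 'application/json'
//		}
//	}) or { return }
//	println(resp.status_code)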

pub fn get_text(url string) string {
	resp := fetch(url, {
		method: 'GET'
	}) or {
		return ''
	}
	return resp.text
}

pub fn url_encode_form_data(data map[string]string) string {
	mut pieces := []string{}
	for _key, _value in data {
		key := urllib.query_escape(_key)
		value := urllib.query_escape(_value)
		pieces << '$key=$value'
	}
	return pieces.join('&')
}
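
// For example, a map such as {'a': '1', 'b': '2'} is encoded as 'a=1&b=2';
// keys and values are escaped with urllib.query_escape first.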

fn fetch_with_method(method string, url string, _config FetchConfig) ?Response {
	mut config := _config
	config.method = method
	return fetch(url, config)
}

fn build_url_from_fetch(_url string, config FetchConfig) ?string {
	mut url := urllib.parse(_url) or {
		return error(err)
	}
	params := config.params
	if params.keys().len == 0 {
		return url.str()
	}
	mut pieces := []string{}
	for key in params.keys() {
		pieces << '${key}=${params[key]}'
	}
	mut query := pieces.join('&')
	if url.raw_query.len > 1 {
		query = url.raw_query + '&' + query
	}
	url.raw_query = query
	return url.str()
}

fn (req mut Request) free() {
	req.headers.free()
}

fn (resp mut Response) free() {
	resp.headers.free()
}

// add_header adds the key and value of an HTTP request header
pub fn (req mut Request) add_header(key, val string) {
	req.headers[key] = val
}
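
// Sketch: setting a custom header before sending (the header value and URL
// are examples only):
//	mut req := http.new_request('GET', 'http://example.com', '') or { return }
//	req.add_header('Accept', 'application/json')
//	resp := req.do() or { return }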

pub fn parse_headers(lines []string) map[string]string {
	mut headers := map[string]string
	for i, line in lines {
		if i == 0 {
			continue
		}
		words := line.split(': ')
		if words.len != 2 {
			continue
		}
		headers[words[0]] = words[1]
	}
	return headers
}
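
// For example, the lines of a raw response head such as
//	['HTTP/1.1 200 OK', 'Content-Type: text/html', 'Content-Length: 42']
// yield {'Content-Type': 'text/html', 'Content-Length': '42'};
// the status line is skipped and malformed lines are ignored.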

// do will send the HTTP request and return an `http.Response` as soon as the response is received
pub fn (req &Request) do() ?Response {
	mut url := urllib.parse(req.url) or {
		return error('http.Request.do: invalid url ${req.url}')
	}
	mut rurl := url
	mut resp := Response{}
	mut no_redirects := 0
	for {
		if no_redirects == max_redirects {
			return error('http.request.do: maximum number of redirects reached ($max_redirects)')
		}
		qresp := req.method_and_url_to_response(req.method, rurl) or {
			return error(err)
		}
		resp = qresp
		if resp.status_code !in [301, 302, 303, 307, 308] {
			break
		}
		// follow any redirects
		mut redirect_url := resp.headers['Location']
		if redirect_url.len > 0 && redirect_url[0] == `/` {
			url.set_path(redirect_url) or {
				return error('http.request.do: invalid path in redirect: "$redirect_url"')
			}
			redirect_url = url.str()
		}
		qrurl := urllib.parse(redirect_url) or {
			return error('http.request.do: invalid URL in redirect "$redirect_url"')
		}
		rurl = qrurl
		no_redirects++
	}
	return resp
}
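
// Redirect responses (301, 302, 303, 307, 308) are followed automatically,
// up to max_redirects times, before the final Response is returned.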

fn (req &Request) method_and_url_to_response(method string, url urllib.URL) ?Response {
	host_name := url.hostname()
	scheme := url.scheme
	p := url.path.trim_left('/')
	path := if url.query().size > 0 { '/$p?${url.query().encode()}' } else { '/$p' }
	mut nport := url.port().int()
	if nport == 0 {
		if scheme == 'http' {
			nport = 80
		}
		if scheme == 'https' {
			nport = 443
		}
	}
	// println('fetch $method, $scheme, $host_name, $nport, $path ')
	if scheme == 'https' {
		// println('ssl_do( $nport, $method, $host_name, $path )')
		res := req.ssl_do(nport, method, host_name, path) or {
			return error(err)
		}
		return res
	}
	else if scheme == 'http' {
		// println('http_do( $nport, $method, $host_name, $path )')
		res := req.http_do(nport, method, host_name, path) or {
			return error(err)
		}
		return res
	}
	return error('http.request.method_and_url_to_response: unsupported scheme: "$scheme"')
}
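
// For example, 'https://example.com/a/b?x=1' resolves to scheme 'https',
// host 'example.com', port 443 and path '/a/b?x=1' before ssl_do is called.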

fn parse_response(resp string) Response {
	// TODO: Header data type
	mut headers := map[string]string
	// TODO: Cookie data type
	mut cookies := map[string]string
	first_header := resp.all_before('\n')
	mut status_code := 0
	if first_header.contains('HTTP/') {
		val := first_header.find_between(' ', ' ')
		status_code = val.int()
	}
	mut text := ''
	// Build resp headers map and separate the body
	mut nl_pos := 3
	mut i := 1
	for {
		old_pos := nl_pos
		nl_pos = resp.index_after('\n', nl_pos + 1)
		if nl_pos == -1 {
			break
		}
		h := resp[old_pos + 1..nl_pos]
		// End of headers
		if h.len <= 1 {
			text = resp[nl_pos + 1..]
			break
		}
		i++
		pos := h.index(':') or {
			continue
		}
		// if h.contains('Content-Type') {
		// continue
		// }
		key := h[..pos]
		val := h[pos + 2..]
		if key == 'Set-Cookie' {
			parts := val.trim_space().split('=')
			cookies[parts[0]] = parts[1]
		}
		headers[key] = val.trim_space()
	}
	if headers['Transfer-Encoding'] == 'chunked' {
		text = chunked.decode(text)
	}
	return Response{
		status_code: status_code
		headers: headers
		cookies: cookies
		text: text
	}
}
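
// A minimal raw response such as
//	'HTTP/1.1 200 OK\r\nContent-Type: text/plain\r\n\r\nhello'
// parses into status_code 200, a 'Content-Type' header and text 'hello'.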

fn (req &Request) build_request_headers(method, host_name, path string) string {
	ua := req.user_agent
	mut uheaders := []string{}
	if 'Host' !in req.headers {
		uheaders << 'Host: $host_name\r\n'
	}
	if 'User-Agent' !in req.headers {
		uheaders << 'User-Agent: $ua\r\n'
	}
	if req.data.len > 0 && 'Content-Length' !in req.headers {
		uheaders << 'Content-Length: ${req.data.len}\r\n'
	}
	for key, val in req.headers {
		if key == 'Cookie' {
			continue
		}
		uheaders << '${key}: ${val}\r\n'
	}
	uheaders << req.build_request_cookies_header()
	return '$method $path HTTP/1.1\r\n' + uheaders.join('') + 'Connection: close\r\n\r\n' + req.data
}
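
// For a GET of '/index.html' on host 'example.com' with no custom headers,
// the generated text starts with 'GET /index.html HTTP/1.1\r\n', continues
// with the Host and User-Agent headers (plus any Cookie header) and ends
// with 'Connection: close\r\n\r\n' followed by the request body.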

fn (req &Request) build_request_cookies_header() string {
	if req.cookies.keys().len < 1 {
		return ''
	}
	mut cookie := []string{}
	for key, val in req.cookies {
		cookie << '$key=$val'
	}
	if 'Cookie' in req.headers && req.headers['Cookie'] != '' {
		cookie << req.headers['Cookie']
	}
	return 'Cookie: ' + cookie.join('; ') + '\r\n'
}

pub fn unescape_url(s string) string {
	panic('http.unescape_url() was replaced with urllib.query_unescape()')
}

pub fn escape_url(s string) string {
	panic('http.escape_url() was replaced with urllib.query_escape()')
}

pub fn unescape(s string) string {
	panic('http.unescape() was replaced with http.unescape_url()')
}

pub fn escape(s string) string {
	panic('http.escape() was replaced with http.escape_url()')
}

type wsfn fn(s string, ptr voidptr)