examples, readme: fix typos (#18994)
# JS DOM Benchmark Chart
## Running the App
> **NOTE**\
> The following steps require Node.js.
> To install Node, please refer to the [download page](https://nodejs.org/en/download/)
> or to installation via your operating system's [package manager](https://nodejs.org/en/download/package-manager).

The steps below assume that your current directory is the example project directory.

```sh
cd examples/js_dom_draw_bechmark_chart
```
Execute the following commands in separate terminal instances.

Run the TypeScript part of the benchmarks:

```sh
npm i --prefix typescript_vanilla_typeorm
npm run start:dev --prefix typescript_vanilla_typeorm
```
Run the V part of the benchmarks:

```sh
v run v_vweb_orm
```
Run the chart:

```sh
cd chart/ && v run .
```
## Dockerfile
> [docker build] => Docker image\
> [docker run] => Docker container

```sh
sudo docker build -t <name> .
sudo docker run --name <container name> --interactive --tty --publish 3001:3001 <name>
v run .
# A message like `[Vweb] Running app on http://localhost:3001/` should appear
exit
```
## Implementing New Benchmarks in V
In `v_vweb_orm/src/main.v`, create a route that returns a `Response` struct.

```v ignore
['/sqlite-memory/:count']
pub fn (mut app App) sqlite_memory(count int) vweb.Result {
	// ... (benchmark setup and timing loops elided in this excerpt) ...

	response := Response{
		insert: insert_stopwatchs
		@select: select_stopwatchs
		update: update_stopwatchs
	}
	return app.json(response)
}
```
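For orientation, here is a minimal sketch of what the `Response` struct behind that route could look like. The field names mirror the assignments above; the `[]int` element type is an assumption for illustration, not the project's exact definition.

```v ignore
// Hypothetical shape of the payload returned by the route above.
// `select` is a V keyword, so the field name is escaped with `@`.
struct Response {
	insert  []int
	@select []int
	update  []int
}
```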
In `chart/main.v`, create a service to request the benchmark data and decode the response as
`FrameworkBenchmarkResponse`.

```v ignore
fn typescript_sqlite_memory() ?FrameworkBenchmarkResponse {
	url := 'http://localhost:3000/sqlite-memory/${benchmark_loop_length}'
	res := http.get(url) or { panic(err) }
	framework_benchmark_response := json.decode(FrameworkBenchmarkResponse, res.body)!
	return framework_benchmark_response
}
```
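A plausible shape for `FrameworkBenchmarkResponse` is simply a mirror of the `Response` struct that each benchmark server sends; the definition below is an assumption for illustration, not the file's actual code.

```v ignore
// Hypothetical: mirrors the Response struct serialized by the benchmark servers.
struct FrameworkBenchmarkResponse {
	insert  []int
	@select []int
	update  []int
}
```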
Then update `insert_framework_benchmark_times()`, `select_framework_benchmark_times()` and
`update_framework_benchmark_times()` so that the `numbers := FrameworkPlatform{` literal in each of
them also covers the newly added function, as sketched below.
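As a rough illustration, one of those three functions could look like this. It assumes `FrameworkPlatform` has one `[]int` field per framework; the names used for the new framework are placeholders, not identifiers from the repository.

```v ignore
// Hypothetical sketch: the FrameworkPlatform field names are assumptions.
fn insert_framework_benchmark_times() FrameworkPlatform {
	typescript := typescript_sqlite_memory() or { panic(err) }
	// new_framework := new_framework_sqlite_memory() or { panic(err) } // newly added service
	numbers := FrameworkPlatform{
		typescript_sqlite_memory: typescript.insert
		// new_framework_sqlite_memory: new_framework.insert
	}
	return numbers
}
```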
## Roadmap
02/09/2022

- [ ] select bench (easy)
- [ ] vsql (easy)