Mirror of https://github.com/vlang/v.git (synced 2023-08-10 21:13:21 +03:00)

Compare commits: weekly.202 ... weekly.202 (264 commits)
@@ -17,6 +17,7 @@ freebsd_task:

arm64_task:
name: Code CI / arm64-ubuntu-tcc
trigger_type: manual
arm_container:
image: ubuntu:latest
install_script: apt-get update -y && apt-get install --quiet -y build-essential pkg-config wget git valgrind libsqlite3-dev libssl-dev libxi-dev libxcursor-dev libfreetype6-dev libxi-dev libxcursor-dev libgl-dev xfonts-75dpi xfonts-base libmysqlclient-dev libpq-dev gcc-10-arm-linux-gnueabihf libc6-dev-armhf-cross qemu-user
@@ -56,7 +57,7 @@ arm64_task:
./cmd/tools/test_if_v_test_system_works

# - name: Self tests
./v -silent test-self
./v test-self

## - name: Self tests (-cstrict)
## ./v -cstrict test-self
@@ -16,6 +16,11 @@ indent_size = 2

[*.md]
trim_trailing_whitespace = false
# lines that are too long will trigger an error in cmd/tools/vcheck-md.v
# run v check-md [folder/file] to test markdown files
# the longest normal line is specified with this constant:
# `too_long_line_length_other = 100`
max_line_length = 100

[*.{txt,out}]
insert_final_newline = false
.github/ISSUE_TEMPLATE/bug_report.md (vendored, 2 changed lines)
@@ -14,6 +14,8 @@ assignees: ''
**V version:**
**OS:**

<!-- Please include the standalone minimal reproducible code.
If not, it may be closed by the administrators. -->
**What did you do?**
.github/workflows/ci.yml (vendored, 375 changed lines)
@@ -9,40 +9,10 @@ on:
|
||||
- "**.md"
|
||||
|
||||
concurrency:
|
||||
group: build-${{ github.event.pull_request.number || github.sha }}
|
||||
group: build-ci-${{ github.event.pull_request.number || github.sha }}
|
||||
cancel-in-progress: true
|
||||
|
||||
jobs:
|
||||
code-formatting-perf-regressions:
|
||||
runs-on: ubuntu-20.04
|
||||
timeout-minutes: 15
|
||||
env:
|
||||
VFLAGS: -cc gcc
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
- name: Environment info
|
||||
run: echo $VFLAGS $GITHUB_SHA $GITHUB_REF
|
||||
- name: Build local v
|
||||
run: make -j4
|
||||
- name: v test-cleancode
|
||||
run: ./v -silent test-cleancode
|
||||
- name: v test-fmt
|
||||
run: ./v -silent test-fmt
|
||||
|
||||
## Performance regressions
|
||||
- name: Clone & Build vmaster/v
|
||||
run: |
|
||||
git clone --depth=1 https://github.com/vlang/v vmaster/
|
||||
(cd vmaster; make -j4)
|
||||
- name: V versions
|
||||
run: ./v version && ./vmaster/v version
|
||||
- name: Build the repeat tool
|
||||
run: ./v cmd/tools/repeat.v
|
||||
- name: Repeat -o hw.c examples/hello_world.v
|
||||
run: cmd/tools/repeat --max_time 251 --series 3 --count 20 --nmins 2 --nmaxs 5 --warmup 3 --fail_percent 10 -t 'cd {T} ; ./v -show-timings -o hw.c examples/hello_world.v' . ./vmaster
|
||||
- name: Repeat -o v.c cmd/v
|
||||
run: cmd/tools/repeat --max_time 1501 --series 3 --count 20 --nmins 2 --nmaxs 5 --warmup 3 --fail_percent 10 -t 'cd {T} ; ./v -show-timings -o v.c cmd/v' . ./vmaster
|
||||
|
||||
ubuntu-tcc:
|
||||
runs-on: ubuntu-20.04
|
||||
timeout-minutes: 121
|
||||
@@ -173,89 +143,6 @@ jobs:
|
||||
./testcase_leak 2>leaks.txt
|
||||
[ "$(stat -c %s leaks.txt)" = "0" ]
|
||||
|
||||
misc-tooling:
|
||||
runs-on: ubuntu-20.04
|
||||
timeout-minutes: 121
|
||||
env:
|
||||
VFLAGS: -cc tcc -no-retry-compilation
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
with:
|
||||
fetch-depth: 10
|
||||
- name: Install dependencies
|
||||
run: |
|
||||
sudo apt-get update
|
||||
sudo apt-get install --quiet -y libssl-dev sqlite3 libsqlite3-dev valgrind
|
||||
sudo apt-get install --quiet -y libfreetype6-dev libxi-dev libxcursor-dev libgl-dev
|
||||
sudo apt-get install --quiet -y xfonts-75dpi xfonts-base
|
||||
- name: Build v
|
||||
run: make
|
||||
|
||||
- name: Ensure V can be compiled with -autofree
|
||||
run: ./v -autofree -o v2 cmd/v ## NB: this does not mean it runs, but at least keeps it from regressing
|
||||
|
||||
- name: Shader examples can be build
|
||||
run: |
|
||||
wget https://github.com/floooh/sokol-tools-bin/raw/master/bin/linux/sokol-shdc
|
||||
chmod +x ./sokol-shdc
|
||||
for f in examples/sokol/02_cubes_glsl/cube_glsl \
|
||||
examples/sokol/03_march_tracing_glsl/rt_glsl \
|
||||
examples/sokol/04_multi_shader_glsl/rt_glsl_puppy \
|
||||
examples/sokol/04_multi_shader_glsl/rt_glsl_march \
|
||||
examples/sokol/05_instancing_glsl/rt_glsl_instancing \
|
||||
examples/sokol/06_obj_viewer/gouraud \
|
||||
; do \
|
||||
echo "compiling shader $f.glsl ..."; \
|
||||
./sokol-shdc --input $f.glsl --output $f.h --slang glsl330 ; \
|
||||
done
|
||||
for vfile in examples/sokol/0?*/*.v; do echo "compiling $vfile ..."; ./v $vfile ; done
|
||||
|
||||
- name: Install C++ dependencies
|
||||
run: |
|
||||
sudo apt-get install --quiet -y postgresql libpq-dev libssl-dev sqlite3 libsqlite3-dev
|
||||
sudo apt-get install --quiet -y libfreetype6-dev libxi-dev libxcursor-dev libgl-dev
|
||||
sudo apt-get install --quiet -y valgrind g++-9
|
||||
- name: Build V
|
||||
run: make -j4
|
||||
- name: g++ version
|
||||
run: g++-9 --version
|
||||
- name: V self compilation with g++
|
||||
run: ./v -cc g++-9 -no-std -cflags -std=c++11 -o v2 cmd/v && ./v2 -cc g++-9 -no-std -cflags -std=c++11 -o v3 cmd/v
|
||||
## - name: Running tests with g++
|
||||
## run: ./v -cc g++-9 -silent test-self
|
||||
|
||||
|
||||
alpine-docker-musl-gcc:
|
||||
name: alpine-musl
|
||||
runs-on: ubuntu-20.04
|
||||
timeout-minutes: 121
|
||||
container:
|
||||
# Alpine docker pre-built container
|
||||
image: thevlang/vlang:alpine-build
|
||||
env:
|
||||
V_CI_MUSL: 1
|
||||
VFLAGS: -cc gcc
|
||||
volumes:
|
||||
- ${{github.workspace}}:/opt/vlang
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v2
|
||||
|
||||
- name: Show Environment
|
||||
run: |
|
||||
echo "PWD:"
|
||||
pwd
|
||||
echo "ENVIRONMENT:"
|
||||
env
|
||||
echo "C Compiler:"
|
||||
gcc --version
|
||||
|
||||
- name: Build V
|
||||
run: CC=gcc make
|
||||
|
||||
- name: Test V fixed tests
|
||||
run: ./v -silent test-self
|
||||
|
||||
macos:
|
||||
runs-on: macOS-latest
|
||||
timeout-minutes: 121
|
||||
@@ -524,72 +411,6 @@ jobs:
|
||||
echo "Running it..."
|
||||
ls
|
||||
|
||||
# ubuntu-autofree-selfcompile:
|
||||
# runs-on: ubuntu-20.04
|
||||
# timeout-minutes: 121
|
||||
# env:
|
||||
# VFLAGS: -cc gcc
|
||||
# steps:
|
||||
# - uses: actions/checkout@v2
|
||||
# - name: Build V
|
||||
# run: make -j4
|
||||
# - name: V self compilation with -autofree
|
||||
# run: ./v -o v2 -autofree cmd/v && ./v2 -o v3 -autofree cmd/v && ./v3 -o v4 -autofree cmd/v
|
||||
|
||||
# Ubuntu docker pre-built container
|
||||
ubuntu-musl:
|
||||
name: ubuntu-musl
|
||||
runs-on: ubuntu-20.04
|
||||
timeout-minutes: 121
|
||||
container:
|
||||
image: thevlang/vlang:ubuntu-build
|
||||
env:
|
||||
V_CI_MUSL: 1
|
||||
V_CI_UBUNTU_MUSL: 1
|
||||
VFLAGS: -cc musl-gcc
|
||||
volumes:
|
||||
- ${{github.workspace}}:/opt/vlang
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v2
|
||||
|
||||
- name: Build V
|
||||
run: |
|
||||
echo $VFLAGS && make -j4 && ./v -cg -o v cmd/v
|
||||
- name: Verify `v test` works
|
||||
run: |
|
||||
echo $VFLAGS
|
||||
./v cmd/tools/test_if_v_test_system_works.v
|
||||
./cmd/tools/test_if_v_test_system_works
|
||||
- name: Test V fixed tests
|
||||
run: |
|
||||
./v -silent test-self
|
||||
|
||||
# ubuntu-musl:
|
||||
# runs-on: ubuntu-20.04
|
||||
# timeout-minutes: 121
|
||||
# env:
|
||||
# VFLAGS: -cc musl-gcc
|
||||
# V_CI_MUSL: 1
|
||||
# steps:
|
||||
# - uses: actions/checkout@v2
|
||||
# - uses: actions/setup-node@v1
|
||||
# with:
|
||||
# node-version: 12.x
|
||||
# - name: Install dependencies
|
||||
# run: |
|
||||
# sudo apt-get install --quiet -y musl musl-tools libssl-dev sqlite3 libsqlite3-dev valgrind
|
||||
# - name: Build v
|
||||
# run: echo $VFLAGS && make -j4 && ./v -cg -o v cmd/v
|
||||
# # - name: Test v binaries
|
||||
# # run: ./v build-vbinaries
|
||||
# ## - name: Test v->js
|
||||
# ## run: ./v -o hi.js examples/hello_v_js.v && node hi.js
|
||||
# - name: quick debug
|
||||
# run: ./v -stats vlib/strconv/format_test.v
|
||||
# - name: Self tests
|
||||
# run: ./v -silent test-self
|
||||
|
||||
windows-gcc:
|
||||
runs-on: windows-2019
|
||||
timeout-minutes: 121
|
||||
@@ -775,127 +596,99 @@ jobs:
|
||||
## - name: v2 self compilation
|
||||
## run: .\v.exe -o v2.exe cmd/v && .\v2.exe -o v3.exe cmd/v
|
||||
|
||||
parser-silent:
|
||||
name: Parser silent mode
|
||||
alpine-docker-musl-gcc:
|
||||
runs-on: ubuntu-20.04
|
||||
timeout-minutes: 121
|
||||
container:
|
||||
# Alpine docker pre-built container
|
||||
image: thevlang/vlang:alpine-build
|
||||
env:
|
||||
V_CI_MUSL: 1
|
||||
VFLAGS: -cc gcc
|
||||
volumes:
|
||||
- ${{github.workspace}}:/opt/vlang
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
- name: Install zzuf
|
||||
run: sudo apt install -qq zzuf
|
||||
- name: Build local v
|
||||
run: |
|
||||
make -j4
|
||||
./v -g cmd/tools/vtest-parser.v
|
||||
- name: Run test-parser
|
||||
run: |
|
||||
./v test-parser -S examples/hello_world.v
|
||||
./v test-parser -S examples/hanoi.v
|
||||
./v test-parser -S examples/fibonacci.v
|
||||
./v test-parser -S examples/cli.v
|
||||
./v test-parser -S examples/json.v
|
||||
./v test-parser -S examples/vmod.v
|
||||
./v test-parser -S examples/regex/regex_example.v
|
||||
./v test-parser -S examples/2048/2048.v
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v2
|
||||
|
||||
- name: Run test-parser over fuzzed files
|
||||
- name: Show Environment
|
||||
run: |
|
||||
zzuf -R '\x00-\x20\x7f-\xff' -r0.01 < examples/hello_world.v > examples/hello_world_fuzz.v
|
||||
zzuf -R '\x00-\x20\x7f-\xff' -r0.01 < examples/hanoi.v > examples/hanoi_fuzz.v
|
||||
zzuf -R '\x00-\x20\x7f-\xff' -r0.01 < examples/fibonacci.v > examples/fibonacci_fuzz.v
|
||||
zzuf -R '\x00-\x20\x7f-\xff' -r0.01 < examples/cli.v > examples/cli_fuzz.v
|
||||
zzuf -R '\x00-\x20\x7f-\xff' -r0.01 < examples/json.v > examples/json_fuzz.v
|
||||
zzuf -R '\x00-\x20\x7f-\xff' -r0.01 < examples/vmod.v > examples/vmod_fuzz.v
|
||||
zzuf -R '\x00-\x20\x7f-\xff' -r0.01 < examples/regex/regex_example.v > examples/regex_example_fuzz.v
|
||||
zzuf -R '\x00-\x20\x7f-\xff' -r0.01 < examples/2048/2048.v > examples/2048/2048_fuzz.v
|
||||
./v test-parser -S examples/hello_world_fuzz.v
|
||||
./v test-parser -S examples/hanoi_fuzz.v
|
||||
./v test-parser -S examples/cli_fuzz.v
|
||||
./v test-parser -S examples/regex_example_fuzz.v
|
||||
./v test-parser -S examples/2048/2048_fuzz.v
|
||||
echo "PWD:"
|
||||
pwd
|
||||
echo "ENVIRONMENT:"
|
||||
env
|
||||
echo "C Compiler:"
|
||||
gcc --version
|
||||
|
||||
v-apps-compile:
|
||||
runs-on: ubuntu-20.04
|
||||
timeout-minutes: 121
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
- name: Build V
|
||||
run: make && sudo ./v symlink
|
||||
run: CC=gcc make
|
||||
|
||||
## vls
|
||||
- name: Clone VLS
|
||||
run: git clone --depth 1 https://github.com/vlang/vls
|
||||
- name: Build VLS
|
||||
run: pushd vls; v cmd/vls ; popd
|
||||
- name: Build VLS with -prod
|
||||
run: pushd vls; v -prod cmd/vls; popd
|
||||
- name: Test V fixed tests
|
||||
run: ./v -silent test-self
|
||||
|
||||
## vsl
|
||||
- name: Clone VSL
|
||||
run: git clone --depth 1 https://github.com/vlang/vsl ~/.vmodules/vsl
|
||||
- name: Install dependencies
|
||||
run: sudo apt-get install --quiet -y --no-install-recommends gfortran liblapacke-dev libopenblas-dev libgc-dev
|
||||
- name: Execute Tests using Pure V Backend
|
||||
run: ~/.vmodules/vsl/bin/test
|
||||
- name: Execute Tests using Pure V Backend with Pure V Math
|
||||
run: ~/.vmodules/vsl/bin/test --use-cblas
|
||||
- name: Execute Tests using Pure V Backend and Garbage Collection enabled
|
||||
run: ~/.vmodules/vsl/bin/test --use-gc boehm
|
||||
- name: Execute Tests using Pure V Backend with Pure V Math and Garbage Collection enabled
|
||||
run: ~/.vmodules/vsl/bin/test --use-cblas --use-gc boehm
|
||||
|
||||
## vtl
|
||||
- name: Clone VTL
|
||||
run: git clone --depth 1 https://github.com/vlang/vtl ~/.vmodules/vtl
|
||||
- name: Install dependencies
|
||||
run: sudo apt-get install --quiet -y --no-install-recommends gfortran liblapacke-dev libopenblas-dev libgc-dev
|
||||
- name: Execute Tests using Pure V Backend
|
||||
run: ~/.vmodules/vtl/bin/test
|
||||
- name: Execute Tests using Pure V Backend with Pure V Math
|
||||
run: ~/.vmodules/vtl/bin/test --use-cblas
|
||||
- name: Execute Tests using Pure V Backend and Garbage Collection enabled
|
||||
run: ~/.vmodules/vtl/bin/test --use-gc boehm
|
||||
- name: Execute Tests using Pure V Backend with Pure V Math and Garbage Collection enabled
|
||||
run: ~/.vmodules/vtl/bin/test --use-cblas --use-gc boehm
|
||||
# ubuntu-autofree-selfcompile:
|
||||
# runs-on: ubuntu-20.04
|
||||
# timeout-minutes: 121
|
||||
# env:
|
||||
# VFLAGS: -cc gcc
|
||||
# steps:
|
||||
# - uses: actions/checkout@v2
|
||||
# - name: Build V
|
||||
# run: make -j4
|
||||
# - name: V self compilation with -autofree
|
||||
# run: ./v -o v2 -autofree cmd/v && ./v2 -o v3 -autofree cmd/v && ./v3 -o v4 -autofree cmd/v
|
||||
|
||||
## vab
|
||||
- name: Clone vab
|
||||
run: git clone --depth 1 https://github.com/vlang/vab
|
||||
- name: Build vab
|
||||
run: cd vab; ../v ./vab.v ; cd ..
|
||||
|
||||
## gitly
|
||||
- name: Install markdown
|
||||
run: ./v install markdown
|
||||
- name: Build Gitly
|
||||
# ubuntu-musl:
|
||||
# runs-on: ubuntu-20.04
|
||||
# timeout-minutes: 121
|
||||
# env:
|
||||
# VFLAGS: -cc musl-gcc
|
||||
# V_CI_MUSL: 1
|
||||
# steps:
|
||||
# - uses: actions/checkout@v2
|
||||
# - uses: actions/setup-node@v1
|
||||
# with:
|
||||
# node-version: 12.x
|
||||
# - name: Install dependencies
|
||||
# run: |
|
||||
# sudo apt-get install --quiet -y musl musl-tools libssl-dev sqlite3 libsqlite3-dev valgrind
|
||||
# - name: Build v
|
||||
# run: echo $VFLAGS && make -j4 && ./v -cg -o v cmd/v
|
||||
# # - name: Test v binaries
|
||||
# # run: ./v build-vbinaries
|
||||
# ## - name: Test v->js
|
||||
# ## run: ./v -o hi.js examples/hello_v_js.v && node hi.js
|
||||
# - name: quick debug
|
||||
# run: ./v -stats vlib/strconv/format_test.v
|
||||
# - name: Self tests
|
||||
# run: ./v -silent test-self
|
||||
|
||||
# Ubuntu docker pre-built container
|
||||
ubuntu-musl:
|
||||
runs-on: ubuntu-20.04
|
||||
timeout-minutes: 121
|
||||
container:
|
||||
image: thevlang/vlang:ubuntu-build
|
||||
env:
|
||||
V_CI_MUSL: 1
|
||||
V_CI_UBUNTU_MUSL: 1
|
||||
VFLAGS: -cc musl-gcc
|
||||
volumes:
|
||||
- ${{github.workspace}}:/opt/vlang
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v2
|
||||
|
||||
- name: Build V
|
||||
run: |
|
||||
git clone --depth 1 https://github.com/vlang/gitly
|
||||
cd gitly
|
||||
../v .
|
||||
# ./gitly -ci_run
|
||||
../v -autofree .
|
||||
../v -o x tests/first_run.v
|
||||
./x
|
||||
cd ..
|
||||
|
||||
## vex
|
||||
- name: Install Vex dependencies
|
||||
run: sudo apt-get install --quiet -y libssl-dev sqlite3 libsqlite3-dev
|
||||
- name: Install Vex
|
||||
run: mkdir -p ~/.vmodules/nedpals; git clone https://github.com/nedpals/vex ~/.vmodules/nedpals/vex
|
||||
- name: Compile the simple Vex example
|
||||
run: ./v ~/.vmodules/nedpals/vex/examples/simple_example.v
|
||||
- name: Run Vex Tests
|
||||
run: ./v test ~/.vmodules/nedpals/vex
|
||||
|
||||
## vpm modules
|
||||
- name: Install UI through VPM
|
||||
run: ./v install ui
|
||||
|
||||
## libsodium
|
||||
- name: Install libsodium-dev package
|
||||
run: sudo apt-get install --quiet -y libsodium-dev
|
||||
- name: Installl the libsodium wrapper through VPM
|
||||
run: ./v install libsodium
|
||||
- name: Test libsodium
|
||||
run: ./v test ~/.vmodules/libsodium
|
||||
echo $VFLAGS && make -j4 && ./v -cg -o v cmd/v
|
||||
- name: Verify `v test` works
|
||||
run: |
|
||||
echo $VFLAGS
|
||||
./v cmd/tools/test_if_v_test_system_works.v
|
||||
./cmd/tools/test_if_v_test_system_works
|
||||
- name: Test V fixed tests
|
||||
run: |
|
||||
./v -silent test-self
|
||||
|
||||
.github/workflows/ci_sanitized.yml (vendored, 2 changed lines)
@@ -54,7 +54,7 @@ on:
- 'vlib/v/embed_file/**.v'

concurrency:
group: build-${{ github.event.pull_request.number || github.sha }}
group: build-sanitized-${{ github.event.pull_request.number || github.sha }}
cancel-in-progress: true

jobs:
.github/workflows/docs_ci.yml (vendored, 34 changed lines)
@@ -14,5 +14,35 @@ jobs:
run: make
- name: Check markdown line length & code examples
run: ./v check-md -hide-warnings .
## NB: -hide-warnings is used here, so that the output is less noisy,
## thus real errors are easier to spot.
## NB: -hide-warnings is used here, so that the output is less noisy,
## thus real errors are easier to spot.

report-missing-fn-doc:
runs-on: ubuntu-20.04
timeout-minutes: 5
env:
MOPTIONS: --no-line-numbers --relative-paths --exclude /vlib/v/ --exclude /builtin/linux_bare/ --exclude /testdata/ --exclude /tests/ vlib/
steps:
- uses: actions/checkout@v2
- name: Build V
run: make

- name: Checkout previous v
uses: actions/checkout@v2
with:
repository: vlang/v
path: pv

- name: Check against parent commit
run: |
./v run cmd/tools/missdoc.v $MOPTIONS | sort > /tmp/n_v.txt
cd pv/ && ../v run ../cmd/tools/missdoc.v $MOPTIONS | sort > /tmp/o_v.txt
count_new=$(cat /tmp/n_v.txt | wc -l)
count_old=$(cat /tmp/o_v.txt | wc -l)
echo "new pubs: $count_new | old pubs: $count_old"
echo "new head: $(head -n1 /tmp/n_v.txt)"
echo "old head: $(head -n1 /tmp/o_v.txt)"
if [[ ${count_new} -gt ${count_old} ]]; then
echo "The following $((count_new-count_old)) function(s) are introduced with no documentation:"
diff /tmp/n_v.txt /tmp/o_v.txt ## diff does exit(1) when files are different
fi
.github/workflows/other_ci.yml (vendored, new file, 248 lines)
@@ -0,0 +1,248 @@
|
||||
name: Other CI
|
||||
|
||||
on:
|
||||
push:
|
||||
paths-ignore:
|
||||
- "**.md"
|
||||
pull_request:
|
||||
paths-ignore:
|
||||
- "**.md"
|
||||
|
||||
concurrency:
|
||||
group: build-other-${{ github.event.pull_request.number || github.sha }}
|
||||
cancel-in-progress: true
|
||||
|
||||
jobs:
|
||||
no-gpl-by-accident:
|
||||
runs-on: ubuntu-20.04
|
||||
timeout-minutes: 15
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
- name: No GPL licensed code, should be added accidentally
|
||||
run: |
|
||||
! grep -r --exclude="*.yml" "a GPL license" .
|
||||
|
||||
code-formatting:
|
||||
runs-on: ubuntu-20.04
|
||||
timeout-minutes: 15
|
||||
env:
|
||||
VFLAGS: -cc gcc
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
- name: Environment info
|
||||
run: echo $VFLAGS $GITHUB_SHA $GITHUB_REF
|
||||
- name: Build local v
|
||||
run: make -j4
|
||||
- name: v test-cleancode
|
||||
run: ./v -silent test-cleancode
|
||||
- name: v test-fmt
|
||||
run: ./v -silent test-fmt
|
||||
|
||||
performance-regressions:
|
||||
runs-on: ubuntu-20.04
|
||||
timeout-minutes: 15
|
||||
env:
|
||||
VFLAGS: -cc gcc
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
- name: Environment info
|
||||
run: echo $VFLAGS $GITHUB_SHA $GITHUB_REF
|
||||
- name: Build local v
|
||||
run: make -j4
|
||||
- name: Clone & Build previous vmaster/v
|
||||
run: |
|
||||
git clone --depth=1 https://github.com/vlang/v vmaster/
|
||||
(cd vmaster; make -j4)
|
||||
- name: V versions
|
||||
run: ./v version && ./vmaster/v version
|
||||
- name: Build the repeat tool
|
||||
run: ./v cmd/tools/repeat.v
|
||||
- name: Repeat -o hw.c examples/hello_world.v
|
||||
run: cmd/tools/repeat --max_time 251 --series 3 --count 20 --nmins 2 --nmaxs 5 --warmup 3 --fail_percent 10 -t 'cd {T} ; ./v -show-timings -o hw.c examples/hello_world.v' . ./vmaster
|
||||
- name: Repeat -o v.c cmd/v
|
||||
run: cmd/tools/repeat --max_time 1501 --series 3 --count 20 --nmins 2 --nmaxs 5 --warmup 3 --fail_percent 10 -t 'cd {T} ; ./v -show-timings -o v.c cmd/v' . ./vmaster
|
||||
|
||||
misc-tooling:
|
||||
runs-on: ubuntu-20.04
|
||||
timeout-minutes: 121
|
||||
env:
|
||||
VFLAGS: -cc tcc -no-retry-compilation
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
with:
|
||||
fetch-depth: 10
|
||||
- name: Install dependencies
|
||||
run: |
|
||||
sudo apt-get update
|
||||
sudo apt-get install --quiet -y libssl-dev sqlite3 libsqlite3-dev valgrind
|
||||
sudo apt-get install --quiet -y libfreetype6-dev libxi-dev libxcursor-dev libgl-dev
|
||||
sudo apt-get install --quiet -y xfonts-75dpi xfonts-base
|
||||
- name: Build v
|
||||
run: make
|
||||
|
||||
- name: Ensure V can be compiled with -autofree
|
||||
run: ./v -autofree -o v2 cmd/v ## NB: this does not mean it runs, but at least keeps it from regressing
|
||||
|
||||
- name: Shader examples can be build
|
||||
run: |
|
||||
wget https://github.com/floooh/sokol-tools-bin/raw/33d2e4cc26088c6c28eaef5467990f8940d15aab/bin/linux/sokol-shdc
|
||||
chmod +x ./sokol-shdc
|
||||
for f in examples/sokol/02_cubes_glsl/cube_glsl \
|
||||
examples/sokol/03_march_tracing_glsl/rt_glsl \
|
||||
examples/sokol/04_multi_shader_glsl/rt_glsl_puppy \
|
||||
examples/sokol/04_multi_shader_glsl/rt_glsl_march \
|
||||
examples/sokol/05_instancing_glsl/rt_glsl_instancing \
|
||||
examples/sokol/06_obj_viewer/gouraud \
|
||||
; do \
|
||||
echo "compiling shader $f.glsl ..."; \
|
||||
./sokol-shdc --input $f.glsl --output $f.h --slang glsl330 ; \
|
||||
done
|
||||
for vfile in examples/sokol/0?*/*.v; do echo "compiling $vfile ..."; ./v $vfile ; done
|
||||
|
||||
- name: Install C++ dependencies
|
||||
run: |
|
||||
sudo apt-get install --quiet -y postgresql libpq-dev libssl-dev sqlite3 libsqlite3-dev
|
||||
sudo apt-get install --quiet -y libfreetype6-dev libxi-dev libxcursor-dev libgl-dev
|
||||
sudo apt-get install --quiet -y valgrind g++-9
|
||||
- name: Build V
|
||||
run: make -j4
|
||||
- name: g++ version
|
||||
run: g++-9 --version
|
||||
- name: V self compilation with g++
|
||||
run: ./v -cc g++-9 -no-std -cflags -std=c++11 -o v2 cmd/v && ./v2 -cc g++-9 -no-std -cflags -std=c++11 -o v3 cmd/v
|
||||
## - name: Running tests with g++
|
||||
## run: ./v -cc g++-9 -silent test-self
|
||||
|
||||
|
||||
parser-silent:
|
||||
runs-on: ubuntu-20.04
|
||||
timeout-minutes: 121
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
- name: Install zzuf
|
||||
run: sudo apt install -qq zzuf
|
||||
- name: Build local v
|
||||
run: |
|
||||
make -j4
|
||||
./v -g cmd/tools/vtest-parser.v
|
||||
- name: Run test-parser
|
||||
run: |
|
||||
./v test-parser -S examples/hello_world.v
|
||||
./v test-parser -S examples/hanoi.v
|
||||
./v test-parser -S examples/fibonacci.v
|
||||
./v test-parser -S examples/cli.v
|
||||
./v test-parser -S examples/json.v
|
||||
./v test-parser -S examples/vmod.v
|
||||
./v test-parser -S examples/regex/regex_example.v
|
||||
./v test-parser -S examples/2048/2048.v
|
||||
|
||||
- name: Run test-parser over fuzzed files
|
||||
run: |
|
||||
zzuf -R '\x00-\x20\x7f-\xff' -r0.01 < examples/hello_world.v > examples/hello_world_fuzz.v
|
||||
zzuf -R '\x00-\x20\x7f-\xff' -r0.01 < examples/hanoi.v > examples/hanoi_fuzz.v
|
||||
zzuf -R '\x00-\x20\x7f-\xff' -r0.01 < examples/fibonacci.v > examples/fibonacci_fuzz.v
|
||||
zzuf -R '\x00-\x20\x7f-\xff' -r0.01 < examples/cli.v > examples/cli_fuzz.v
|
||||
zzuf -R '\x00-\x20\x7f-\xff' -r0.01 < examples/json.v > examples/json_fuzz.v
|
||||
zzuf -R '\x00-\x20\x7f-\xff' -r0.01 < examples/vmod.v > examples/vmod_fuzz.v
|
||||
zzuf -R '\x00-\x20\x7f-\xff' -r0.01 < examples/regex/regex_example.v > examples/regex_example_fuzz.v
|
||||
zzuf -R '\x00-\x20\x7f-\xff' -r0.01 < examples/2048/2048.v > examples/2048/2048_fuzz.v
|
||||
./v test-parser -S examples/hello_world_fuzz.v
|
||||
./v test-parser -S examples/hanoi_fuzz.v
|
||||
./v test-parser -S examples/cli_fuzz.v
|
||||
./v test-parser -S examples/regex_example_fuzz.v
|
||||
./v test-parser -S examples/2048/2048_fuzz.v
|
||||
|
||||
v-apps-compile:
|
||||
runs-on: ubuntu-20.04
|
||||
timeout-minutes: 121
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
- name: Build V
|
||||
run: make && sudo ./v symlink
|
||||
|
||||
- name: Install dependencies
|
||||
run: sudo apt-get install --quiet -y libgc-dev
|
||||
|
||||
## vls
|
||||
- name: Clone VLS
|
||||
run: git clone --depth 1 https://github.com/vlang/vls
|
||||
- name: Build VLS
|
||||
run: pushd vls; v cmd/vls ; popd
|
||||
- name: Build VLS with -prod
|
||||
run: pushd vls; v -prod cmd/vls; popd
|
||||
- name: Build VLS with -gc boehm -skip-unused
|
||||
run: pushd vls; v -gc boehm -skip-unused cmd/vls; popd
|
||||
|
||||
## vsl
|
||||
- name: Clone VSL
|
||||
run: git clone --depth 1 https://github.com/vlang/vsl ~/.vmodules/vsl
|
||||
- name: Install dependencies
|
||||
run: sudo apt-get install --quiet -y --no-install-recommends gfortran liblapacke-dev libopenblas-dev libgc-dev
|
||||
- name: Execute Tests using Pure V Backend
|
||||
run: ~/.vmodules/vsl/bin/test
|
||||
- name: Execute Tests using Pure V Backend with Pure V Math
|
||||
run: ~/.vmodules/vsl/bin/test --use-cblas
|
||||
- name: Execute Tests using Pure V Backend and Garbage Collection enabled
|
||||
run: ~/.vmodules/vsl/bin/test --use-gc boehm
|
||||
- name: Execute Tests using Pure V Backend with Pure V Math and Garbage Collection enabled
|
||||
run: ~/.vmodules/vsl/bin/test --use-cblas --use-gc boehm
|
||||
|
||||
## vtl
|
||||
- name: Clone VTL
|
||||
run: git clone --depth 1 https://github.com/vlang/vtl ~/.vmodules/vtl
|
||||
- name: Install dependencies
|
||||
run: sudo apt-get install --quiet -y --no-install-recommends gfortran liblapacke-dev libopenblas-dev libgc-dev
|
||||
- name: Execute Tests using Pure V Backend
|
||||
run: ~/.vmodules/vtl/bin/test
|
||||
- name: Execute Tests using Pure V Backend with Pure V Math
|
||||
run: ~/.vmodules/vtl/bin/test --use-cblas
|
||||
- name: Execute Tests using Pure V Backend and Garbage Collection enabled
|
||||
run: ~/.vmodules/vtl/bin/test --use-gc boehm
|
||||
- name: Execute Tests using Pure V Backend with Pure V Math and Garbage Collection enabled
|
||||
run: ~/.vmodules/vtl/bin/test --use-cblas --use-gc boehm
|
||||
|
||||
## vab
|
||||
- name: Clone vab
|
||||
run: git clone --depth 1 https://github.com/vlang/vab
|
||||
- name: Build vab
|
||||
run: cd vab; ../v ./vab.v ; cd ..
|
||||
- name: Build vab with -gc boehm -skip-unused
|
||||
run: cd vab; ../v -gc boehm -skip-unused ./vab.v ; cd ..
|
||||
|
||||
## gitly
|
||||
- name: Install markdown
|
||||
run: ./v install markdown
|
||||
- name: Build Gitly
|
||||
run: |
|
||||
git clone --depth 1 https://github.com/vlang/gitly
|
||||
cd gitly
|
||||
../v .
|
||||
# ./gitly -ci_run
|
||||
../v -autofree .
|
||||
../v -o x tests/first_run.v
|
||||
./x
|
||||
cd ..
|
||||
|
||||
## vex
|
||||
- name: Install Vex dependencies
|
||||
run: sudo apt-get install --quiet -y libssl-dev sqlite3 libsqlite3-dev
|
||||
- name: Install Vex
|
||||
run: mkdir -p ~/.vmodules/nedpals; git clone https://github.com/nedpals/vex ~/.vmodules/nedpals/vex
|
||||
- name: Compile the simple Vex example
|
||||
run: ./v ~/.vmodules/nedpals/vex/examples/simple_example.v
|
||||
- name: Compile the simple Vex example with -gc boehm -skip-unused
|
||||
run: ./v -gc boehm -skip-unused ~/.vmodules/nedpals/vex/examples/simple_example.v
|
||||
- name: Run Vex Tests
|
||||
run: ./v test ~/.vmodules/nedpals/vex
|
||||
|
||||
## vpm modules
|
||||
- name: Install UI through VPM
|
||||
run: ./v install ui
|
||||
|
||||
## libsodium
|
||||
- name: Install libsodium-dev package
|
||||
run: sudo apt-get install --quiet -y libsodium-dev
|
||||
- name: Installl the libsodium wrapper through VPM
|
||||
run: ./v install libsodium
|
||||
- name: Test libsodium
|
||||
run: ./v test ~/.vmodules/libsodium
|
||||
.github/workflows/paths_ci.yml (vendored, new file, 104 lines)
@@ -0,0 +1,104 @@
|
||||
name: Path Testing CI
|
||||
|
||||
on:
|
||||
push:
|
||||
paths-ignore:
|
||||
- "**.md"
|
||||
pull_request:
|
||||
paths-ignore:
|
||||
- "**.md"
|
||||
|
||||
concurrency:
|
||||
group: build-paths-${{ github.event.pull_request.number || github.sha }}
|
||||
cancel-in-progress: true
|
||||
|
||||
jobs:
|
||||
|
||||
space-paths-linux:
|
||||
runs-on: ubuntu-20.04
|
||||
timeout-minutes: 30
|
||||
env:
|
||||
MY_V_PATH: '你好 my $path, @с интервали'
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
with:
|
||||
path: ${{env.MY_V_PATH}}
|
||||
- name: Build V
|
||||
run: |
|
||||
echo '${{env.MY_V_PATH}}'
|
||||
ls -la
|
||||
cd '${{env.MY_V_PATH}}'
|
||||
ls -la
|
||||
make
|
||||
- name: v doctor
|
||||
run: |
|
||||
cd '${{env.MY_V_PATH}}'
|
||||
./v doctor
|
||||
- name: v tests
|
||||
run: |
|
||||
cd '${{env.MY_V_PATH}}'
|
||||
./v test vlib/builtin vlib/os
|
||||
|
||||
space-paths-macos:
|
||||
runs-on: macOS-latest
|
||||
timeout-minutes: 30
|
||||
env:
|
||||
MY_V_PATH: '你好 my $path, @с интервали'
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
with:
|
||||
path: ${{env.MY_V_PATH}}
|
||||
- name: Build V
|
||||
run: |
|
||||
echo '${{env.MY_V_PATH}}'
|
||||
ls -la
|
||||
cd '${{env.MY_V_PATH}}'
|
||||
ls -la
|
||||
make
|
||||
## prebuild cmd/tools/builders/js_builder, to minimise the
|
||||
## chances of a sporadic "Killed" when running the tests later
|
||||
./v -b js run examples/hello_world.v
|
||||
- name: v doctor
|
||||
run: |
|
||||
cd '${{env.MY_V_PATH}}'
|
||||
./v doctor
|
||||
- name: v tests
|
||||
run: |
|
||||
cd '${{env.MY_V_PATH}}'
|
||||
./v test vlib/builtin vlib/os
|
||||
|
||||
space-paths-windows:
|
||||
runs-on: windows-2022
|
||||
timeout-minutes: 30
|
||||
env:
|
||||
MY_V_PATH: 'path with some $punctuation, and some spaces'
|
||||
|
||||
## NB: the following paths do not work for now:
|
||||
##### MY_V_PATH: '你好 my $path, @с интервали'
|
||||
##### MY_V_PATH: 'path with some $punctuation, and some spaces '
|
||||
## tcc has a problem interpreting paths with non latin letters in them,
|
||||
## by default, but that can be solved with passing -Bthirdparty/tcc
|
||||
## but after that V fails self building with:
|
||||
####### builder error: cannot write to folder
|
||||
####### D:\a\v\v\你好 my $path, @с интервали: No such file or directory
|
||||
## and that happens even for gcc builds, not just tcc ones
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
with:
|
||||
path: ${{env.MY_V_PATH}}
|
||||
- name: Build V
|
||||
run: |
|
||||
echo '${{env.MY_V_PATH}}'
|
||||
dir
|
||||
cd '${{env.MY_V_PATH}}'
|
||||
dir
|
||||
.\make.bat -tcc
|
||||
- name: v doctor
|
||||
run: |
|
||||
cd '${{env.MY_V_PATH}}'
|
||||
./v doctor
|
||||
- name: v tests
|
||||
run: |
|
||||
cd '${{env.MY_V_PATH}}'
|
||||
./v test vlib/builtin vlib/os
|
||||
.github/workflows/vab_ci.yml (vendored, 31 changed lines)
@@ -12,6 +12,8 @@ jobs:
vab-compiles-v-examples:
runs-on: ubuntu-20.04
timeout-minutes: 121
env:
VAB_FLAGS: --api 30 --build-tools 29.0.0 -v 3
steps:
- uses: actions/setup-java@v2
with:
@@ -20,33 +22,34 @@ jobs:

- uses: actions/checkout@v2
- name: Build V
run: make -j2 && ./v -cc gcc -o v cmd/v
run: make && sudo ./v symlink

- name: Checkout vab
uses: actions/checkout@v2
with:
repository: vlang/vab
path: vab

- name: Clone vab
run: git clone --depth 1 https://github.com/vlang/vab
- name: Build vab
run: cd vab; ../v ./vab.v ; cd ..
run: |
cd vab
v -g vab.v
sudo ln -s $(pwd)/vab /usr/local/bin/vab

- name: Run tests
run: ./v test vab

- name: Build vab
run: ./v -g vab/vab.v
run: v test vab

- name: Run vab --help
run: vab/vab --help
run: vab --help

- name: Run vab doctor
run: |
export VEXE=./v
vab/vab doctor
run: vab doctor

- name: Build graphical V examples as APK
run: |
declare -a v_examples=('flappylearning' '2048' 'fireworks' 'tetris' 'sokol/particles' 'sokol/drawing.v' 'sokol/freetype_raven.v' 'gg/polygons.v' 'gg/raven_text_rendering.v' 'gg/rectangles.v' 'gg/stars.v' 'gg/worker_thread.v')
mkdir apks
export VEXE=./v
for example in "${v_examples[@]}"; do
safe_name=$(echo "$example" | sed 's%/%-%' | sed 's%\.%-%' )
vab/vab --api 29 --build-tools '29.0.0' -v 3 examples/$example -o apks/$safe_name.apk
vab examples/$example -o apks/$safe_name.apk
done
@@ -199,3 +199,4 @@ to create a copy of the compiler rather than replacing it with `v self`.
| `trace_parser` | Prints details about parsed statements and expressions |
| `trace_thirdparty_obj_files` | Prints details about built thirdparty obj files |
| `trace_usecache` | Prints details when -usecache is used |
| `trace_embed_file` | Prints details when $embed_file is used |
GNUmakefile (11 changed lines)
@@ -76,7 +76,7 @@ endif
endif
endif

.PHONY: all clean fresh_vc fresh_tcc
.PHONY: all clean fresh_vc fresh_tcc check_for_working_tcc

ifdef prod
VFLAGS+=-prod
@@ -95,6 +95,7 @@ else
./v2.exe -o $(V) $(VFLAGS) cmd/v
rm -rf v1.exe v2.exe
endif
@$(V) run cmd/tools/detect_tcc.v
@echo "V has been successfully built"
@$(V) -version

@@ -110,6 +111,9 @@ latest_vc:
@echo "Using local vc"
endif

check_for_working_tcc:
@$(TMPTCC)/tcc.exe --version > /dev/null 2> /dev/null || echo "The executable '$(TMPTCC)/tcc.exe' does not work."

fresh_vc:
rm -rf $(VC)
$(GITFASTCLONE) $(VCREPO) $(VC)
@@ -117,9 +121,11 @@ fresh_vc:
ifndef local
latest_tcc: $(TMPTCC)/.git/config
cd $(TMPTCC) && $(GITCLEANPULL)
@$(MAKE) --quiet check_for_working_tcc 2> /dev/null
else
latest_tcc:
@echo "Using local tcc"
@$(MAKE) --quiet check_for_working_tcc 2> /dev/null
endif

fresh_tcc:
@@ -128,12 +134,15 @@ ifndef local
# Check wether a TCC branch exists for the user's system configuration.
ifneq (,$(findstring thirdparty-$(TCCOS)-$(TCCARCH), $(shell git ls-remote --heads $(TCCREPO) | sed 's/^[a-z0-9]*\trefs.heads.//')))
$(GITFASTCLONE) --branch thirdparty-$(TCCOS)-$(TCCARCH) $(TCCREPO) $(TMPTCC)
@$(MAKE) --quiet check_for_working_tcc 2> /dev/null
else
@echo 'Pre-built TCC not available for thirdparty-$(TCCOS)-$(TCCARCH) at $(TCCREPO), will use the system compiler: $(CC)'
$(GITFASTCLONE) --branch thirdparty-unknown-unknown $(TCCREPO) $(TMPTCC)
@$(MAKE) --quiet check_for_working_tcc 2> /dev/null
endif
else
@echo "Using local tccbin"
@$(MAKE) --quiet check_for_working_tcc 2> /dev/null
endif

$(TMPTCC)/.git/config:
Makefile (1 changed line)
@@ -9,3 +9,4 @@ all:
./v2 -o v $(VFLAGS) cmd/v
rm -rf v1 v2 vc/
@echo "V has been successfully built"
./v run ./cmd/tools/detect_tcc.v
TESTS.md (242 changed lines)
@@ -1,104 +1,12 @@
|
||||
# Automated tests
|
||||
|
||||
TLDR: run `v test-all` locally, after making your changes,
|
||||
TLDR: do run `v test-all` locally, after making your changes,
|
||||
and before submitting PRs.
|
||||
|
||||
## Notes
|
||||
In the `v` repo there are several different tests. The main types are:
|
||||
|
||||
* `_test.v` tests - check that `test_` functions succeed. These can be
|
||||
run per directory or individually.
|
||||
* `.out` tests - run a `.vv` file and check the output matches the
|
||||
contents of the `.out` file with the same base name. This is
|
||||
particularly useful for checking that errors are printed.
|
||||
|
||||
Tip: use `v -cc tcc` when compiling tests for speed.
|
||||
|
||||
## `vlib/v/tests`
|
||||
|
||||
General runnable tests for different features of the V compiler.
|
||||
|
||||
* `vlib/v/tests/inout/compiler_test.v`
|
||||
|
||||
Test output of running a V program matches an expected .out file.
|
||||
Check the source for how to test panics.
|
||||
|
||||
* `vlib/v/gen/c/coutput_test.v`
|
||||
|
||||
This tests whether the generated C source code matches all expectations,
|
||||
specified in *.c.must_have files, in the folder vlib/v/gen/c/testdata/ .
|
||||
|
||||
Each `.c.must_have` file has to have a corresponding .vv file.
|
||||
|
||||
Each `.c.must_have` file, consists of multiple lines. Each of these
|
||||
lines, *should* be present *at least once* in the output, when the .vv
|
||||
file is compiled with `-o -` .
|
||||
|
||||
* `vlib/v/tests/run_project_folders_test.v`
|
||||
Tests whether whole project folders can be compiled, and run.
|
||||
NB: Each project in these folders, should finish with exit code 0,
|
||||
and it should output `OK` as its last stdout line.
|
||||
|
||||
## Test building of actual V programs (examples, tools, V itself)
|
||||
|
||||
* `v build-tools`
|
||||
* `v build-examples`
|
||||
* `v build-vbinaries`
|
||||
|
||||
## vfmt tests
|
||||
|
||||
In `vlib/v/fmt/` there are::
|
||||
|
||||
* `v vlib/v/fmt/fmt_test.v`
|
||||
|
||||
This checks `.out` tests.
|
||||
|
||||
* `v vlib/v/fmt/fmt_keep_test.v`
|
||||
|
||||
This verifies that `_keep.v` files would be unchanged by `vfmt -w`.
|
||||
|
||||
* `v vlib/v/fmt/fmt_vlib_test.v`
|
||||
|
||||
This checks all source files are formatted and prints a summary.
|
||||
This is not required.
|
||||
|
||||
* `v test-fmt`
|
||||
|
||||
Test all files in the current directory are formatted.
|
||||
|
||||
## Markdown
|
||||
|
||||
* `v check-md -hide-warnings .`
|
||||
|
||||
Ensure that all .md files in the project are formatted properly,
|
||||
and that the V code block examples in them can be compiled/formatted too.
|
||||
|
||||
## `.github/workflows/ci.yml`
|
||||
|
||||
This runs various CI tests, e.g.:
|
||||
|
||||
* `v vet vlib/v` - style checker
|
||||
* `v fmt -verify` on certain source files
|
||||
|
||||
## `v test-cleancode`
|
||||
|
||||
Check that most .v files, are invariant of `v fmt` runs.
|
||||
|
||||
## `v test-self`
|
||||
|
||||
Run `vlib` module tests, *including* the compiler tests.
|
||||
|
||||
## `v vlib/v/compiler_errors_test.v`
|
||||
|
||||
This runs tests for:
|
||||
* `vlib/v/checker/tests/*.vv`
|
||||
* `vlib/v/parser/tests/*.vv`
|
||||
|
||||
### Special folders that compiler_errors_test.v will try to
|
||||
run/compile with specific options:
|
||||
|
||||
vlib/v/checker/tests/globals_run/ - `-enable-globals run`;
|
||||
results stored in `.run.out` files, matching the .vv ones.
|
||||
Tip: use `v -cc tcc` when compiling tests, because TCC is much faster,
|
||||
compared to most other C compilers like clang/gcc/msvc. Most test commands
|
||||
will use the V compiler and the V tools many times, potentially
|
||||
hundreds/thousands of times.
|
||||
|
||||
## `v test-all`
|
||||
|
||||
@@ -113,3 +21,143 @@ It works, by running these in succession:
* `v build-examples`
* `v check-md -hide-warnings .`
* `v install nedpals.args`

# Details:
In the `v` repo there are many tests. The main types are:

## `_test.v` tests - these are the normal V test files.
All `test_` functions in these files will be run automatically by
V's test framework.

NB 1: You can run test files one by one, with:
`v file_test.v` - this will run the test_ functions in file_test.v,
and will exit with a 0 exit code, if they all had 0 failing assertions.

`v -stats file_test.v` - this will run the test_ functions, and show a
report about how much time it took to run each of them too.

NB 2: You can also run many test files at once (in parallel, depending on
how many cores you have), with:
`v test folder` - this will run *all* `_test.v` files in `folder`,
recursively.

`v -stats test folder` - same, but will also produce timing reports
about how fast each test_ function in each _test.v file ran.
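For illustration only, a minimal `_test.v` file could look like the sketch below; the file name and the `add` helper are made up and are not part of the repository:

```v
// math_ops_test.v (hypothetical) - run with `v math_ops_test.v`,
// or with per-function timings: `v -stats math_ops_test.v`
fn add(a int, b int) int {
	return a + b
}

// every fn whose name starts with `test_` is run by V's test framework;
// a failing `assert` marks the test (and the process exit code) as failed
fn test_add() {
	assert add(1, 2) == 3
	assert add(-1, 1) == 0
}
```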
## `v test vlib/v/tests`:

This folder contains _test.v files, testing the different features of the V
compiler. Each of them will be compiled, and all the features in them have
to work (verified by assertions).

## `v vlib/v/tests/inout/compiler_test.v`

This is a *test runner*, that checks whether the output of running a V program,
matches an expected .out file. You can also check for code that does panic
using this test runner - just paste the start of the `panic()` output in the
corresponding .out file.

NB: these tests, expect to find a pair of `.vv` and `.out` files, in the folder:
vlib/v/tests/inout

The test runner will run each `.vv` file, and will check that its output, matches
the contents of the `.out` file with the same base name. This is particularly useful
for checking that errors and panics are printed.
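As a sketch of such a pair (hypothetical names and content, not files that exist in the repository): a `greet.vv` like the one below, next to a `greet.out` that contains exactly the lines `hello` and `world`. To test a panic instead, the `.out` file would start with the first lines of the expected `panic()` output.

```v
// vlib/v/tests/inout/greet.vv (hypothetical)
fn main() {
	println('hello')
	println('world')
}
```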
## `v vlib/v/gen/c/coutput_test.v`

coutput_test.v is a *test runner*, that checks whether the generated C source
code matches *all* expectations, specified in *.c.must_have files, in the
folder vlib/v/gen/c/testdata/ .

Each `.c.must_have` file, *has* to have a corresponding `.vv` file.

Each `.c.must_have` file, consists of multiple lines. Each of these
lines, *should* be present *at least once* in the output, when the .vv
file is compiled with `-o -` .
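For illustration, a hypothetical `hello.vv` is sketched below; its `hello.c.must_have` would then list substrings expected somewhere in the output of `v -o - hello.vv`, one per line, for example the string literal itself and a generated function name such as `main__main` (the exact generated names are an assumption here, not taken from the repository):

```v
// vlib/v/gen/c/testdata/hello.vv (hypothetical)
fn main() {
	println('hello from coutput')
}
```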
## `v vlib/v/tests/run_project_folders_test.v`
This *test runner*, checks whether whole project folders, can be compiled, and run.

NB: Each project in these folders, should finish with an exit code of 0,
and it should output `OK` as its last stdout line.
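A minimal project folder satisfying that contract could consist of a single `main.v` like this sketch (folder name and contents made up):

```v
// main.v of a hypothetical project folder
fn main() {
	// run whatever the project needs to verify, then signal success:
	println('OK') // the runner expects `OK` as the last stdout line
	exit(0) // and an exit code of 0
}
```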
## Test building of actual V programs (examples, tools, V itself)
|
||||
|
||||
* `v build-tools`
|
||||
* `v build-examples`
|
||||
* `v build-vbinaries`
|
||||
|
||||
## Formatting tests
|
||||
|
||||
In `vlib/v/fmt/` there are:
|
||||
|
||||
* `v vlib/v/fmt/fmt_test.v`
|
||||
|
||||
This checks `.out` tests.
|
||||
|
||||
* `v vlib/v/fmt/fmt_keep_test.v`
|
||||
|
||||
This verifies that all `_keep.vv` files in the `vlib/v/fmt/tests/` folder,
|
||||
would be unchanged by `v fmt -w`, i.e. that the v source code formatter,
|
||||
generates a stable source output, that does not change, once it is already
|
||||
formatted once.
|
||||
|
||||
* `v vlib/v/fmt/fmt_vlib_test.v`
|
||||
|
||||
This checks that all V source files are formatted, and prints a summary.
|
||||
This is not required.
|
||||
|
||||
* `v test-cleancode`
|
||||
|
||||
Check that most .v files, are invariant of `v fmt` runs.
|
||||
|
||||
* `v test-fmt`
|
||||
|
||||
This tests that all .v files in the current folder are already formatted.
|
||||
It is useful for adding to CI jobs, to guarantee, that future contributions
|
||||
will keep the existing source nice and clean.
|
||||
|
||||
## Markdown/documentation checks:
|
||||
|
||||
* `v check-md -hide-warnings .`
|
||||
|
||||
Ensure that all .md files in the project are formatted properly,
|
||||
and that the V code block examples in them can be compiled/formatted too.
|
||||
|
||||
## `v test-self`
|
||||
|
||||
Run `vlib` module tests, *including* the compiler tests.
|
||||
|
||||
## `v vlib/v/compiler_errors_test.v`
|
||||
|
||||
This runs tests for:
|
||||
* `vlib/v/scanner/tests/*.vv`
|
||||
* `vlib/v/checker/tests/*.vv`
|
||||
* `vlib/v/parser/tests/*.vv`
|
||||
|
||||
NB: there are special folders, that compiler_errors_test.v will try to
|
||||
run/compile with specific options:
|
||||
|
||||
vlib/v/checker/tests/globals_run/ - `-enable-globals run`;
|
||||
results stored in `.run.out` files, matching the .vv ones.
|
||||
|
||||
NB 2: in case you need to modify many .out files, run *twice* in a row:
|
||||
`VAUTOFIX=1 ./v vlib/v/compiler_errors_test.v`
|
||||
This will fail the first time, but it will record the new output for each
|
||||
.vv file, and store it into the corresponding .out file. The second run
|
||||
should be now successfull, and so you can inspect the difference, and
|
||||
commit the new .out files with minimum manual effort.
|
||||
|
||||
NB 3: To run only some of the tests, use:
|
||||
`VTEST_ONLY=mismatch ./v vlib/v/compiler_errors_test.v`
|
||||
This will check only the .vv files, whose paths match the given filter.
|
||||
|
||||
## `.github/workflows/ci.yml`
|
||||
|
||||
This is a Github Actions configuration file, that runs various CI
|
||||
tests in the main V repository, for example:
|
||||
|
||||
* `v vet vlib/v` - run a style checker.
|
||||
* `v test-self` (run self tests) in various compilation modes.
|
||||
|
||||
cmd/tools/detect_tcc.v (new file, 15 lines)
@@ -0,0 +1,15 @@
fn main() {
	$if tinyc {
		println('Your `tcc` is working. Good - it is much faster at compiling C source code.')
		exit(0)
	}

	println('
NB: `tcc` was not used, so unless you install it yourself, your backend
C compiler will be `cc`, which is usually either `clang`, `gcc` or `msvc`.

These C compilers, are several times slower at compiling C source code,
compared to `tcc`. They do produce more optimised executables, but that
is done at the cost of compilation speed.
')
}
@@ -6,8 +6,9 @@ import flag
|
||||
|
||||
const (
|
||||
tool_name = os.file_name(os.executable())
|
||||
tool_version = '0.0.2'
|
||||
tool_version = '0.0.3'
|
||||
tool_description = 'Prints all V functions in .v files under PATH/, that do not yet have documentation comments.'
|
||||
work_dir_prefix = normalise_path(os.real_path(os.wd_at_startup) + '/')
|
||||
)
|
||||
|
||||
struct UndocumentedFN {
|
||||
@@ -17,50 +18,46 @@ struct UndocumentedFN {
|
||||
}
|
||||
|
||||
struct Options {
|
||||
show_help bool
|
||||
collect_tags bool
|
||||
deprecated bool
|
||||
show_help bool
|
||||
collect_tags bool
|
||||
deprecated bool
|
||||
private bool
|
||||
js bool
|
||||
no_line_numbers bool
|
||||
exclude []string
|
||||
relative_paths bool
|
||||
}
|
||||
|
||||
fn collect(path string, mut l []string, f fn (string, mut []string)) {
|
||||
if !os.is_dir(path) {
|
||||
return
|
||||
}
|
||||
mut files := os.ls(path) or { return }
|
||||
for file in files {
|
||||
p := path + os.path_separator + file
|
||||
if os.is_dir(p) && !os.is_link(p) {
|
||||
collect(p, mut l, f)
|
||||
} else if os.exists(p) {
|
||||
f(p, mut l)
|
||||
}
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
fn report_undocumented_functions_in_path(opt Options, path string) {
|
||||
fn (opt Options) report_undocumented_functions_in_path(path string) {
|
||||
mut files := []string{}
|
||||
collect_fn := fn (path string, mut l []string) {
|
||||
if os.file_ext(path) == '.v' {
|
||||
l << os.real_path(path)
|
||||
collect(path, mut files, fn (npath string, mut accumulated_paths []string) {
|
||||
if !npath.ends_with('.v') {
|
||||
return
|
||||
}
|
||||
}
|
||||
collect(path, mut files, collect_fn)
if npath.ends_with('_test.v') {
return
}
accumulated_paths << npath
})
for file in files {
if file.ends_with('_test.v') {
if !opt.js && file.ends_with('.js.v') {
continue
}
report_undocumented_functions_in_file(opt, file)
if opt.exclude.len > 0 && opt.exclude.any(file.contains(it)) {
continue
}
opt.report_undocumented_functions_in_file(file)
}
}

fn report_undocumented_functions_in_file(opt Options, file string) {
fn (opt &Options) report_undocumented_functions_in_file(nfile string) {
file := os.real_path(nfile)
contents := os.read_file(file) or { panic(err) }
lines := contents.split('\n')
mut info := []UndocumentedFN{}
for i, line in lines {
if line.starts_with('pub fn') || (line.starts_with('fn ') && !(line.starts_with('fn C.')
|| line.starts_with('fn main'))) {
if line.starts_with('pub fn') || (opt.private && (line.starts_with('fn ')
&& !(line.starts_with('fn C.') || line.starts_with('fn main')))) {
// println('Match: $line')
if i > 0 && lines.len > 0 {
mut line_above := lines[i - 1]
@@ -89,22 +86,51 @@ fn report_undocumented_functions_in_file(opt Options, file string) {
}
if info.len > 0 {
for undocumented_fn in info {
mut line_numbers := '$undocumented_fn.line:0:'
if opt.no_line_numbers {
line_numbers = ''
}
tags_str := if opt.collect_tags && undocumented_fn.tags.len > 0 {
'$undocumented_fn.tags'
} else {
''
}
ofile := if opt.relative_paths {
nfile.replace(work_dir_prefix, '')
} else {
os.real_path(nfile)
}
if opt.deprecated {
println('$file:$undocumented_fn.line:0:$undocumented_fn.signature $tags_str')
println('$ofile:$line_numbers$undocumented_fn.signature $tags_str')
} else {
if 'deprecated' !in undocumented_fn.tags {
println('$file:$undocumented_fn.line:0:$undocumented_fn.signature $tags_str')
println('$ofile:$line_numbers$undocumented_fn.signature $tags_str')
}
}
}
}
}

fn normalise_path(path string) string {
return path.replace('\\', '/')
}

fn collect(path string, mut l []string, f fn (string, mut []string)) {
if !os.is_dir(path) {
return
}
mut files := os.ls(path) or { return }
for file in files {
p := normalise_path(os.join_path_single(path, file))
if os.is_dir(p) && !os.is_link(p) {
collect(p, mut l, f)
} else if os.exists(p) {
f(p, mut l)
}
}
return
}

fn collect_tags(line string) []string {
mut cleaned := line.all_before('/')
cleaned = cleaned.replace_each(['[', '', ']', '', ' ', ''])
@@ -125,7 +151,12 @@ fn main() {
opt := Options{
show_help: fp.bool('help', `h`, false, 'Show this help text.')
deprecated: fp.bool('deprecated', `d`, false, 'Include deprecated functions in output.')
private: fp.bool('private', `p`, false, 'Include private functions in output.')
js: fp.bool('js', 0, false, 'Include JavaScript functions in output.')
no_line_numbers: fp.bool('no-line-numbers', `n`, false, 'Exclude line numbers in output.')
collect_tags: fp.bool('tags', `t`, false, 'Also print function tags if any is found.')
exclude: fp.string_multi('exclude', `e`, '')
relative_paths: fp.bool('relative-paths', `r`, false, 'Use relative paths in output.')
}
if opt.show_help {
println(fp.usage())
@@ -133,9 +164,9 @@ fn main() {
}
for path in os.args[1..] {
if os.is_file(path) {
report_undocumented_functions_in_file(opt, path)
opt.report_undocumented_functions_in_file(path)
} else {
report_undocumented_functions_in_path(opt, path)
opt.report_undocumented_functions_in_path(path)
}
}
}

@@ -164,9 +164,18 @@ pub fn new_test_session(_vargs string, will_compile bool) TestSession {
skip_files << 'examples/database/orm.v' // try fix it
}
}
$if windows {
// TODO: remove when closures on windows are supported
skip_files << 'examples/pendulum-simulation/animation.v'
skip_files << 'examples/pendulum-simulation/full.v'
skip_files << 'examples/pendulum-simulation/parallel.v'
skip_files << 'examples/pendulum-simulation/parallel_with_iw.v'
skip_files << 'examples/pendulum-simulation/sequential.v'
}
if testing.github_job != 'sokol-shaders-can-be-compiled' {
// These examples need .h files that are produced from the supplied .glsl files,
// using by the shader compiler tools in https://github.com/floooh/sokol-tools-bin/archive/pre-feb2021-api-changes.tar.gz
skip_files << 'examples/sokol/simple_shader_glsl/simple_shader.v'
skip_files << 'examples/sokol/02_cubes_glsl/cube_glsl.v'
skip_files << 'examples/sokol/03_march_tracing_glsl/rt_glsl.v'
skip_files << 'examples/sokol/04_multi_shader_glsl/rt_glsl.v'
@@ -176,6 +185,7 @@ pub fn new_test_session(_vargs string, will_compile bool) TestSession {
}
if testing.github_job != 'ubuntu-tcc' {
skip_files << 'examples/c_interop_wkhtmltopdf.v' // needs installation of wkhtmltopdf from https://github.com/wkhtmltopdf/packaging/releases
skip_files << 'examples/call_v_from_python/test.v' // the example only makes sense to be compiled, when python is installed
// the ttf_test.v is not interactive, but needs X11 headers to be installed, which is done only on ubuntu-tcc for now
skip_files << 'vlib/x/ttf/ttf_test.v'
skip_files << 'vlib/vweb/vweb_app_test.v' // imports the `sqlite` module, which in turn includes sqlite3.h
@@ -329,9 +339,9 @@ fn worker_trunner(mut p pool.PoolProcessor, idx int, thread_id int) voidptr {
}

if !ts.vargs.contains('fmt') {
cmd_options << ' -o "$generated_binary_fpath"'
cmd_options << ' -o ${os.quoted_path(generated_binary_fpath)}'
}
cmd := '"$ts.vexe" ' + cmd_options.join(' ') + ' "$file"'
cmd := '${os.quoted_path(ts.vexe)} ' + cmd_options.join(' ') + ' ${os.quoted_path(file)}'
ts.benchmark.step()
tls_bench.step()
if relative_file.replace('\\', '/') in ts.skip_files {
@@ -357,6 +367,7 @@ fn worker_trunner(mut p pool.PoolProcessor, idx int, thread_id int) voidptr {
goto test_passed_system
}
}
time.sleep(500 * time.millisecond)
}
ts.failed = true
ts.benchmark.fail()
@@ -456,7 +467,7 @@ pub fn prepare_test_session(zargs string, folder string, oskipped []string, main
}
}
c := os.read_file(f) or { panic(err) }
maxc := if c.len > 300 { 300 } else { c.len }
maxc := if c.len > 500 { 500 } else { c.len }
start := c[0..maxc]
if start.contains('module ') && !start.contains('module main') {
skipped_f := f.replace(os.join_path_single(parent_dir, ''), '')

@@ -294,8 +294,8 @@ fn (t Tree) mod(node ast.Module) &Node {
obj.add_terse('name', t.string_node(node.name))
obj.add('short_name', t.string_node(node.short_name))
obj.add_terse('attrs', t.array_node_attr(node.attrs))
obj.add('pos', t.position(node.pos))
obj.add('name_pos', t.position(node.name_pos))
obj.add('pos', t.pos(node.pos))
obj.add('name_pos', t.pos(node.name_pos))
obj.add_terse('is_skipped', t.bool_node(node.is_skipped))
return obj
}
@@ -327,7 +327,7 @@ fn (t Tree) scope_struct_field(node ast.ScopeStructField) &Node {
obj.add_terse('name', t.string_node(node.name))
obj.add_terse('typ', t.type_node(node.typ))
obj.add_terse('orig_type', t.type_node(node.orig_type))
obj.add('pos', t.position(node.pos))
obj.add('pos', t.pos(node.pos))
obj.add_terse('smartcasts', t.array_node_type(node.smartcasts))
return obj
}
@@ -365,7 +365,7 @@ fn (t Tree) errors(errors []errors.Error) &Node {
obj := new_object()
obj.add_terse('message', t.string_node(e.message))
obj.add_terse('file_path', t.string_node(e.file_path))
obj.add('pos', t.position(e.pos))
obj.add('pos', t.pos(e.pos))
obj.add_terse('backtrace', t.string_node(e.backtrace))
obj.add_terse('reporter', t.enum_node(e.reporter))
errs.add_item(obj)
@@ -379,7 +379,7 @@ fn (t Tree) warnings(warnings []errors.Warning) &Node {
mut obj := new_object()
obj.add('message', t.string_node(w.message))
obj.add('file_path', t.string_node(w.file_path))
obj.add('pos', t.position(w.pos))
obj.add('pos', t.pos(w.pos))
obj.add('reporter', t.enum_node(w.reporter))
warns.add_item(obj)
}
@@ -392,7 +392,7 @@ fn (t Tree) notices(notices []errors.Notice) &Node {
mut obj := new_object()
obj.add('message', t.string_node(n.message))
obj.add('file_path', t.string_node(n.file_path))
obj.add('pos', t.position(n.pos))
obj.add('pos', t.pos(n.pos))
obj.add('reporter', t.enum_node(n.reporter))
notice_array.add_item(obj)
}
@@ -449,21 +449,21 @@ fn (t Tree) import_module(node ast.Import) &Node {
obj.add_terse('syms', t.array_node_import_symbol(node.syms))
obj.add('comments', t.array_node_comment(node.comments))
obj.add('next_comments', t.array_node_comment(node.next_comments))
obj.add('pos', t.position(node.pos))
obj.add('mod_pos', t.position(node.mod_pos))
obj.add('alias_pos', t.position(node.alias_pos))
obj.add('syms_pos', t.position(node.syms_pos))
obj.add('pos', t.pos(node.pos))
obj.add('mod_pos', t.pos(node.mod_pos))
obj.add('alias_pos', t.pos(node.alias_pos))
obj.add('syms_pos', t.pos(node.syms_pos))
return obj
}

fn (t Tree) import_symbol(node ast.ImportSymbol) &Node {
mut obj := new_object()
obj.add_terse('name', t.string_node(node.name))
obj.add('pos', t.position(node.pos))
obj.add('pos', t.pos(node.pos))
return obj
}

fn (t Tree) position(p token.Position) &Node {
fn (t Tree) pos(p token.Pos) &Node {
mut obj := new_object()
obj.add('line_nr', t.number_node(p.line_nr))
obj.add('last_line', t.number_node(p.last_line))
@@ -478,7 +478,7 @@ fn (t Tree) comment(node ast.Comment) &Node {
obj.add('text', t.string_node(node.text))
obj.add('is_multi', t.bool_node(node.is_multi))
obj.add('is_inline', t.bool_node(node.is_inline))
obj.add('pos', t.position(node.pos))
obj.add('pos', t.pos(node.pos))
return obj
}

@@ -490,7 +490,7 @@ fn (t Tree) const_decl(node ast.ConstDecl) &Node {
|
||||
obj.add_terse('fields', t.array_node_const_field(node.fields))
|
||||
obj.add_terse('attrs', t.array_node_attr(node.attrs))
|
||||
obj.add('end_comments', t.array_node_comment(node.end_comments))
|
||||
obj.add('pos', t.position(node.pos))
|
||||
obj.add('pos', t.pos(node.pos))
|
||||
return obj
|
||||
}
|
||||
|
||||
@@ -505,7 +505,7 @@ fn (t Tree) const_field(node ast.ConstField) &Node {
|
||||
obj.add_terse('typ', t.type_node(node.typ))
|
||||
obj.add('comments', t.array_node_comment(node.comments))
|
||||
obj.add('comptime_expr_value', t.comptime_expr_value(node.comptime_expr_value))
|
||||
obj.add('pos', t.position(node.pos))
|
||||
obj.add('pos', t.pos(node.pos))
|
||||
return obj
|
||||
}
|
||||
|
||||
@@ -542,9 +542,9 @@ fn (t Tree) fn_decl(node ast.FnDecl) &Node {
|
||||
obj.add('is_keep_alive', t.bool_node(node.is_keep_alive))
|
||||
obj.add_terse('is_unsafe', t.bool_node(node.is_unsafe))
|
||||
obj.add_terse('receiver', t.struct_field(node.receiver))
|
||||
obj.add('receiver_pos', t.position(node.receiver_pos))
|
||||
obj.add('receiver_pos', t.pos(node.receiver_pos))
|
||||
obj.add_terse('is_method', t.bool_node(node.is_method))
|
||||
obj.add('method_type_pos', t.position(node.method_type_pos))
|
||||
obj.add('method_type_pos', t.pos(node.method_type_pos))
|
||||
obj.add('method_idx', t.number_node(node.method_idx))
|
||||
obj.add_terse('rec_mut', t.bool_node(node.rec_mut))
|
||||
obj.add('rec_share', t.enum_node(node.rec_share))
|
||||
@@ -554,9 +554,9 @@ fn (t Tree) fn_decl(node ast.FnDecl) &Node {
|
||||
obj.add('is_builtin', t.bool_node(node.is_builtin))
|
||||
obj.add('is_direct_arr', t.bool_node(node.is_direct_arr))
|
||||
obj.add('ctdefine_idx', t.number_node(node.ctdefine_idx))
|
||||
obj.add('pos', t.position(node.pos))
|
||||
obj.add('body_pos', t.position(node.body_pos))
|
||||
obj.add('return_type_pos', t.position(node.return_type_pos))
|
||||
obj.add('pos', t.pos(node.pos))
|
||||
obj.add('body_pos', t.pos(node.body_pos))
|
||||
obj.add('return_type_pos', t.pos(node.return_type_pos))
|
||||
obj.add('file', t.string_node(node.file))
|
||||
obj.add('has_return', t.bool_node(node.has_return))
|
||||
obj.add('should_be_skipped', t.bool_node(node.should_be_skipped))
|
||||
@@ -598,7 +598,7 @@ fn (t Tree) struct_decl(node ast.StructDecl) &Node {
|
||||
obj.add('module_pos', t.number_node(node.module_pos))
|
||||
obj.add_terse('language', t.enum_node(node.language))
|
||||
obj.add_terse('is_union', t.bool_node(node.is_union))
|
||||
obj.add('pos', t.position(node.pos))
|
||||
obj.add('pos', t.pos(node.pos))
|
||||
obj.add_terse('fields', t.array_node_struct_field(node.fields))
|
||||
obj.add_terse('generic_types', t.array_node_type(node.generic_types))
|
||||
obj.add_terse('attrs', t.array_node_attr(node.attrs))
|
||||
@@ -612,7 +612,7 @@ fn (t Tree) struct_field(node ast.StructField) &Node {
|
||||
obj.add_terse('ast_type', t.string_node('StructField'))
|
||||
obj.add_terse('name', t.string_node(node.name))
|
||||
obj.add_terse('typ', t.type_node(node.typ))
|
||||
obj.add('type_pos', t.position(node.type_pos))
|
||||
obj.add('type_pos', t.pos(node.type_pos))
|
||||
obj.add_terse('has_default_expr', t.bool_node(node.has_default_expr))
|
||||
obj.add_terse('default_expr_typ', t.type_node(node.default_expr_typ))
|
||||
obj.add_terse('default_expr', t.expr(node.default_expr))
|
||||
@@ -622,14 +622,14 @@ fn (t Tree) struct_field(node ast.StructField) &Node {
|
||||
obj.add_terse('is_volatile', t.bool_node(node.is_volatile))
|
||||
obj.add_terse('attrs', t.array_node_attr(node.attrs))
|
||||
obj.add('comments', t.array_node_comment(node.comments))
|
||||
obj.add('pos', t.position(node.pos))
|
||||
obj.add('pos', t.pos(node.pos))
|
||||
return obj
|
||||
}
|
||||
|
||||
fn (t Tree) embed(node ast.Embed) &Node {
|
||||
mut obj := new_object()
|
||||
obj.add_terse('typ', t.type_node(node.typ))
|
||||
obj.add('pos', t.position(node.pos))
|
||||
obj.add('pos', t.pos(node.pos))
|
||||
obj.add('comments', t.array_node_comment(node.comments))
|
||||
return obj
|
||||
}
|
||||
@@ -641,7 +641,7 @@ fn (t Tree) enum_decl(node ast.EnumDecl) &Node {
|
||||
obj.add_terse('is_pub', t.bool_node(node.is_pub))
|
||||
obj.add_terse('is_flag', t.bool_node(node.is_flag))
|
||||
obj.add_terse('is_multi_allowed', t.bool_node(node.is_multi_allowed))
|
||||
obj.add('pos', t.position(node.pos))
|
||||
obj.add('pos', t.pos(node.pos))
|
||||
obj.add_terse('fields', t.array_node_enum_field(node.fields))
|
||||
obj.add('comments', t.array_node_comment(node.comments))
|
||||
obj.add_terse('attrs', t.array_node_attr(node.attrs))
|
||||
@@ -654,7 +654,7 @@ fn (t Tree) enum_field(node ast.EnumField) &Node {
|
||||
obj.add_terse('name', t.string_node(node.name))
|
||||
obj.add_terse('has_expr', t.bool_node(node.has_expr))
|
||||
obj.add_terse('expr', t.expr(node.expr))
|
||||
obj.add('pos', t.position(node.pos))
|
||||
obj.add('pos', t.pos(node.pos))
|
||||
obj.add('comments', t.array_node_comment(node.comments))
|
||||
obj.add('next_comments', t.array_node_comment(node.next_comments))
|
||||
return obj
|
||||
@@ -671,9 +671,9 @@ fn (t Tree) interface_decl(node ast.InterfaceDecl) &Node {
|
||||
obj.add_terse('methods', t.array_node_fn_decl(node.methods))
|
||||
obj.add_terse('fields', t.array_node_struct_field(node.fields))
|
||||
obj.add('pre_comments', t.array_node_comment(node.pre_comments))
|
||||
obj.add('name_pos', t.position(node.name_pos))
|
||||
obj.add('name_pos', t.pos(node.name_pos))
|
||||
obj.add_terse('language', t.enum_node(node.language))
|
||||
obj.add('pos', t.position(node.pos))
|
||||
obj.add('pos', t.pos(node.pos))
|
||||
obj.add('are_ifaces_expanded', t.bool_node(node.are_ifaces_expanded))
|
||||
obj.add_terse('ifaces', t.array_node_interface_embedding(node.ifaces))
|
||||
obj.add_terse('attrs', t.array_node_attr(node.attrs))
|
||||
@@ -685,7 +685,7 @@ fn (t Tree) interface_embedding(node ast.InterfaceEmbedding) &Node {
|
||||
obj.add_terse('ast_type', t.string_node('InterfaceEmbedding'))
|
||||
obj.add_terse('name', t.string_node(node.name))
|
||||
obj.add_terse('typ', t.type_node(node.typ))
|
||||
obj.add('pos', t.position(node.pos))
|
||||
obj.add('pos', t.pos(node.pos))
|
||||
obj.add('comments', t.array_node_comment(node.comments))
|
||||
return obj
|
||||
}
|
||||
@@ -714,7 +714,7 @@ fn (t Tree) hash_stmt(node ast.HashStmt) &Node {
|
||||
obj.add_terse('msg', t.string_node(node.msg))
|
||||
obj.add_terse('ct_conds', t.array_node_expr(node.ct_conds))
|
||||
obj.add_terse('source_file', t.string_node(node.source_file))
|
||||
obj.add('pos', t.position(node.pos))
|
||||
obj.add('pos', t.pos(node.pos))
|
||||
return obj
|
||||
}
|
||||
|
||||
@@ -724,8 +724,8 @@ fn (t Tree) comptime_for(node ast.ComptimeFor) &Node {
|
||||
obj.add_terse('val_var', t.string_node(node.val_var))
|
||||
obj.add_terse('typ', t.type_node(node.typ))
|
||||
obj.add_terse('kind', t.enum_node(node.kind))
|
||||
obj.add('pos', t.position(node.pos))
|
||||
obj.add('typ_pos', t.position(node.pos))
|
||||
obj.add('pos', t.pos(node.pos))
|
||||
obj.add('typ_pos', t.pos(node.pos))
|
||||
obj.add_terse('stmts', t.array_node_stmt(node.stmts))
|
||||
return obj
|
||||
}
|
||||
@@ -734,7 +734,7 @@ fn (t Tree) global_decl(node ast.GlobalDecl) &Node {
|
||||
mut obj := new_object()
|
||||
obj.add_terse('ast_type', t.string_node('GlobalDecl'))
|
||||
obj.add_terse('mod', t.string_node(node.mod))
|
||||
obj.add('pos', t.position(node.pos))
|
||||
obj.add('pos', t.pos(node.pos))
|
||||
obj.add_terse('is_block', t.bool_node(node.is_block))
|
||||
obj.add_terse('fields', t.array_node_global_field(node.fields))
|
||||
obj.add('end_comments', t.array_node_comment(node.end_comments))
|
||||
@@ -751,8 +751,8 @@ fn (t Tree) global_field(node ast.GlobalField) &Node {
|
||||
obj.add_terse('has_expr', t.bool_node(node.has_expr))
|
||||
obj.add_terse('is_markused', t.bool_node(node.is_markused))
|
||||
obj.add('comments', t.array_node_comment(node.comments))
|
||||
obj.add('pos', t.position(node.pos))
|
||||
obj.add('typ_pos', t.position(node.typ_pos))
|
||||
obj.add('pos', t.pos(node.pos))
|
||||
obj.add('typ_pos', t.pos(node.typ_pos))
|
||||
return obj
|
||||
}
|
||||
|
||||
@@ -763,7 +763,7 @@ fn (t Tree) defer_stmt(node ast.DeferStmt) &Node {
|
||||
obj.add_terse('defer_vars', t.array_node_ident(node.defer_vars))
|
||||
obj.add_terse('ifdef', t.string_node(node.ifdef))
|
||||
obj.add('idx_in_fn', t.number_node(node.idx_in_fn))
|
||||
obj.add('pos', t.position(node.pos))
|
||||
obj.add('pos', t.pos(node.pos))
|
||||
return obj
|
||||
}
|
||||
|
||||
@@ -782,7 +782,7 @@ fn (t Tree) alias_type_decl(node ast.AliasTypeDecl) &Node {
|
||||
obj.add_terse('is_pub', t.bool_node(node.is_pub))
|
||||
obj.add_terse('parent_type', t.type_node(node.parent_type))
|
||||
obj.add('comments', t.array_node_comment(node.comments))
|
||||
obj.add('pos', t.position(node.pos))
|
||||
obj.add('pos', t.pos(node.pos))
|
||||
return obj
|
||||
}
|
||||
|
||||
@@ -791,7 +791,7 @@ fn (t Tree) sum_type_decl(node ast.SumTypeDecl) &Node {
|
||||
obj.add_terse('ast_type', t.string_node('SumTypeDecl'))
|
||||
obj.add_terse('name', t.string_node(node.name))
|
||||
obj.add_terse('is_pub', t.bool_node(node.is_pub))
|
||||
obj.add('pos', t.position(node.pos))
|
||||
obj.add('pos', t.pos(node.pos))
|
||||
obj.add_terse('typ', t.type_node(node.typ))
|
||||
obj.add_terse('generic_types', t.array_node_type(node.generic_types))
|
||||
obj.add('comments', t.array_node_comment(node.comments))
|
||||
@@ -805,7 +805,7 @@ fn (t Tree) fn_type_decl(node ast.FnTypeDecl) &Node {
|
||||
obj.add_terse('name', t.string_node(node.name))
|
||||
obj.add_terse('is_pub', t.bool_node(node.is_pub))
|
||||
obj.add_terse('typ', t.type_node(node.typ))
|
||||
obj.add('pos', t.position(node.pos))
|
||||
obj.add('pos', t.pos(node.pos))
|
||||
obj.add('comments', t.array_node_comment(node.comments))
|
||||
return obj
|
||||
}
|
||||
@@ -823,7 +823,7 @@ fn (t Tree) goto_label(node ast.GotoLabel) &Node {
|
||||
mut obj := new_object()
|
||||
obj.add_terse('ast_type', t.string_node('GotoLabel'))
|
||||
obj.add_terse('name', t.string_node(node.name))
|
||||
obj.add('pos', t.position(node.pos))
|
||||
obj.add('pos', t.pos(node.pos))
|
||||
return obj
|
||||
}
|
||||
|
||||
@@ -831,7 +831,7 @@ fn (t Tree) goto_stmt(node ast.GotoStmt) &Node {
|
||||
mut obj := new_object()
|
||||
obj.add_terse('ast_type', t.string_node('GotoStmt'))
|
||||
obj.add_terse('name', t.string_node(node.name))
|
||||
obj.add('pos', t.position(node.pos))
|
||||
obj.add('pos', t.pos(node.pos))
|
||||
return obj
|
||||
}
|
||||
|
||||
@@ -847,7 +847,7 @@ fn (t Tree) assign_stmt(node ast.AssignStmt) &Node {
|
||||
obj.add_terse('is_volatile', t.bool_node(node.is_volatile))
|
||||
obj.add_terse('is_simple', t.bool_node(node.is_simple))
|
||||
obj.add_terse('has_cross_var', t.bool_node(node.has_cross_var))
|
||||
obj.add('pos', t.position(node.pos))
|
||||
obj.add('pos', t.pos(node.pos))
|
||||
obj.add('comments', t.array_node_comment(node.comments))
|
||||
obj.add('end_comments', t.array_node_comment(node.end_comments))
|
||||
return obj
|
||||
@@ -872,7 +872,7 @@ fn (t Tree) var(node ast.Var) &Node {
|
||||
obj.add('is_auto_heap', t.bool_node(node.is_auto_heap))
|
||||
obj.add('is_stack_obj', t.bool_node(node.is_stack_obj))
|
||||
obj.add_terse('share', t.enum_node(node.share))
|
||||
obj.add('pos', t.position(node.pos))
|
||||
obj.add('pos', t.pos(node.pos))
|
||||
obj.add_terse('smartcasts', t.array_node_type(node.smartcasts))
|
||||
return obj
|
||||
}
|
||||
@@ -882,7 +882,7 @@ fn (t Tree) return_(node ast.Return) &Node {
|
||||
obj.add_terse('ast_type', t.string_node('Return'))
|
||||
obj.add_terse('exprs', t.array_node_expr(node.exprs))
|
||||
obj.add_terse('types', t.array_node_type(node.types))
|
||||
obj.add('pos', t.position(node.pos))
|
||||
obj.add('pos', t.pos(node.pos))
|
||||
return obj
|
||||
}
|
||||
|
||||
@@ -897,7 +897,7 @@ fn (t Tree) for_c_stmt(node ast.ForCStmt) &Node {
|
||||
obj.add_terse('inc', t.stmt(node.inc))
|
||||
obj.add_terse('is_multi', t.bool_node(node.is_multi))
|
||||
obj.add_terse('label', t.string_node(node.label))
|
||||
obj.add('pos', t.position(node.pos))
|
||||
obj.add('pos', t.pos(node.pos))
|
||||
obj.add('scope', t.number_node(int(node.scope)))
|
||||
obj.add_terse('stmts', t.array_node_stmt(node.stmts))
|
||||
return obj
|
||||
@@ -909,7 +909,7 @@ fn (t Tree) for_stmt(node ast.ForStmt) &Node {
|
||||
obj.add_terse('cond', t.expr(node.cond))
|
||||
obj.add_terse('is_inf', t.bool_node(node.is_inf))
|
||||
obj.add_terse('label', t.string_node(node.label))
|
||||
obj.add('pos', t.position(node.pos))
|
||||
obj.add('pos', t.pos(node.pos))
|
||||
obj.add('scope', t.number_node(int(node.scope)))
|
||||
obj.add_terse('stmts', t.array_node_stmt(node.stmts))
|
||||
return obj
|
||||
@@ -929,7 +929,7 @@ fn (t Tree) for_in_stmt(node ast.ForInStmt) &Node {
|
||||
obj.add_terse('kind', t.enum_node(node.kind))
|
||||
obj.add_terse('val_is_mut', t.bool_node(node.val_is_mut))
|
||||
obj.add_terse('label', t.string_node(node.label))
|
||||
obj.add('pos', t.position(node.pos))
|
||||
obj.add('pos', t.pos(node.pos))
|
||||
obj.add('scope', t.number_node(int(node.scope)))
|
||||
obj.add_terse('stmts', t.array_node_stmt(node.stmts))
|
||||
return obj
|
||||
@@ -940,7 +940,7 @@ fn (t Tree) branch_stmt(node ast.BranchStmt) &Node {
|
||||
obj.add_terse('ast_type', t.string_node('BranchStmt'))
|
||||
obj.add_terse('kind', t.token_node(node.kind))
|
||||
obj.add_terse('label', t.string_node(node.label))
|
||||
obj.add('pos', t.position(node.pos))
|
||||
obj.add('pos', t.pos(node.pos))
|
||||
return obj
|
||||
}
|
||||
|
||||
@@ -949,7 +949,7 @@ fn (t Tree) assert_stmt(node ast.AssertStmt) &Node {
|
||||
obj.add_terse('ast_type', t.string_node('AssertStmt'))
|
||||
obj.add_terse('expr', t.expr(node.expr))
|
||||
obj.add_terse('is_used', t.bool_node(node.is_used))
|
||||
obj.add('pos', t.position(node.pos))
|
||||
obj.add('pos', t.pos(node.pos))
|
||||
return obj
|
||||
}
|
||||
|
||||
@@ -958,7 +958,7 @@ fn (t Tree) block(node ast.Block) &Node {
|
||||
obj.add_terse('ast_type', t.string_node('Block'))
|
||||
obj.add_terse('stmts', t.array_node_stmt(node.stmts))
|
||||
obj.add_terse('is_unsafe', t.bool_node(node.is_unsafe))
|
||||
obj.add('pos', t.position(node.pos))
|
||||
obj.add('pos', t.pos(node.pos))
|
||||
return obj
|
||||
}
|
||||
|
||||
@@ -973,12 +973,12 @@ fn (t Tree) comptime_call(node ast.ComptimeCall) &Node {
|
||||
obj.add_terse('has_parens', t.bool_node(node.has_parens))
|
||||
obj.add_terse('is_embed', t.bool_node(node.is_embed))
|
||||
obj.add_terse('embed_file', t.embed_file(node.embed_file))
|
||||
obj.add('method_pos', t.position(node.method_pos))
|
||||
obj.add('method_pos', t.pos(node.method_pos))
|
||||
obj.add_terse('left_type', t.type_node(node.left_type))
|
||||
obj.add_terse('result_type', t.type_node(node.result_type))
|
||||
obj.add('scope', t.scope(node.scope))
|
||||
obj.add_terse('env_value', t.string_node(node.env_value))
|
||||
obj.add('pos', t.position(node.pos))
|
||||
obj.add('pos', t.pos(node.pos))
|
||||
obj.add_terse('args', t.array_node_call_arg(node.args))
|
||||
return obj
|
||||
}
|
||||
@@ -991,7 +991,7 @@ fn (t Tree) comptime_selector(node ast.ComptimeSelector) &Node {
|
||||
obj.add_terse('field_expr', t.expr(node.field_expr))
|
||||
obj.add_terse('left_type', t.type_node(node.left_type))
|
||||
obj.add_terse('typ', t.type_node(node.typ))
|
||||
obj.add('pos', t.position(node.pos))
|
||||
obj.add('pos', t.pos(node.pos))
|
||||
return obj
|
||||
}
|
||||
|
||||
@@ -1001,7 +1001,7 @@ fn (t Tree) expr_stmt(node ast.ExprStmt) &Node {
|
||||
obj.add_terse('typ', t.type_node(node.typ))
|
||||
obj.add_terse('is_expr', t.bool_node(node.is_expr))
|
||||
obj.add_terse('expr', t.expr(node.expr))
|
||||
obj.add('pos', t.position(node.pos))
|
||||
obj.add('pos', t.pos(node.pos))
|
||||
obj.add('comments', t.array_node_comment(node.comments))
|
||||
return obj
|
||||
}
|
||||
@@ -1167,7 +1167,7 @@ fn (t Tree) integer_literal(node ast.IntegerLiteral) &Node {
|
||||
mut obj := new_object()
|
||||
obj.add_terse('ast_type', t.string_node('IntegerLiteral'))
|
||||
obj.add_terse('val', t.string_node(node.val))
|
||||
obj.add('pos', t.position(node.pos))
|
||||
obj.add('pos', t.pos(node.pos))
|
||||
return obj
|
||||
}
|
||||
|
||||
@@ -1175,7 +1175,7 @@ fn (t Tree) float_literal(node ast.FloatLiteral) &Node {
|
||||
mut obj := new_object()
|
||||
obj.add_terse('ast_type', t.string_node('FloatLiteral'))
|
||||
obj.add_terse('val', t.string_node(node.val))
|
||||
obj.add('pos', t.position(node.pos))
|
||||
obj.add('pos', t.pos(node.pos))
|
||||
return obj
|
||||
}
|
||||
|
||||
@@ -1185,7 +1185,7 @@ fn (t Tree) string_literal(node ast.StringLiteral) &Node {
|
||||
obj.add_terse('val', t.string_node(node.val))
|
||||
obj.add_terse('is_raw', t.bool_node(node.is_raw))
|
||||
obj.add_terse('language', t.enum_node(node.language))
|
||||
obj.add('pos', t.position(node.pos))
|
||||
obj.add('pos', t.pos(node.pos))
|
||||
return obj
|
||||
}
|
||||
|
||||
@@ -1193,7 +1193,7 @@ fn (t Tree) char_literal(node ast.CharLiteral) &Node {
|
||||
mut obj := new_object()
|
||||
obj.add_terse('ast_type', t.string_node('CharLiteral'))
|
||||
obj.add_terse('val', t.string_node(node.val))
|
||||
obj.add('pos', t.position(node.pos))
|
||||
obj.add('pos', t.pos(node.pos))
|
||||
return obj
|
||||
}
|
||||
|
||||
@@ -1201,7 +1201,7 @@ fn (t Tree) bool_literal(node ast.BoolLiteral) &Node {
|
||||
mut obj := new_object()
|
||||
obj.add_terse('ast_type', t.string_node('BoolLiteral'))
|
||||
obj.add_terse('val', t.bool_node(node.val))
|
||||
obj.add('pos', t.position(node.pos))
|
||||
obj.add('pos', t.pos(node.pos))
|
||||
return obj
|
||||
}
|
||||
|
||||
@@ -1218,7 +1218,7 @@ fn (t Tree) string_inter_literal(node ast.StringInterLiteral) &Node {
|
||||
obj.add_terse('fmt_poss', t.array_node_position(node.fmt_poss))
|
||||
obj.add_terse('fmts', t.array_node_byte(node.fmts))
|
||||
obj.add_terse('need_fmts', t.array_node_bool(node.need_fmts))
|
||||
obj.add('pos', t.position(node.pos))
|
||||
obj.add('pos', t.pos(node.pos))
|
||||
return obj
|
||||
}
|
||||
|
||||
@@ -1229,7 +1229,7 @@ fn (t Tree) enum_val(node ast.EnumVal) &Node {
|
||||
obj.add_terse('mod', t.string_node(node.mod))
|
||||
obj.add_terse('val', t.string_node(node.val))
|
||||
obj.add_terse('typ', t.type_node(node.typ))
|
||||
obj.add('pos', t.position(node.pos))
|
||||
obj.add('pos', t.pos(node.pos))
|
||||
return obj
|
||||
}
|
||||
|
||||
@@ -1240,7 +1240,7 @@ fn (t Tree) assoc(node ast.Assoc) &Node {
|
||||
obj.add_terse('fields', t.array_node_string(node.fields))
|
||||
obj.add_terse('exprs', t.array_node_expr(node.exprs))
|
||||
obj.add_terse('typ', t.type_node(node.typ))
|
||||
obj.add('pos', t.position(node.pos))
|
||||
obj.add('pos', t.pos(node.pos))
|
||||
obj.add('scope', t.number_node(int(node.scope)))
|
||||
return obj
|
||||
}
|
||||
@@ -1249,7 +1249,7 @@ fn (t Tree) at_expr(node ast.AtExpr) &Node {
|
||||
mut obj := new_object()
|
||||
obj.add_terse('ast_type', t.string_node('AtExpr'))
|
||||
obj.add_terse('name', t.string_node(node.name))
|
||||
obj.add('pos', t.position(node.pos))
|
||||
obj.add('pos', t.pos(node.pos))
|
||||
obj.add_terse('kind', t.enum_node(node.kind))
|
||||
obj.add_terse('val', t.string_node(node.val))
|
||||
return obj
|
||||
@@ -1265,7 +1265,7 @@ fn (t Tree) cast_expr(node ast.CastExpr) &Node {
|
||||
obj.add_terse('arg', t.expr(node.arg))
|
||||
obj.add_terse('expr_type', t.type_node(node.expr_type))
|
||||
obj.add_terse('expr', t.expr(node.expr))
|
||||
obj.add('pos', t.position(node.pos))
|
||||
obj.add('pos', t.pos(node.pos))
|
||||
return obj
|
||||
}
|
||||
|
||||
@@ -1275,7 +1275,7 @@ fn (t Tree) as_cast(node ast.AsCast) &Node {
|
||||
obj.add_terse('expr', t.expr(node.expr))
|
||||
obj.add_terse('typ', t.type_node(node.typ))
|
||||
obj.add_terse('expr_type', t.type_node(node.expr_type))
|
||||
obj.add('pos', t.position(node.pos))
|
||||
obj.add('pos', t.pos(node.pos))
|
||||
return obj
|
||||
}
|
||||
|
||||
@@ -1283,7 +1283,7 @@ fn (t Tree) type_expr(node ast.TypeNode) &Node {
|
||||
mut obj := new_object()
|
||||
obj.add_terse('ast_type', t.string_node('TypeNode'))
|
||||
obj.add_terse('typ', t.type_node(node.typ))
|
||||
obj.add('pos', t.position(node.pos))
|
||||
obj.add('pos', t.pos(node.pos))
|
||||
return obj
|
||||
}
|
||||
|
||||
@@ -1293,7 +1293,7 @@ fn (t Tree) size_of(node ast.SizeOf) &Node {
|
||||
obj.add_terse('is_type', t.bool_node(node.is_type))
|
||||
obj.add_terse('typ', t.type_node(node.typ))
|
||||
obj.add_terse('expr', t.expr(node.expr))
|
||||
obj.add('pos', t.position(node.pos))
|
||||
obj.add('pos', t.pos(node.pos))
|
||||
return obj
|
||||
}
|
||||
|
||||
@@ -1303,7 +1303,7 @@ fn (t Tree) is_ref_type(node ast.IsRefType) &Node {
|
||||
obj.add_terse('is_type', t.bool_node(node.is_type))
|
||||
obj.add_terse('typ', t.type_node(node.typ))
|
||||
obj.add_terse('expr', t.expr(node.expr))
|
||||
obj.add('pos', t.position(node.pos))
|
||||
obj.add('pos', t.pos(node.pos))
|
||||
return obj
|
||||
}
|
||||
|
||||
@@ -1315,7 +1315,7 @@ fn (t Tree) prefix_expr(node ast.PrefixExpr) &Node {
|
||||
obj.add_terse('right_type', t.type_node(node.right_type))
|
||||
obj.add_terse('or_block', t.or_expr(node.or_block))
|
||||
obj.add_terse('is_option', t.bool_node(node.is_option))
|
||||
obj.add('pos', t.position(node.pos))
|
||||
obj.add('pos', t.pos(node.pos))
|
||||
return obj
|
||||
}
|
||||
|
||||
@@ -1334,7 +1334,7 @@ fn (t Tree) infix_expr(node ast.InfixExpr) &Node {
|
||||
obj.add_terse('ct_left_value', t.comptime_expr_value(node.ct_left_value))
|
||||
obj.add_terse('ct_right_value_evaled', t.bool_node(node.ct_right_value_evaled))
|
||||
obj.add_terse('ct_right_value', t.comptime_expr_value(node.ct_right_value))
|
||||
obj.add('pos', t.position(node.pos))
|
||||
obj.add('pos', t.pos(node.pos))
|
||||
return obj
|
||||
}
|
||||
|
||||
@@ -1347,7 +1347,7 @@ fn (t Tree) index_expr(node ast.IndexExpr) &Node {
|
||||
obj.add_terse('is_setter', t.bool_node(node.is_setter))
|
||||
obj.add_terse('is_direct', t.bool_node(node.is_direct))
|
||||
obj.add_terse('or_expr', t.or_expr(node.or_expr))
|
||||
obj.add('pos', t.position(node.pos))
|
||||
obj.add('pos', t.pos(node.pos))
|
||||
return obj
|
||||
}
|
||||
|
||||
@@ -1357,7 +1357,7 @@ fn (t Tree) postfix_expr(node ast.PostfixExpr) &Node {
|
||||
obj.add_terse('op', t.token_node(node.op))
|
||||
obj.add_terse('expr', t.expr(node.expr))
|
||||
obj.add('auto_locked', t.string_node(node.auto_locked))
|
||||
obj.add('pos', t.position(node.pos))
|
||||
obj.add('pos', t.pos(node.pos))
|
||||
return obj
|
||||
}
|
||||
|
||||
@@ -1372,7 +1372,7 @@ fn (t Tree) selector_expr(node ast.SelectorExpr) &Node {
|
||||
obj.add_terse('gkind_field', t.enum_node(node.gkind_field))
|
||||
obj.add_terse('from_embed_types', t.array_node_type(node.from_embed_types))
|
||||
obj.add_terse('next_token', t.token_node(node.next_token))
|
||||
obj.add('pos', t.position(node.pos))
|
||||
obj.add('pos', t.pos(node.pos))
|
||||
obj.add('scope', t.number_node(int(node.scope)))
|
||||
return obj
|
||||
}
|
||||
@@ -1384,7 +1384,7 @@ fn (t Tree) range_expr(node ast.RangeExpr) &Node {
|
||||
obj.add_terse('high', t.expr(node.high))
|
||||
obj.add_terse('has_high', t.bool_node(node.has_high))
|
||||
obj.add_terse('has_low', t.bool_node(node.has_low))
|
||||
obj.add('pos', t.position(node.pos))
|
||||
obj.add('pos', t.pos(node.pos))
|
||||
return obj
|
||||
}
|
||||
|
||||
@@ -1398,7 +1398,7 @@ fn (t Tree) if_expr(node ast.IfExpr) &Node {
|
||||
obj.add_terse('typ', t.type_node(node.typ))
|
||||
obj.add_terse('has_else', t.bool_node(node.has_else))
|
||||
obj.add_terse('is_expr', t.bool_node(node.is_expr))
|
||||
obj.add('pos', t.position(node.pos))
|
||||
obj.add('pos', t.pos(node.pos))
|
||||
obj.add('post_comments', t.array_node_comment(node.post_comments))
|
||||
return obj
|
||||
}
|
||||
@@ -1407,8 +1407,8 @@ fn (t Tree) if_branch(node ast.IfBranch) &Node {
|
||||
mut obj := new_object()
|
||||
obj.add_terse('ast_type', t.string_node('IfBranch'))
|
||||
obj.add_terse('cond', t.expr(node.cond))
|
||||
obj.add('pos', t.position(node.pos))
|
||||
obj.add('body_pos', t.position(node.body_pos))
|
||||
obj.add('pos', t.pos(node.pos))
|
||||
obj.add('body_pos', t.pos(node.body_pos))
|
||||
obj.add_terse('pkg_exist', t.bool_node(node.pkg_exist))
|
||||
obj.add_terse('stmts', t.array_node_stmt(node.stmts))
|
||||
obj.add('scope', t.number_node(int(node.scope)))
|
||||
@@ -1427,8 +1427,8 @@ fn (t Tree) ident(node ast.Ident) &Node {
|
||||
obj.add_terse('tok_kind', t.token_node(node.tok_kind))
|
||||
obj.add_terse('kind', t.enum_node(node.kind))
|
||||
obj.add_terse('info', t.ident_info(node.info))
|
||||
obj.add('pos', t.position(node.pos))
|
||||
obj.add('mut_pos', t.position(node.mut_pos))
|
||||
obj.add('pos', t.pos(node.pos))
|
||||
obj.add('mut_pos', t.pos(node.mut_pos))
|
||||
obj.add('obj', t.scope_object(node.obj))
|
||||
obj.add('scope', t.number_node(int(node.scope)))
|
||||
return obj
|
||||
@@ -1481,11 +1481,11 @@ fn (t Tree) call_expr(node ast.CallExpr) &Node {
|
||||
obj.add_terse('expected_arg_types', t.array_node_type(node.expected_arg_types))
|
||||
obj.add_terse('concrete_types', t.array_node_type(node.concrete_types))
|
||||
obj.add_terse('or_block', t.or_expr(node.or_block))
|
||||
obj.add('concrete_list_pos', t.position(node.concrete_list_pos))
|
||||
obj.add('concrete_list_pos', t.pos(node.concrete_list_pos))
|
||||
obj.add_terse('from_embed_types', t.array_node_type(node.from_embed_types))
|
||||
obj.add('comments', t.array_node_comment(node.comments))
|
||||
obj.add('pos', t.position(node.pos))
|
||||
obj.add('name_pos', t.position(node.name_pos))
|
||||
obj.add('pos', t.pos(node.pos))
|
||||
obj.add('name_pos', t.pos(node.name_pos))
|
||||
return obj
|
||||
}
|
||||
|
||||
@@ -1497,7 +1497,7 @@ fn (t Tree) call_arg(node ast.CallArg) &Node {
|
||||
obj.add_terse('share', t.enum_node(node.share))
|
||||
obj.add_terse('expr', t.expr(node.expr))
|
||||
obj.add('is_tmp_autofree', t.bool_node(node.is_tmp_autofree))
|
||||
obj.add('pos', t.position(node.pos))
|
||||
obj.add('pos', t.pos(node.pos))
|
||||
obj.add('comments', t.array_node_comment(node.comments))
|
||||
return obj
|
||||
}
|
||||
@@ -1507,7 +1507,7 @@ fn (t Tree) or_expr(node ast.OrExpr) &Node {
|
||||
obj.add_terse('ast_type', t.string_node('OrExpr'))
|
||||
obj.add_terse('stmts', t.array_node_stmt(node.stmts))
|
||||
obj.add_terse('kind', t.enum_node(node.kind))
|
||||
obj.add('pos', t.position(node.pos))
|
||||
obj.add('pos', t.pos(node.pos))
|
||||
return obj
|
||||
}
|
||||
|
||||
@@ -1520,8 +1520,8 @@ fn (t Tree) struct_init(node ast.StructInit) &Node {
|
||||
obj.add_terse('has_update_expr', t.bool_node(node.has_update_expr))
|
||||
obj.add_terse('update_expr', t.expr(node.update_expr))
|
||||
obj.add_terse('update_expr_type', t.type_node(node.update_expr_type))
|
||||
obj.add('pos', t.position(node.pos))
|
||||
obj.add('name_pos', t.position(node.name_pos))
|
||||
obj.add('pos', t.pos(node.pos))
|
||||
obj.add('name_pos', t.pos(node.name_pos))
|
||||
obj.add('update_expr_comments', t.array_node_comment(node.update_expr_comments))
|
||||
obj.add_terse('fields', t.array_node_struct_init_field(node.fields))
|
||||
obj.add_terse('embeds', t.array_node_struct_init_embed(node.embeds))
|
||||
@@ -1539,8 +1539,8 @@ fn (t Tree) struct_init_field(node ast.StructInitField) &Node {
|
||||
obj.add_terse('parent_type', t.type_node(node.parent_type))
|
||||
obj.add('comments', t.array_node_comment(node.comments))
|
||||
obj.add('next_comments', t.array_node_comment(node.next_comments))
|
||||
obj.add('pos', t.position(node.pos))
|
||||
obj.add('name_pos', t.position(node.name_pos))
|
||||
obj.add('pos', t.pos(node.pos))
|
||||
obj.add('name_pos', t.pos(node.name_pos))
|
||||
return obj
|
||||
}
|
||||
|
||||
@@ -1553,7 +1553,7 @@ fn (t Tree) struct_init_embed(node ast.StructInitEmbed) &Node {
|
||||
obj.add_terse('expected_type', t.type_node(node.expected_type))
|
||||
obj.add('comments', t.array_node_comment(node.comments))
|
||||
obj.add('next_comments', t.array_node_comment(node.next_comments))
|
||||
obj.add('pos', t.position(node.pos))
|
||||
obj.add('pos', t.pos(node.pos))
|
||||
return obj
|
||||
}
|
||||
|
||||
@@ -1565,7 +1565,7 @@ fn (t Tree) array_init(node ast.ArrayInit) &Node {
|
||||
obj.add_terse('exprs', t.array_node_expr(node.exprs))
|
||||
obj.add('ecmnts', t.two_dimension_comment(node.ecmnts))
|
||||
obj.add('pre_cmnts', t.array_node_comment(node.pre_cmnts))
|
||||
obj.add('elem_type_pos', t.position(node.elem_type_pos))
|
||||
obj.add('elem_type_pos', t.pos(node.elem_type_pos))
|
||||
obj.add_terse('is_fixed', t.bool_node(node.is_fixed))
|
||||
obj.add_terse('has_val', t.bool_node(node.has_val))
|
||||
obj.add_terse('mod', t.string_node(node.mod))
|
||||
@@ -1577,7 +1577,7 @@ fn (t Tree) array_init(node ast.ArrayInit) &Node {
|
||||
obj.add_terse('has_default', t.bool_node(node.has_default))
|
||||
obj.add_terse('has_it', t.bool_node(node.has_it))
|
||||
obj.add_terse('expr_types', t.array_node_type(node.expr_types))
|
||||
obj.add('pos', t.position(node.pos))
|
||||
obj.add('pos', t.pos(node.pos))
|
||||
return obj
|
||||
}
|
||||
|
||||
@@ -1589,16 +1589,17 @@ fn (t Tree) map_init(node ast.MapInit) &Node {
|
||||
obj.add_terse('value_type', t.type_node(node.value_type))
|
||||
obj.add_terse('keys', t.array_node_expr(node.keys))
|
||||
obj.add_terse('vals', t.array_node_expr(node.vals))
|
||||
obj.add_terse('val_types', t.array_node_type(node.val_types))
|
||||
obj.add('comments', t.two_dimension_comment(node.comments))
|
||||
obj.add('pre_cmnts', t.array_node_comment(node.pre_cmnts))
|
||||
obj.add('pos', t.position(node.pos))
|
||||
obj.add('pos', t.pos(node.pos))
|
||||
return obj
|
||||
}
|
||||
|
||||
fn (t Tree) none_expr(node ast.None) &Node {
|
||||
mut obj := new_object()
|
||||
obj.add_terse('ast_type', t.string_node('None'))
|
||||
obj.add('pos', t.position(node.pos))
|
||||
obj.add('pos', t.pos(node.pos))
|
||||
return obj
|
||||
}
|
||||
|
||||
@@ -1606,17 +1607,25 @@ fn (t Tree) par_expr(node ast.ParExpr) &Node {
|
||||
mut obj := new_object()
|
||||
obj.add_terse('ast_type', t.string_node('ParExpr'))
|
||||
obj.add_terse('expr', t.expr(node.expr))
|
||||
obj.add('pos', t.position(node.pos))
|
||||
obj.add('pos', t.pos(node.pos))
|
||||
return obj
|
||||
}
|
||||
|
||||
fn (t Tree) if_guard_expr(node ast.IfGuardExpr) &Node {
|
||||
mut obj := new_object()
|
||||
obj.add_terse('ast_type', t.string_node('IfGuardExpr'))
|
||||
obj.add_terse('var_name', t.string_node(node.var_name))
|
||||
obj.add_terse('vars', t.array_node_if_guard_var(node.vars))
|
||||
obj.add_terse('expr', t.expr(node.expr))
|
||||
obj.add_terse('expr_type', t.type_node(node.expr_type))
|
||||
obj.add('pos', t.position(node.pos))
|
||||
return obj
|
||||
}
|
||||
|
||||
fn (t Tree) if_guard_var(node ast.IfGuardVar) &Node {
|
||||
mut obj := new_object()
|
||||
obj.add_terse('ast_type', t.string_node('IfGuardVar'))
|
||||
obj.add_terse('name', t.string_node(node.name))
|
||||
obj.add_terse('is_mut', t.bool_node(node.is_mut))
|
||||
obj.add('pos', t.pos(node.pos))
|
||||
return obj
|
||||
}
|
||||
|
||||
@@ -1630,7 +1639,7 @@ fn (t Tree) match_expr(node ast.MatchExpr) &Node {
|
||||
obj.add_terse('expected_type', t.type_node(node.expected_type))
|
||||
obj.add_terse('is_sum_type', t.bool_node(node.is_sum_type))
|
||||
obj.add_terse('is_expr', t.bool_node(node.is_expr))
|
||||
obj.add('pos', t.position(node.pos))
|
||||
obj.add('pos', t.pos(node.pos))
|
||||
obj.add_terse('branches', t.array_node_match_branch(node.branches))
|
||||
obj.add('comments', t.array_node_comment(node.comments))
|
||||
return obj
|
||||
@@ -1642,9 +1651,9 @@ fn (t Tree) match_branch(node ast.MatchBranch) &Node {
|
||||
obj.add('ecmnts', t.two_dimension_comment(node.ecmnts))
|
||||
obj.add_terse('stmts', t.array_node_stmt(node.stmts))
|
||||
obj.add_terse('is_else', t.bool_node(node.is_else))
|
||||
obj.add('pos', t.position(node.pos))
|
||||
obj.add('pos', t.pos(node.pos))
|
||||
obj.add_terse('post_comments', t.array_node_comment(node.post_comments))
|
||||
obj.add('branch_pos', t.position(node.branch_pos))
|
||||
obj.add('branch_pos', t.pos(node.branch_pos))
|
||||
obj.add_terse('exprs', t.array_node_expr(node.exprs))
|
||||
obj.add('scope', t.number_node(int(node.scope)))
|
||||
return obj
|
||||
@@ -1655,7 +1664,7 @@ fn (t Tree) concat_expr(node ast.ConcatExpr) &Node {
|
||||
obj.add_terse('ast_type', t.string_node('ConcatExpr'))
|
||||
obj.add_terse('vals', t.array_node_expr(node.vals))
|
||||
obj.add_terse('return_type', t.type_node(node.return_type))
|
||||
obj.add('pos', t.position(node.pos))
|
||||
obj.add('pos', t.pos(node.pos))
|
||||
return obj
|
||||
}
|
||||
|
||||
@@ -1664,7 +1673,7 @@ fn (t Tree) type_of(node ast.TypeOf) &Node {
|
||||
obj.add_terse('ast_type', t.string_node('TypeOf'))
|
||||
obj.add_terse('expr', t.expr(node.expr))
|
||||
obj.add_terse('expr_type', t.type_node(node.expr_type))
|
||||
obj.add('pos', t.position(node.pos))
|
||||
obj.add('pos', t.pos(node.pos))
|
||||
return obj
|
||||
}
|
||||
|
||||
@@ -1673,7 +1682,7 @@ fn (t Tree) likely(node ast.Likely) &Node {
|
||||
obj.add_terse('ast_type', t.string_node('Likely'))
|
||||
obj.add_terse('expr', t.expr(node.expr))
|
||||
obj.add_terse('is_likely', t.bool_node(node.is_likely))
|
||||
obj.add('pos', t.position(node.pos))
|
||||
obj.add('pos', t.pos(node.pos))
|
||||
return obj
|
||||
}
|
||||
|
||||
@@ -1690,7 +1699,7 @@ fn (t Tree) sql_expr(node ast.SqlExpr) &Node {
|
||||
obj.add_terse('order_expr', t.expr(node.order_expr))
|
||||
obj.add_terse('has_desc', t.bool_node(node.has_desc))
|
||||
obj.add_terse('is_array', t.bool_node(node.is_array))
|
||||
obj.add('pos', t.position(node.pos))
|
||||
obj.add('pos', t.pos(node.pos))
|
||||
obj.add_terse('has_limit', t.bool_node(node.has_limit))
|
||||
obj.add_terse('limit_expr', t.expr(node.limit_expr))
|
||||
obj.add_terse('has_offset', t.bool_node(node.has_offset))
|
||||
@@ -1708,7 +1717,7 @@ fn (t Tree) sql_stmt(node ast.SqlStmt) &Node {
|
||||
mut obj := new_object()
|
||||
obj.add_terse('ast_type', t.string_node('SqlStmt'))
|
||||
obj.add_terse('db_expr', t.expr(node.db_expr))
|
||||
obj.add('pos', t.position(node.pos))
|
||||
obj.add('pos', t.pos(node.pos))
|
||||
obj.add_terse('lines', t.array_node_sql_stmt_line(node.lines))
|
||||
return obj
|
||||
}
|
||||
@@ -1723,7 +1732,7 @@ fn (t Tree) sql_stmt_line(node ast.SqlStmtLine) &Node {
|
||||
obj.add_terse('fields', t.array_node_struct_field(node.fields))
|
||||
obj.add_terse('updated_columns', t.array_node_string(node.updated_columns))
|
||||
obj.add_terse('update_exprs', t.array_node_expr(node.update_exprs))
|
||||
obj.add('pos', t.position(node.pos))
|
||||
obj.add('pos', t.pos(node.pos))
|
||||
|
||||
sub_struct_map := new_object()
|
||||
for key, val in node.sub_structs {
|
||||
@@ -1738,7 +1747,7 @@ fn (t Tree) lock_expr(expr ast.LockExpr) &Node {
|
||||
obj.add_terse('ast_type', t.string_node('LockExpr'))
|
||||
obj.add_terse('is_expr', t.bool_node(expr.is_expr))
|
||||
obj.add_terse('typ', t.type_node(expr.typ))
|
||||
obj.add('pos', t.position(expr.pos))
|
||||
obj.add('pos', t.pos(expr.pos))
|
||||
obj.add_terse('stmts', t.array_node_stmt(expr.stmts))
|
||||
obj.add_terse('lockeds', t.array_node_expr(expr.lockeds))
|
||||
obj.add_terse('r_lock', t.array_node_bool(expr.is_rlock))
|
||||
@@ -1749,7 +1758,7 @@ fn (t Tree) unsafe_expr(expr ast.UnsafeExpr) &Node {
|
||||
mut obj := new_object()
|
||||
obj.add_terse('ast_type', t.string_node('UnsafeExpr'))
|
||||
obj.add_terse('expr', t.expr(expr.expr))
|
||||
obj.add('pos', t.position(expr.pos))
|
||||
obj.add('pos', t.pos(expr.pos))
|
||||
return obj
|
||||
}
|
||||
|
||||
@@ -1760,7 +1769,7 @@ fn (t Tree) chan_init(expr ast.ChanInit) &Node {
|
||||
obj.add_terse('cap_expr', t.expr(expr.cap_expr))
|
||||
obj.add_terse('typ', t.type_node(expr.typ))
|
||||
obj.add_terse('elem_type', t.type_node(expr.elem_type))
|
||||
obj.add('pos', t.position(expr.pos))
|
||||
obj.add('pos', t.pos(expr.pos))
|
||||
return obj
|
||||
}
|
||||
|
||||
@@ -1771,7 +1780,7 @@ fn (t Tree) select_expr(expr ast.SelectExpr) &Node {
|
||||
obj.add_terse('is_expr', t.bool_node(expr.is_expr))
|
||||
obj.add_terse('has_exception', t.bool_node(expr.has_exception))
|
||||
obj.add_terse('expected_type', t.type_node(expr.expected_type))
|
||||
obj.add('pos', t.position(expr.pos))
|
||||
obj.add('pos', t.pos(expr.pos))
|
||||
return obj
|
||||
}
|
||||
|
||||
@@ -1780,7 +1789,7 @@ fn (t Tree) select_branch(expr ast.SelectBranch) &Node {
|
||||
obj.add_terse('ast_type', t.string_node('SelectBranch'))
|
||||
obj.add_terse('stmt', t.stmt(expr.stmt))
|
||||
obj.add_terse('stmts', t.array_node_stmt(expr.stmts))
|
||||
obj.add('pos', t.position(expr.pos))
|
||||
obj.add('pos', t.pos(expr.pos))
|
||||
obj.add('comment', t.comment(expr.comment))
|
||||
obj.add_terse('is_else', t.bool_node(expr.is_else))
|
||||
obj.add_terse('is_timeout', t.bool_node(expr.is_timeout))
|
||||
@@ -1794,7 +1803,7 @@ fn (t Tree) array_decompose(expr ast.ArrayDecompose) &Node {
|
||||
obj.add_terse('expr', t.expr(expr.expr))
|
||||
obj.add_terse('expr_type', t.type_node(expr.expr_type))
|
||||
obj.add_terse('arg_type', t.type_node(expr.arg_type))
|
||||
obj.add('pos', t.position(expr.pos))
|
||||
obj.add('pos', t.pos(expr.pos))
|
||||
return obj
|
||||
}
|
||||
|
||||
@@ -1803,7 +1812,7 @@ fn (t Tree) go_expr(expr ast.GoExpr) &Node {
|
||||
obj.add_terse('ast_type', t.string_node('GoExpr'))
|
||||
obj.add_terse('call_expr', t.call_expr(expr.call_expr))
|
||||
obj.add_terse('is_expr', t.bool_node(expr.is_expr))
|
||||
obj.add('pos', t.position(expr.pos))
|
||||
obj.add('pos', t.pos(expr.pos))
|
||||
return obj
|
||||
}
|
||||
|
||||
@@ -1812,7 +1821,7 @@ fn (t Tree) offset_of(expr ast.OffsetOf) &Node {
|
||||
obj.add_terse('ast_type', t.string_node('OffsetOf'))
|
||||
obj.add_terse('struct_type', t.type_node(expr.struct_type))
|
||||
obj.add_terse('field', t.string_node('field'))
|
||||
obj.add('pos', t.position(expr.pos))
|
||||
obj.add('pos', t.pos(expr.pos))
|
||||
return obj
|
||||
}
|
||||
|
||||
@@ -1821,7 +1830,7 @@ fn (t Tree) dump_expr(expr ast.DumpExpr) &Node {
|
||||
obj.add_terse('ast_type', t.string_node('DumpExpr'))
|
||||
obj.add_terse('expr', t.expr(expr.expr))
|
||||
obj.add_terse('expr_type', t.type_node(expr.expr_type))
|
||||
obj.add('pos', t.position(expr.pos))
|
||||
obj.add('pos', t.pos(expr.pos))
|
||||
return obj
|
||||
}
|
||||
|
||||
@@ -1829,7 +1838,7 @@ fn (t Tree) node_error(expr ast.NodeError) &Node {
|
||||
mut obj := new_object()
|
||||
obj.add_terse('ast_type', t.string_node('NodeError'))
|
||||
obj.add_terse('idx', t.number_node(expr.idx))
|
||||
obj.add('pos', t.position(expr.pos))
|
||||
obj.add('pos', t.pos(expr.pos))
|
||||
return obj
|
||||
}
|
||||
|
||||
@@ -1843,7 +1852,7 @@ fn (t Tree) empty_expr(expr ast.EmptyExpr) &Node {
|
||||
fn (t Tree) empty_stmt(node ast.EmptyStmt) &Node {
|
||||
mut obj := new_object()
|
||||
obj.add_terse('ast_type', t.string_node('EmptyStmt'))
|
||||
obj.add('pos', t.position(node.pos))
|
||||
obj.add('pos', t.pos(node.pos))
|
||||
return obj
|
||||
}
|
||||
|
||||
@@ -1856,7 +1865,7 @@ fn (t Tree) asm_stmt(node ast.AsmStmt) &Node {
|
||||
obj.add_terse('is_goto', t.bool_node(node.is_goto))
|
||||
obj.add('scope', t.scope(node.scope))
|
||||
// obj.add('scope', t.number_node(int(node.scope)))
|
||||
obj.add('pos', t.position(node.pos))
|
||||
obj.add('pos', t.pos(node.pos))
|
||||
obj.add_terse('clobbered', t.array_node_asm_clobbered(node.clobbered))
|
||||
obj.add_terse('templates', t.array_node_asm_template(node.templates))
|
||||
obj.add_terse('output', t.array_node_asm_io(node.output))
|
||||
@@ -1883,7 +1892,7 @@ fn (t Tree) asm_template(node ast.AsmTemplate) &Node {
|
||||
obj.add_terse('is_directive', t.bool_node(node.is_directive))
|
||||
obj.add_terse('args', t.array_node_asm_arg(node.args))
|
||||
obj.add('comments', t.array_node_comment(node.comments))
|
||||
obj.add('pos', t.position(node.pos))
|
||||
obj.add('pos', t.pos(node.pos))
|
||||
return obj
|
||||
}
|
||||
|
||||
@@ -1896,7 +1905,7 @@ fn (t Tree) asm_addressing(node ast.AsmAddressing) &Node {
|
||||
obj.add_terse('displacement', t.asm_arg(node.displacement))
|
||||
obj.add_terse('base', t.asm_arg(node.base))
|
||||
obj.add_terse('index', t.asm_arg(node.index))
|
||||
obj.add('pos', t.position(node.pos))
|
||||
obj.add('pos', t.pos(node.pos))
|
||||
return obj
|
||||
}
|
||||
|
||||
@@ -1936,7 +1945,7 @@ fn (t Tree) asm_alias(node ast.AsmAlias) &Node {
|
||||
mut obj := new_object()
|
||||
obj.add_terse('ast_type', t.string_node('AsmAlias'))
|
||||
obj.add_terse('name', t.string_node(node.name))
|
||||
obj.add('pos', t.position(node.pos))
|
||||
obj.add('pos', t.pos(node.pos))
|
||||
return obj
|
||||
}
|
||||
|
||||
@@ -1944,7 +1953,7 @@ fn (t Tree) asm_disp(node ast.AsmDisp) &Node {
|
||||
mut obj := new_object()
|
||||
obj.add_terse('ast_type', t.string_node('AsmDisp'))
|
||||
obj.add_terse('val', t.string_node(node.val))
|
||||
obj.add('pos', t.position(node.pos))
|
||||
obj.add('pos', t.pos(node.pos))
|
||||
return obj
|
||||
}
|
||||
|
||||
@@ -1964,7 +1973,7 @@ fn (t Tree) asm_io(node ast.AsmIO) &Node {
|
||||
obj.add_terse('expr', t.expr(node.expr))
|
||||
obj.add_terse('typ', t.type_node(node.typ))
|
||||
obj.add('comments', t.array_node_comment(node.comments))
|
||||
obj.add('pos', t.position(node.pos))
|
||||
obj.add('pos', t.pos(node.pos))
|
||||
return obj
|
||||
}
|
||||
|
||||
@@ -2024,10 +2033,10 @@ fn (t Tree) array_node_string(nodes []string) &Node {
|
||||
return arr
|
||||
}
|
||||
|
||||
fn (t Tree) array_node_position(nodes []token.Position) &Node {
|
||||
fn (t Tree) array_node_position(nodes []token.Pos) &Node {
|
||||
mut arr := new_array()
|
||||
for node in nodes {
|
||||
arr.add_item(t.position(node))
|
||||
arr.add_item(t.pos(node))
|
||||
}
|
||||
return arr
|
||||
}
|
||||
@@ -2224,6 +2233,14 @@ fn (t Tree) array_node_struct_init_field(nodes []ast.StructInitField) &Node {
|
||||
return arr
|
||||
}
|
||||
|
||||
fn (t Tree) array_node_if_guard_var(nodes []ast.IfGuardVar) &Node {
|
||||
mut arr := new_array()
|
||||
for node in nodes {
|
||||
arr.add_item(t.if_guard_var(node))
|
||||
}
|
||||
return arr
|
||||
}
|
||||
|
||||
fn (t Tree) array_node_struct_init_embed(nodes []ast.StructInitEmbed) &Node {
|
||||
mut arr := new_array()
|
||||
for node in nodes {
|
||||
|
||||
@@ -8,7 +8,7 @@ const vroot = @VMODROOT
fn get_vdoctor_output(is_verbose bool) string {
vexe := os.getenv('VEXE')
verbose_flag := if is_verbose { '-v' } else { '' }
result := os.execute('$vexe $verbose_flag doctor')
result := os.execute('${os.quoted_path(vexe)} $verbose_flag doctor')
if result.exit_code != 0 {
eprintln('unable to get `v doctor` output: $result.output')
return ''
@@ -24,7 +24,7 @@ fn get_v_build_output(is_verbose bool, is_yes bool, file_path string) string {
os.chdir(vroot) or {}
verbose_flag := if is_verbose { '-v' } else { '' }
vdbg_path := $if windows { '$vroot/vdbg.exe' } $else { '$vroot/vdbg' }
vdbg_compilation_cmd := '"$vexe" $verbose_flag -g -o "$vdbg_path" cmd/v'
vdbg_compilation_cmd := '${os.quoted_path(vexe)} $verbose_flag -g -o ${os.quoted_path(vdbg_path)} cmd/v'
vdbg_result := os.execute(vdbg_compilation_cmd)
os.chdir(wd) or {}
if vdbg_result.exit_code == 0 {
@@ -33,7 +33,7 @@ fn get_v_build_output(is_verbose bool, is_yes bool, file_path string) string {
eprintln('unable to compile V in debug mode: $vdbg_result.output\ncommand: $vdbg_compilation_cmd\n')
}
//
mut result := os.execute('"$vexe" $verbose_flag "$file_path"')
mut result := os.execute('${os.quoted_path(vexe)} $verbose_flag ${os.quoted_path(file_path)}')
defer {
os.rm(vdbg_path) or {
if is_verbose {
@@ -56,7 +56,7 @@ fn get_v_build_output(is_verbose bool, is_yes bool, file_path string) string {
run := is_yes
|| ask('It looks like the compilation went well, do you want to run the file?')
if run {
result = os.execute('"$vexe" $verbose_flag run "$file_path"')
result = os.execute('${os.quoted_path(vexe)} $verbose_flag run ${os.quoted_path(file_path)}')
if result.exit_code == 0 && !is_yes {
confirm_or_exit('It looks like the file ran correctly as well, are you sure you want to continue?')
}

@@ -69,22 +69,19 @@ fn run_individual_test(case BumpTestCase) ? {

os.rm(test_file) or {}
os.write_file(test_file, case.contents) ?
//
os.execute_or_exit('${os.quoted_path(vexe)} bump --patch ${os.quoted_path(test_file)}')
patch_lines := os.read_lines(test_file) ?
assert patch_lines[case.line] == case.expected_patch

{
os.execute_or_exit('$vexe bump --patch $test_file')
patch_lines := os.read_lines(test_file) ?
assert patch_lines[case.line] == case.expected_patch
}
{
os.execute_or_exit('$vexe bump --minor $test_file')
minor_lines := os.read_lines(test_file) ?
assert minor_lines[case.line] == case.expected_minor
}
{
os.execute_or_exit('$vexe bump --major $test_file')
major_lines := os.read_lines(test_file) ?
assert major_lines[case.line] == case.expected_major
}
os.execute_or_exit('${os.quoted_path(vexe)} bump --minor ${os.quoted_path(test_file)}')
minor_lines := os.read_lines(test_file) ?
assert minor_lines[case.line] == case.expected_minor

os.execute_or_exit('${os.quoted_path(vexe)} bump --major ${os.quoted_path(test_file)}')
major_lines := os.read_lines(test_file) ?
assert major_lines[case.line] == case.expected_major
//
os.rm(test_file) ?
}

@@ -21,6 +21,7 @@ const (
|
||||
hide_warnings = '-hide-warnings' in os.args || '-w' in os.args
|
||||
show_progress = os.getenv('GITHUB_JOB') == '' && '-silent' !in os.args
|
||||
non_option_args = cmdline.only_non_options(os.args[2..])
|
||||
is_verbose = os.getenv('VERBOSE') != ''
|
||||
)
|
||||
|
||||
struct CheckResult {
|
||||
@@ -75,7 +76,7 @@ fn main() {
|
||||
res += mdfile.check()
|
||||
}
|
||||
if res.errors == 0 && show_progress {
|
||||
term.clear_previous_line()
|
||||
clear_previous_line()
|
||||
}
|
||||
if res.warnings > 0 || res.errors > 0 || res.oks > 0 {
|
||||
println('\nWarnings: $res.warnings | Errors: $res.errors | OKs: $res.oks')
|
||||
@@ -131,9 +132,7 @@ fn eline(file_path string, lnumber int, column int, message string) string {
|
||||
return btext('$file_path:${lnumber + 1}:${column + 1}:') + btext(rtext(' error: $message'))
|
||||
}
|
||||
|
||||
const (
|
||||
default_command = 'compile'
|
||||
)
|
||||
const default_command = 'compile'
|
||||
|
||||
struct VCodeExample {
|
||||
mut:
|
||||
@@ -160,7 +159,7 @@ mut:
|
||||
|
||||
fn (mut f MDFile) progress(message string) {
|
||||
if show_progress {
|
||||
term.clear_previous_line()
|
||||
clear_previous_line()
|
||||
println('File: ${f.path:-30s}, Lines: ${f.lines.len:5}, $message')
|
||||
}
|
||||
}
|
||||
@@ -172,30 +171,30 @@ fn (mut f MDFile) check() CheckResult {
|
||||
// f.progress('line: $j')
|
||||
if f.state == .vexample {
|
||||
if line.len > too_long_line_length_example {
|
||||
wprintln(wline(f.path, j, line.len, 'long V example line'))
|
||||
wprintln(wline(f.path, j, line.len, 'example lines must be less than $too_long_line_length_example characters'))
|
||||
wprintln(line)
|
||||
res.warnings++
|
||||
}
|
||||
} else if f.state == .codeblock {
|
||||
if line.len > too_long_line_length_codeblock {
|
||||
wprintln(wline(f.path, j, line.len, 'long code block line'))
|
||||
wprintln(wline(f.path, j, line.len, 'code lines must be less than $too_long_line_length_codeblock characters'))
|
||||
wprintln(line)
|
||||
res.warnings++
|
||||
}
|
||||
} else if line.starts_with('|') {
|
||||
if line.len > too_long_line_length_table {
|
||||
wprintln(wline(f.path, j, line.len, 'long table'))
|
||||
wprintln(wline(f.path, j, line.len, 'table lines must be less than $too_long_line_length_table characters'))
|
||||
wprintln(line)
|
||||
res.warnings++
|
||||
}
|
||||
} else if line.contains('http') {
|
||||
if line.all_after('https').len > too_long_line_length_link {
|
||||
wprintln(wline(f.path, j, line.len, 'long link'))
|
||||
wprintln(wline(f.path, j, line.len, 'link lines must be less than $too_long_line_length_link characters'))
|
||||
wprintln(line)
|
||||
res.warnings++
|
||||
}
|
||||
} else if line.len > too_long_line_length_other {
|
||||
eprintln(eline(f.path, j, line.len, 'line too long'))
|
||||
eprintln(eline(f.path, j, line.len, 'must be less than $too_long_line_length_other characters'))
|
||||
eprintln(line)
|
||||
res.errors++
|
||||
}
|
||||
@@ -394,6 +393,7 @@ fn (mut f MDFile) debug() {
}

fn cmdexecute(cmd string) int {
verbose_println(cmd)
res := os.execute(cmd)
if res.exit_code < 0 {
return 1
@@ -405,12 +405,13 @@ fn cmdexecute(cmd string) {
}

fn silent_cmdexecute(cmd string) int {
verbose_println(cmd)
res := os.execute(cmd)
return res.exit_code
}

fn get_fmt_exit_code(vfile string, vexe string) int {
return silent_cmdexecute('"$vexe" fmt -verify $vfile')
return silent_cmdexecute('${os.quoted_path(vexe)} fmt -verify ${os.quoted_path(vfile)}')
}

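The recurring change across these tool diffs is replacing hand-built '"$vexe" ...' command strings with os.quoted_path(), which quotes a path in a form that is safe for the host shell, including paths with spaces. A minimal sketch of the pattern (the file path is hypothetical):

```v
import os

fn main() {
	vexe := os.getenv('VEXE') // assumed to point at the V executable
	vfile := os.join_path(os.temp_dir(), 'my example.v') // note the space in the name
	os.write_file(vfile, 'fn main() {}\n') or { panic(err) }
	cmd := '${os.quoted_path(vexe)} -check-syntax ${os.quoted_path(vfile)}'
	res := os.execute(cmd)
	println('exit code: $res.exit_code')
	os.rm(vfile) or {}
}
```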
fn (mut f MDFile) check_examples() CheckResult {
|
||||
@@ -426,6 +427,7 @@ fn (mut f MDFile) check_examples() CheckResult {
|
||||
}
|
||||
fname := os.base(f.path).replace('.md', '_md')
|
||||
uid := rand.ulid()
|
||||
cfile := os.join_path(os.temp_dir(), '${uid}.c')
|
||||
vfile := os.join_path(os.temp_dir(), 'check_${fname}_example_${e.sline}__${e.eline}__${uid}.v')
|
||||
mut should_cleanup_vfile := true
|
||||
// eprintln('>>> checking example $vfile ...')
|
||||
@@ -438,8 +440,7 @@ fn (mut f MDFile) check_examples() CheckResult {
|
||||
fmt_res := if nofmt { 0 } else { get_fmt_exit_code(vfile, vexe) }
|
||||
match command {
|
||||
'compile' {
|
||||
res := cmdexecute('"$vexe" -w -Wfatal-errors -o x.c $vfile')
|
||||
os.rm('x.c') or {}
|
||||
res := cmdexecute('${os.quoted_path(vexe)} -w -Wfatal-errors ${os.quoted_path(vfile)}')
|
||||
if res != 0 || fmt_res != 0 {
|
||||
if res != 0 {
|
||||
eprintln(eline(f.path, e.sline, 0, 'example failed to compile'))
|
||||
@@ -454,9 +455,26 @@ fn (mut f MDFile) check_examples() CheckResult {
|
||||
}
|
||||
oks++
|
||||
}
|
||||
'cgen' {
|
||||
res := cmdexecute('${os.quoted_path(vexe)} -w -Wfatal-errors -o ${os.quoted_path(cfile)} ${os.quoted_path(vfile)}')
|
||||
os.rm(cfile) or {}
|
||||
if res != 0 || fmt_res != 0 {
|
||||
if res != 0 {
|
||||
eprintln(eline(f.path, e.sline, 0, 'example failed to generate C code'))
|
||||
}
|
||||
if fmt_res != 0 {
|
||||
eprintln(eline(f.path, e.sline, 0, 'example is not formatted'))
|
||||
}
|
||||
eprintln(vcontent)
|
||||
should_cleanup_vfile = false
|
||||
errors++
|
||||
continue
|
||||
}
|
||||
oks++
|
||||
}
|
||||
'globals' {
|
||||
res := cmdexecute('"$vexe" -w -Wfatal-errors -enable-globals -o x.c $vfile')
|
||||
os.rm('x.c') or {}
|
||||
res := cmdexecute('${os.quoted_path(vexe)} -w -Wfatal-errors -enable-globals -o ${os.quoted_path(cfile)} ${os.quoted_path(vfile)}')
|
||||
os.rm(cfile) or {}
|
||||
if res != 0 || fmt_res != 0 {
|
||||
if res != 0 {
|
||||
eprintln(eline(f.path, e.sline, 0, '`example failed to compile with -enable-globals'))
|
||||
@@ -472,7 +490,8 @@ fn (mut f MDFile) check_examples() CheckResult {
|
||||
oks++
|
||||
}
|
||||
'live' {
|
||||
res := cmdexecute('"$vexe" -w -Wfatal-errors -live -o x.c $vfile')
|
||||
res := cmdexecute('${os.quoted_path(vexe)} -w -Wfatal-errors -live -o ${os.quoted_path(cfile)} ${os.quoted_path(vfile)}')
|
||||
os.rm(cfile) or {}
|
||||
if res != 0 || fmt_res != 0 {
|
||||
if res != 0 {
|
||||
eprintln(eline(f.path, e.sline, 0, 'example failed to compile with -live'))
|
||||
@@ -488,8 +507,8 @@ fn (mut f MDFile) check_examples() CheckResult {
|
||||
oks++
|
||||
}
|
||||
'failcompile' {
|
||||
res := silent_cmdexecute('"$vexe" -w -Wfatal-errors -o x.c $vfile')
|
||||
os.rm('x.c') or {}
|
||||
res := silent_cmdexecute('${os.quoted_path(vexe)} -w -Wfatal-errors -o ${os.quoted_path(cfile)} ${os.quoted_path(vfile)}')
|
||||
os.rm(cfile) or {}
|
||||
if res == 0 || fmt_res != 0 {
|
||||
if res == 0 {
|
||||
eprintln(eline(f.path, e.sline, 0, '`failcompile` example compiled'))
|
||||
@@ -505,7 +524,7 @@ fn (mut f MDFile) check_examples() CheckResult {
|
||||
oks++
|
||||
}
|
||||
'oksyntax' {
|
||||
res := cmdexecute('"$vexe" -w -Wfatal-errors -check-syntax $vfile')
|
||||
res := cmdexecute('${os.quoted_path(vexe)} -w -Wfatal-errors -check-syntax ${os.quoted_path(vfile)}')
|
||||
if res != 0 || fmt_res != 0 {
|
||||
if res != 0 {
|
||||
eprintln(eline(f.path, e.sline, 0, '`oksyntax` example with invalid syntax'))
|
||||
@@ -521,7 +540,7 @@ fn (mut f MDFile) check_examples() CheckResult {
|
||||
oks++
|
||||
}
|
||||
'badsyntax' {
|
||||
res := silent_cmdexecute('"$vexe" -w -Wfatal-errors -check-syntax $vfile')
|
||||
res := silent_cmdexecute('${os.quoted_path(vexe)} -w -Wfatal-errors -check-syntax ${os.quoted_path(vfile)}')
|
||||
if res == 0 {
|
||||
eprintln(eline(f.path, e.sline, 0, '`badsyntax` example can be parsed fine'))
|
||||
eprintln(vcontent)
|
||||
@@ -533,7 +552,7 @@ fn (mut f MDFile) check_examples() CheckResult {
|
||||
}
|
||||
'nofmt' {}
|
||||
else {
|
||||
eprintln(eline(f.path, e.sline, 0, 'unrecognized command: "$command", use one of: wip/ignore/compile/failcompile/oksyntax/badsyntax'))
|
||||
eprintln(eline(f.path, e.sline, 0, 'unrecognized command: "$command", use one of: wip/ignore/compile/cgen/failcompile/oksyntax/badsyntax/nofmt'))
|
||||
should_cleanup_vfile = false
|
||||
errors++
|
||||
}
|
||||
@@ -548,3 +567,16 @@ fn (mut f MDFile) check_examples() CheckResult {
|
||||
oks: oks
|
||||
}
|
||||
}

fn verbose_println(message string) {
if is_verbose {
println(message)
}
}

fn clear_previous_line() {
if is_verbose {
return
}
term.clear_previous_line()
}

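clear_previous_line() is a guard around term.clear_previous_line(): when VERBOSE is set, progress lines are kept for debugging instead of being erased. A self-contained sketch of the same overwrite-in-place pattern (the loop and messages are illustrative only):

```v
import os
import term

const is_verbose = os.getenv('VERBOSE') != ''

// Same guard as in the tool above: keep output when verbose,
// otherwise erase the previous progress line before printing a new one.
fn clear_previous_line() {
	if is_verbose {
		return
	}
	term.clear_previous_line()
}

fn main() {
	println('') // seed a line for the first clear to erase
	for i in 1 .. 4 {
		clear_previous_line()
		println('checking file $i/3 ...')
	}
}
```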
@@ -44,6 +44,34 @@ import os
|
||||
const (
|
||||
auto_complete_shells = ['bash', 'fish', 'zsh', 'powershell'] // list of supported shells
|
||||
vexe = os.getenv('VEXE')
|
||||
help_text = "Usage:
|
||||
v complete [options] [SUBCMD] QUERY...
|
||||
|
||||
Description:
|
||||
Tool for bridging auto completion between various shells and v
|
||||
|
||||
Supported shells:
|
||||
bash, fish, zsh, powershell
|
||||
|
||||
Examples:
|
||||
Echo auto-detected shell install script to STDOUT
|
||||
v complete
|
||||
Echo specific shell install script to STDOUT
|
||||
v complete setup bash
|
||||
Auto complete input `v tes`*USER PUSHES TAB* (in Bash compatible format).
|
||||
This is not meant for manual invocation - it's called by the relevant
|
||||
shell via the script installed with `v complete` or `v complete setup SHELL`.
|
||||
v complete bash v tes
|
||||
|
||||
Options:
|
||||
-h, --help Show this help text.
|
||||
|
||||
SUBCMD:
|
||||
setup : setup [SHELL] - returns the code for completion setup for SHELL
|
||||
bash : [QUERY] - returns Bash compatible completion code with completions computed from QUERY
|
||||
fish : [QUERY] - returns Fish compatible completion code with completions computed from QUERY
|
||||
zsh : [QUERY] - returns ZSH compatible completion code with completions computed from QUERY
|
||||
powershell: [QUERY] - returns PowerShell compatible completion code with completions computed from QUERY"
|
||||
)
|
||||
|
||||
// Snooped from cmd/v/v.v, vlib/v/pref/pref.v
|
||||
@@ -229,6 +257,16 @@ const (
fn auto_complete(args []string) {
if args.len <= 1 || args[0] != 'complete' {
if args.len == 1 {
shell_path := os.getenv('SHELL')
if shell_path.len > 0 {
shell_name := os.file_name(shell_path).to_lower()
if shell_name in auto_complete_shells {
println(setup_for_shell(shell_name))
exit(0)
}
eprintln('Unknown shell ${shell_name}. Supported shells are: $auto_complete_shells')
exit(1)
}
eprintln('auto completion requires arguments to work.')
} else {
eprintln('auto completion failed for "$args".')
@@ -244,62 +282,7 @@ fn auto_complete(args []string) {
exit(1)
}
shell := sub_args[1]
|
||||
mut setup := ''
|
||||
match shell {
|
||||
'bash' {
|
||||
setup = '
|
||||
_v_completions() {
|
||||
local src
|
||||
local limit
|
||||
# Send all words up to the word the cursor is currently on
|
||||
let limit=1+\$COMP_CWORD
|
||||
src=\$($vexe complete bash \$(printf "%s\\n" \${COMP_WORDS[@]: 0:\$limit}))
|
||||
if [[ \$? == 0 ]]; then
|
||||
eval \${src}
|
||||
#echo \${src}
|
||||
fi
|
||||
}
|
||||
|
||||
complete -o nospace -F _v_completions v
|
||||
'
|
||||
}
|
||||
'fish' {
|
||||
setup = '
|
||||
function __v_completions
|
||||
# Send all words up to the one before the cursor
|
||||
$vexe complete fish (commandline -cop)
|
||||
end
|
||||
complete -f -c v -a "(__v_completions)"
|
||||
'
|
||||
}
|
||||
'zsh' {
|
||||
setup = '
|
||||
#compdef v
|
||||
_v() {
|
||||
local src
|
||||
# Send all words up to the word the cursor is currently on
|
||||
src=\$($vexe complete zsh \$(printf "%s\\n" \${(@)words[1,\$CURRENT]}))
|
||||
if [[ \$? == 0 ]]; then
|
||||
eval \${src}
|
||||
#echo \${src}
|
||||
fi
|
||||
}
|
||||
compdef _v v
|
||||
'
|
||||
}
|
||||
'powershell' {
|
||||
setup = '
|
||||
Register-ArgumentCompleter -Native -CommandName v -ScriptBlock {
|
||||
param(\$commandName, \$wordToComplete, \$cursorPosition)
|
||||
$vexe complete powershell "\$wordToComplete" | ForEach-Object {
|
||||
[System.Management.Automation.CompletionResult]::new(\$_, \$_, \'ParameterValue\', \$_)
|
||||
}
|
||||
}
|
||||
'
|
||||
}
|
||||
else {}
|
||||
}
|
||||
println(setup)
|
||||
println(setup_for_shell(shell))
|
||||
}
|
||||
'bash' {
|
||||
if sub_args.len <= 1 {
|
||||
@@ -334,6 +317,9 @@ Register-ArgumentCompleter -Native -CommandName v -ScriptBlock {
|
||||
}
|
||||
println(lines.join('\n'))
|
||||
}
|
||||
'-h', '--help' {
|
||||
println(help_text)
|
||||
}
|
||||
else {}
|
||||
}
|
||||
exit(0)
|
||||
@@ -348,12 +334,26 @@ fn append_separator_if_dir(path string) string {
return path
}

// nearest_path_or_root returns the nearest valid path searching
// backwards from `path`.
fn nearest_path_or_root(path string) string {
mut fixed_path := path
if !os.is_dir(fixed_path) {
fixed_path = path.all_before_last(os.path_separator)
if fixed_path == '' {
fixed_path = '/'
}
}
return fixed_path
}

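A short illustration of the intended behaviour of nearest_path_or_root(), assuming a Unix-like filesystem (the example paths are hypothetical):

```v
// nearest_path_or_root('/tmp')                 // '/tmp' is a directory -> returned unchanged
// nearest_path_or_root('/tmp/does_not_exist')  // not a directory -> falls back to '/tmp'
// nearest_path_or_root('/does_not_exist')      // nothing left before the separator -> '/'
```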
// auto_complete_request returns a list of completions resolved from a full argument list.
fn auto_complete_request(args []string) []string {
// Using space will ensure a uniform input in cases where the shell
// returns the completion input as a string (['v','run'] vs. ['v run']).
split_by := ' '
request := args.join(split_by)
mut do_home_expand := false
mut list := []string{}
// new_part := request.ends_with('\n\n')
mut parts := request.trim_right(' ').split(split_by)

@@ -362,7 +362,7 @@ fn auto_complete_request(args []string) []string {
|
||||
list << command
|
||||
}
|
||||
} else {
|
||||
part := parts.last().trim(' ')
|
||||
mut part := parts.last().trim(' ')
|
||||
mut parent_command := ''
|
||||
for i := parts.len - 1; i >= 0; i-- {
|
||||
if parts[i].starts_with('-') {
|
||||
@@ -435,17 +435,34 @@ fn auto_complete_request(args []string) []string {
|
||||
mut ls_path := '.'
|
||||
mut collect_all := part in auto_complete_commands
|
||||
mut path_complete := false
|
||||
do_home_expand = part.starts_with('~')
|
||||
if do_home_expand {
|
||||
add_sep := if part == '~' { os.path_separator } else { '' }
|
||||
part = part.replace_once('~', os.home_dir().trim_right(os.path_separator)) + add_sep
|
||||
}
|
||||
is_abs_path := part.starts_with(os.path_separator) // TODO Windows support for drive prefixes
|
||||
if part.ends_with(os.path_separator) || part == '.' || part == '..' {
|
||||
// 'v <command>(.*/$|.|..)<tab>' -> output full directory list
|
||||
ls_path = '.' + os.path_separator + part
|
||||
if is_abs_path {
|
||||
ls_path = nearest_path_or_root(part)
|
||||
}
|
||||
collect_all = true
|
||||
} else if !collect_all && part.contains(os.path_separator) && os.is_dir(os.dir(part)) {
|
||||
// 'v <command>(.*/.* && os.is_dir)<tab>' -> output completion friendly directory list
|
||||
ls_path = os.dir(part)
|
||||
if is_abs_path {
|
||||
ls_path = nearest_path_or_root(part)
|
||||
} else {
|
||||
ls_path = os.dir(part)
|
||||
}
|
||||
path_complete = true
|
||||
}
|
||||
|
||||
entries := os.ls(ls_path) or { return list }
|
||||
last := part.all_after_last(os.path_separator)
|
||||
mut last := part.all_after_last(os.path_separator)
|
||||
if is_abs_path && os.is_dir(part) {
|
||||
last = ''
|
||||
}
|
||||
if path_complete {
|
||||
path := part.all_before_last(os.path_separator)
|
||||
for entry in entries {
|
||||
@@ -453,27 +470,80 @@ fn auto_complete_request(args []string) []string {
|
||||
list << append_separator_if_dir(os.join_path(path, entry))
|
||||
}
|
||||
}
|
||||
// If only one possible file - send full path to completion system.
|
||||
// Please note that this might be bash specific - needs more testing.
|
||||
if list.len == 1 {
|
||||
list = [list[0]]
|
||||
}
|
||||
} else {
|
||||
for entry in entries {
|
||||
if collect_all {
|
||||
if collect_all || entry.starts_with(last) {
|
||||
list << append_separator_if_dir(entry)
|
||||
} else {
|
||||
if entry.starts_with(last) {
|
||||
list << append_separator_if_dir(entry)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
if do_home_expand {
|
||||
return list.map(it.replace_once(os.home_dir().trim_right(os.path_separator), '~'))
|
||||
}
|
||||
return list
|
||||
}
|
||||
|
||||
fn setup_for_shell(shell string) string {
|
||||
mut setup := ''
|
||||
match shell {
|
||||
'bash' {
|
||||
setup = '
|
||||
_v_completions() {
|
||||
local src
|
||||
local limit
|
||||
# Send all words up to the word the cursor is currently on
|
||||
let limit=1+\$COMP_CWORD
|
||||
src=\$($vexe complete bash \$(printf "%s\\n" \${COMP_WORDS[@]: 0:\$limit}))
|
||||
if [[ \$? == 0 ]]; then
|
||||
eval \${src}
|
||||
#echo \${src}
|
||||
fi
|
||||
}
|
||||
|
||||
complete -o nospace -F _v_completions v
|
||||
'
|
||||
}
|
||||
'fish' {
|
||||
setup = '
|
||||
function __v_completions
|
||||
# Send all words up to the one before the cursor
|
||||
$vexe complete fish (commandline -cop)
|
||||
end
|
||||
complete -f -c v -a "(__v_completions)"
|
||||
'
|
||||
}
|
||||
'zsh' {
|
||||
setup = '
|
||||
#compdef v
|
||||
_v() {
|
||||
local src
|
||||
# Send all words up to the word the cursor is currently on
|
||||
src=\$($vexe complete zsh \$(printf "%s\\n" \${(@)words[1,\$CURRENT]}))
|
||||
if [[ \$? == 0 ]]; then
|
||||
eval \${src}
|
||||
#echo \${src}
|
||||
fi
|
||||
}
|
||||
compdef _v v
|
||||
'
|
||||
}
|
||||
'powershell' {
|
||||
setup = '
|
||||
Register-ArgumentCompleter -Native -CommandName v -ScriptBlock {
|
||||
param(\$commandName, \$wordToComplete, \$cursorPosition)
|
||||
$vexe complete powershell "\$wordToComplete" | ForEach-Object {
|
||||
[System.Management.Automation.CompletionResult]::new(\$_, \$_, \'ParameterValue\', \$_)
|
||||
}
|
||||
}
|
||||
'
|
||||
}
|
||||
else {}
|
||||
}
|
||||
return setup
|
||||
}
|
||||
|
||||
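With the per-shell snippets now centralised in setup_for_shell(), both the zero-argument `v complete` path and the `v complete setup SHELL` subcommand reuse the same code. A hedged usage sketch, relying only on the symbols defined in this file:

```v
// Print the bootstrap script for each supported shell (illustrative only):
for shell in auto_complete_shells {
	println('# --- $shell ---')
	println(setup_for_shell(shell))
}
```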
fn main() {
|
||||
args := os.args[1..]
|
||||
// println('"$args"')
|
||||
|
||||
@@ -42,49 +42,55 @@ fn check_name(name string) string {
|
||||
}

fn vmod_content(c Create) string {
return [
'Module {',
" name: '$c.name'",
" description: '$c.description'",
" version: '$c.version'",
" license: '$c.license'",
' dependencies: []',
'}',
'',
].join_lines()
return "Module {
name: '$c.name'
description: '$c.description'
version: '$c.version'
license: '$c.license'
dependencies: []
}
"
}
|
||||
|
||||
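For reference, a hedged sketch of what the raw-string version of vmod_content() yields for a hypothetical project; the Create field names are taken from the interpolations above, and the struct-literal syntax is assumed:

```v
c := Create{
	name: 'hello'
	description: 'An example module'
	version: '0.0.1'
	license: 'MIT'
}
println(vmod_content(c))
// Module {
// 	name: 'hello'
// 	description: 'An example module'
// 	version: '0.0.1'
// 	license: 'MIT'
// 	dependencies: []
// }
```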
fn main_content() string {
|
||||
return [
|
||||
'module main\n',
|
||||
'fn main() {',
|
||||
" println('Hello World!')",
|
||||
'}',
|
||||
'',
|
||||
].join_lines()
|
||||
return "module main
|
||||
|
||||
fn main() {
|
||||
println('Hello World!')
|
||||
}
|
||||
"
|
||||
}
|
||||
|
||||
fn gen_gitignore(name string) string {
|
||||
return [
|
||||
'# Binaries for programs and plugins',
|
||||
'main',
|
||||
'$name',
|
||||
'*.exe',
|
||||
'*.exe~',
|
||||
'*.so',
|
||||
'*.dylib',
|
||||
'*.dll',
|
||||
'vls.log',
|
||||
'',
|
||||
].join_lines()
|
||||
return '# Binaries for programs and plugins
|
||||
main
|
||||
$name
|
||||
*.exe
|
||||
*.exe~
|
||||
*.so
|
||||
*.dylib
|
||||
*.dll
|
||||
vls.log
|
||||
'
|
||||
}
|
||||
|
||||
fn gitattributes_content() string {
|
||||
return [
|
||||
'*.v linguist-language=V text=auto eol=lf',
|
||||
'*.vv linguist-language=V text=auto eol=lf',
|
||||
'',
|
||||
].join_lines()
|
||||
return '*.v linguist-language=V text=auto eol=lf
|
||||
*.vv linguist-language=V text=auto eol=lf
|
||||
'
|
||||
}
|
||||
|
||||
fn editorconfig_content() string {
|
||||
return '[*]
|
||||
charset = utf-8
|
||||
end_of_line = lf
|
||||
insert_final_newline = true
|
||||
trim_trailing_whitespace = true
|
||||
|
||||
[*.v]
|
||||
indent_style = tab
|
||||
indent_size = 4
|
||||
'
|
||||
}
|
||||
|
||||
fn (c &Create) write_vmod(new bool) {
|
||||
@@ -102,9 +108,20 @@ fn (c &Create) write_main(new bool) {
|
||||
|
||||
fn (c &Create) write_gitattributes(new bool) {
|
||||
gitattributes_path := if new { '$c.name/.gitattributes' } else { '.gitattributes' }
|
||||
if !new && os.exists(gitattributes_path) {
|
||||
return
|
||||
}
|
||||
os.write_file(gitattributes_path, gitattributes_content()) or { panic(err) }
|
||||
}
|
||||
|
||||
fn (c &Create) write_editorconfig(new bool) {
|
||||
editorconfig_path := if new { '$c.name/.editorconfig' } else { '.editorconfig' }
|
||||
if !new && os.exists(editorconfig_path) {
|
||||
return
|
||||
}
|
||||
os.write_file(editorconfig_path, editorconfig_content()) or { panic(err) }
|
||||
}
|
||||
|
||||
fn (c &Create) create_git_repo(dir string) {
|
||||
// Create Git Repo and .gitignore file
|
||||
if !os.is_dir('$dir/.git') {
|
||||
@@ -151,23 +168,22 @@ fn create(args []string) {
|
||||
c.write_vmod(true)
|
||||
c.write_main(true)
|
||||
c.write_gitattributes(true)
|
||||
c.write_editorconfig(true)
|
||||
c.create_git_repo(c.name)
|
||||
}
|
||||

fn init_project() {
if os.exists('v.mod') {
cerror('`v init` cannot be run on existing v modules')
exit(3)
}
mut c := Create{}
c.name = check_name(os.file_name(os.getwd()))
c.description = ''
c.write_vmod(false)
if !os.exists('v.mod') {
c.description = ''
c.write_vmod(false)
println('Change the description of your project in `v.mod`')
}
c.write_main(false)
c.write_gitattributes(false)
c.write_editorconfig(false)
c.create_git_repo('.')

println('Change the description of your project in `v.mod`')
}

fn main() {
|
||||
|
||||
@@ -3,8 +3,7 @@ import os
|
||||
const test_path = 'vcreate_test'
|
||||
|
||||
fn init_and_check() ? {
|
||||
vexe := @VEXE
|
||||
os.execute_or_exit('$vexe init')
|
||||
os.execute_or_exit('${os.quoted_path(@VEXE)} init')
|
||||
|
||||
assert os.read_file('vcreate_test.v') ? == [
|
||||
'module main\n',
|
||||
@@ -43,6 +42,19 @@ fn init_and_check() ? {
|
||||
'*.vv linguist-language=V text=auto eol=lf',
|
||||
'',
|
||||
].join_lines()
|
||||
|
||||
assert os.read_file('.editorconfig') ? == [
|
||||
'[*]',
|
||||
'charset = utf-8',
|
||||
'end_of_line = lf',
|
||||
'insert_final_newline = true',
|
||||
'trim_trailing_whitespace = true',
|
||||
'',
|
||||
'[*.v]',
|
||||
'indent_style = tab',
|
||||
'indent_size = 4',
|
||||
'',
|
||||
].join_lines()
|
||||
}
|
||||
|
||||
fn test_v_init() ? {
|
||||
@@ -79,8 +91,36 @@ fn test_v_init_no_overwrite_gitignore() ? {
|
||||
}
|
||||
os.chdir(dir) ?
|
||||
|
||||
vexe := @VEXE
|
||||
os.execute_or_exit('$vexe init')
|
||||
os.execute_or_exit('${os.quoted_path(@VEXE)} init')
|
||||
|
||||
assert os.read_file('.gitignore') ? == 'blah'
|
||||
}
|
||||
|
||||
fn test_v_init_no_overwrite_gitattributes_and_editorconfig() ? {
|
||||
git_attributes_content := '*.v linguist-language=V text=auto eol=lf'
|
||||
editor_config_content := '[*]
|
||||
charset = utf-8
|
||||
end_of_line = lf
|
||||
insert_final_newline = true
|
||||
trim_trailing_whitespace = true
|
||||
|
||||
[*.v]
|
||||
indent_style = tab
|
||||
indent_size = 4
|
||||
'
|
||||
|
||||
dir := os.join_path(os.temp_dir(), test_path)
|
||||
os.rmdir_all(dir) or {}
|
||||
os.mkdir(dir) or {}
|
||||
os.write_file('$dir/.gitattributes', git_attributes_content) ?
|
||||
os.write_file('$dir/.editorconfig', editor_config_content) ?
|
||||
defer {
|
||||
os.rmdir_all(dir) or {}
|
||||
}
|
||||
os.chdir(dir) ?
|
||||
|
||||
os.execute_or_exit('${os.quoted_path(@VEXE)} init')
|
||||
|
||||
assert os.read_file('.gitattributes') ? == git_attributes_content
|
||||
assert os.read_file('.editorconfig') ? == editor_config_content
|
||||
}
|
||||
|
||||
@@ -254,7 +254,8 @@ fn (vd VDoc) gen_html(d doc.Doc) string {
|
||||
write_toc(cn, mut symbols_toc)
|
||||
} // write head
|
||||
// write css
|
||||
version := if vd.manifest.version.len != 0 { vd.manifest.version } else { '' }
|
||||
mut version := if vd.manifest.version.len != 0 { vd.manifest.version } else { '' }
|
||||
version = [version, @VHASH].join(' ')
|
||||
header_name := if cfg.is_multi && vd.docs.len > 1 {
|
||||
os.file_name(os.real_path(cfg.input_path))
|
||||
} else {
|
||||
@@ -421,7 +422,7 @@ fn html_highlight(code string, tb &ast.Table) string {
|
||||
break
|
||||
}
|
||||
} else {
|
||||
buf.write_b(code[i])
|
||||
buf.write_byte(code[i])
|
||||
i++
|
||||
}
|
||||
}
|
||||
|
||||
@@ -35,7 +35,7 @@ fn check_path(vexe string, dir string, tests []string) int {
|
||||
for path in paths {
|
||||
program := path
|
||||
print(path + ' ')
|
||||
res := os.execute('$vexe doc $program')
|
||||
res := os.execute('${os.quoted_path(vexe)} doc ${os.quoted_path(program)}')
|
||||
if res.exit_code < 0 {
|
||||
panic(res.output)
|
||||
}
|
||||
@@ -46,7 +46,7 @@ fn check_path(vexe string, dir string, tests []string) int {
|
||||
print_compare(expected, found)
|
||||
}
|
||||
|
||||
res_comments := os.execute('$vexe doc -comments $program')
|
||||
res_comments := os.execute('${os.quoted_path(vexe)} doc -comments ${os.quoted_path(program)}')
|
||||
if res_comments.exit_code < 0 {
|
||||
panic(res_comments.output)
|
||||
}
|
||||
|
||||
@@ -266,7 +266,7 @@ fn color_highlight(code string, tb &ast.Table) string {
|
||||
tok = next_tok
|
||||
next_tok = s.scan()
|
||||
} else {
|
||||
buf.write_b(code[i])
|
||||
buf.write_byte(code[i])
|
||||
i++
|
||||
}
|
||||
}
|
||||
|
||||
@@ -505,7 +505,7 @@ fn parse_arguments(args []string) Config {
|
||||
fn main() {
|
||||
if os.args.len < 2 || '-h' in os.args || '-help' in os.args || '--help' in os.args
|
||||
|| os.args[1..] == ['doc', 'help'] {
|
||||
os.system('$vexe help doc')
|
||||
os.system('${os.quoted_path(vexe)} help doc')
|
||||
exit(0)
|
||||
}
|
||||
args := os.args[2..].clone()
|
||||
|
||||
@@ -241,8 +241,12 @@ fn (mut a App) report_tcc_version(tccfolder string) {
|
||||
a.line(tccfolder, 'N/A')
|
||||
return
|
||||
}
|
||||
tcc_branch_name := a.cmd(command: 'git -C $tccfolder rev-parse --abbrev-ref HEAD')
|
||||
tcc_commit := a.cmd(command: 'git -C $tccfolder describe --abbrev=8 --dirty --always --tags')
|
||||
tcc_branch_name := a.cmd(
|
||||
command: 'git -C ${os.quoted_path(tccfolder)} rev-parse --abbrev-ref HEAD'
|
||||
)
|
||||
tcc_commit := a.cmd(
|
||||
command: 'git -C ${os.quoted_path(tccfolder)} describe --abbrev=8 --dirty --always --tags'
|
||||
)
|
||||
a.line('$tccfolder status', '$tcc_branch_name $tcc_commit')
|
||||
}
|
||||
|
||||
|
||||
@@ -92,7 +92,11 @@ fn main() {
|
||||
exit(0)
|
||||
}
|
||||
mut cli_args_no_files := []string{}
|
||||
for a in os.args {
|
||||
for idx, a in os.args {
|
||||
if idx == 0 {
|
||||
cli_args_no_files << os.quoted_path(a)
|
||||
continue
|
||||
}
|
||||
if a !in files {
|
||||
cli_args_no_files << a
|
||||
}
|
||||
@@ -255,7 +259,15 @@ fn (foptions &FormatOptions) post_process_file(file string, formatted_file_path
|
||||
file_bak := '${file}.bak'
|
||||
os.cp(file, file_bak) or {}
|
||||
}
|
||||
mut perms_to_restore := u32(0)
|
||||
$if !windows {
|
||||
fm := os.inode(file)
|
||||
perms_to_restore = fm.bitmask()
|
||||
}
|
||||
os.mv_by_cp(formatted_file_path, file) or { panic(err) }
|
||||
$if !windows {
|
||||
os.chmod(file, int(perms_to_restore)) or { panic(err) }
|
||||
}
|
||||
eprintln('Reformatted file: $file')
|
||||
} else {
|
||||
eprintln('Already formatted file: $file')
|
||||
|
||||
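The hunk above preserves the original file's permission bits across the copy-based move: os.mv_by_cp replaces the file, so its mode would otherwise be reset. A minimal sketch of the same pattern, assuming a Unix-like system and a hypothetical file path:

```v
import os

fn main() {
	file := 'example.v' // hypothetical path
	os.write_file(file, 'fn main() {}\n') or { panic(err) }
	mode := os.inode(file).bitmask() // remember the current permission bits
	os.write_file(file, 'fn main() {\n}\n') or { panic(err) } // some rewrite step
	os.chmod(file, int(mode)) or { panic(err) } // restore the original bits
	println('kept permission bits: $mode')
}
```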
@@ -187,7 +187,7 @@ fn generate_screenshots(mut opt Options, output_path string) ? {
|
||||
if opt.verbose {
|
||||
eprintln('Compiling shaders (if needed) for `$file`')
|
||||
}
|
||||
sh_result := os.execute('$v_exe shader "$app_path"')
|
||||
sh_result := os.execute('${os.quoted_path(v_exe)} shader ${os.quoted_path(app_path)}')
|
||||
if sh_result.exit_code != 0 {
|
||||
if opt.verbose {
|
||||
eprintln('Skipping shader compile for `$file` v shader failed with:\n$sh_result.output')
|
||||
@@ -241,14 +241,18 @@ fn compare_screenshots(opt Options, output_path string, target_path string) ? {
|
||||
diff_file := os.join_path(os.temp_dir(), os.file_name(src).all_before_last('.') +
|
||||
'.diff.tif')
|
||||
flags := app_config.compare.flags.join(' ')
|
||||
diff_cmd := '$idiff_exe $flags -od -o "$diff_file" -abs "$src" "$target"'
|
||||
result := os.execute(diff_cmd)
|
||||
if opt.verbose && result.exit_code == 0 {
|
||||
diff_cmd := '${os.quoted_path(idiff_exe)} $flags -abs -od -o ${os.quoted_path(diff_file)} -abs ${os.quoted_path(src)} ${os.quoted_path(target)}'
|
||||
if opt.verbose {
|
||||
eprintln('Running: $diff_cmd')
|
||||
eprintln('$result.output')
|
||||
}
|
||||
|
||||
result := os.execute(diff_cmd)
|
||||
|
||||
if opt.verbose && result.exit_code == 0 {
|
||||
eprintln('OUTPUT: \n$result.output')
|
||||
}
|
||||
if result.exit_code != 0 {
|
||||
eprintln('$result.output')
|
||||
eprintln('OUTPUT: \n$result.output')
|
||||
if result.exit_code == 1 {
|
||||
warns[src] = target
|
||||
} else {
|
||||
@@ -305,7 +309,7 @@ fn take_screenshots(opt Options, app AppConfig) ?[]string {
|
||||
}
|
||||
|
||||
mut flags := app.capture.flags.join(' ')
|
||||
v_cmd := '$v_exe $flags -d gg_record run "$app.abs_path"'
|
||||
v_cmd := '${os.quoted_path(v_exe)} $flags -d gg_record run ${os.quoted_path(app.abs_path)}'
|
||||
if opt.verbose {
|
||||
eprintln('Running `$v_cmd`')
|
||||
}
|
||||
|
||||
@@ -6,6 +6,7 @@ module main
|
||||
import os
|
||||
import os.cmdline
|
||||
import net.http
|
||||
import net.urllib
|
||||
import json
|
||||
import vhelp
|
||||
import v.vmod
|
||||
@@ -139,7 +140,6 @@ fn vpm_search(keywords []string) {
|
||||
joined := search_keys.join(', ')
|
||||
mut index := 0
|
||||
for mod in modules {
|
||||
// TODO for some reason .filter results in substr error, so do it manually
|
||||
for k in search_keys {
|
||||
if !mod.contains(k) {
|
||||
continue
|
||||
@@ -208,7 +208,7 @@ fn vpm_install_from_vpm(module_names []string) {
|
||||
vpm_update([name])
|
||||
continue
|
||||
}
|
||||
println('Installing module "$name" from $mod.url to $final_module_path ...')
|
||||
println('Installing module "$name" from "$mod.url" to "$final_module_path" ...')
|
||||
vcs_install_cmd := supported_vcs_install_cmds[vcs]
|
||||
cmd := '$vcs_install_cmd "$mod.url" "$final_module_path"'
|
||||
verbose_println(' command: $cmd')
|
||||
@@ -249,7 +249,7 @@ fn vpm_install_from_vcs(module_names []string, vcs_key string) {
|
||||
|
||||
first_cut_pos := url.last_index('/') or {
|
||||
errors++
|
||||
println('Errors while retrieving name for module $url:')
|
||||
println('Errors while retrieving name for module "$url" :')
|
||||
println(err)
|
||||
continue
|
||||
}
|
||||
@@ -258,7 +258,7 @@ fn vpm_install_from_vcs(module_names []string, vcs_key string) {
|
||||
|
||||
second_cut_pos := url.substr(0, first_cut_pos).last_index('/') or {
|
||||
errors++
|
||||
println('Errors while retrieving name for module $url:')
|
||||
println('Errors while retrieving name for module "$url" :')
|
||||
println(err)
|
||||
continue
|
||||
}
|
||||
@@ -276,7 +276,7 @@ fn vpm_install_from_vcs(module_names []string, vcs_key string) {
|
||||
println('VPM needs `$vcs_key` to be installed.')
|
||||
continue
|
||||
}
|
||||
println('Installing module "$name" from $url to $final_module_path ...')
|
||||
println('Installing module "$name" from "$url" to "$final_module_path" ...')
|
||||
vcs_install_cmd := supported_vcs_install_cmds[vcs_key]
|
||||
cmd := '$vcs_install_cmd "$url" "$final_module_path"'
|
||||
verbose_println(' command: $cmd')
|
||||
@@ -293,7 +293,7 @@ fn vpm_install_from_vcs(module_names []string, vcs_key string) {
|
||||
vmod := parse_vmod(data)
|
||||
mod_path := os.real_path(os.join_path(settings.vmodules_path, vmod.name.replace('.',
|
||||
os.path_separator)))
|
||||
println('Relocating module from "$name" to "$vmod.name" ( $mod_path ) ...')
|
||||
println('Relocating module from "$name" to "$vmod.name" ( "$mod_path" ) ...')
|
||||
if os.exists(mod_path) {
|
||||
println('Warning: module "$mod_path" already exists!')
|
||||
println('Removing module "$mod_path" ...')
|
||||
@@ -358,11 +358,14 @@ fn vpm_update(m []string) {
|
||||
module_names = get_installed_modules()
|
||||
}
|
||||
mut errors := 0
|
||||
for name in module_names {
|
||||
final_module_path := valid_final_path_of_existing_module(name) or { continue }
|
||||
for modulename in module_names {
|
||||
mut zname := modulename
|
||||
if mod := get_mod_by_url(modulename) {
|
||||
zname = mod.name
|
||||
}
|
||||
final_module_path := valid_final_path_of_existing_module(modulename) or { continue }
|
||||
os.chdir(final_module_path) or {}
|
||||
println('Updating module "$name"...')
|
||||
verbose_println(' work folder: $final_module_path')
|
||||
println('Updating module "$zname" in "$final_module_path" ...')
|
||||
vcs := vcs_used_in_dir(final_module_path) or { continue }
|
||||
if !ensure_vcs_is_installed(vcs[0]) {
|
||||
errors++
|
||||
@@ -374,13 +377,13 @@ fn vpm_update(m []string) {
|
||||
vcs_res := os.execute('$vcs_cmd')
|
||||
if vcs_res.exit_code != 0 {
|
||||
errors++
|
||||
println('Failed updating module "$name".')
|
||||
println('Failed updating module "$zname" in "$final_module_path" .')
|
||||
print_failed_cmd(vcs_cmd, vcs_res)
|
||||
continue
|
||||
} else {
|
||||
verbose_println(' $vcs_res.output.trim_space()')
|
||||
}
|
||||
resolve_dependencies(name, final_module_path, module_names)
|
||||
resolve_dependencies(modulename, final_module_path, module_names)
|
||||
}
|
||||
if errors > 0 {
|
||||
exit(1)
|
||||
@@ -401,7 +404,7 @@ fn get_outdated() ?[]string {
|
||||
if res.exit_code < 0 {
|
||||
verbose_println('Error command: $step')
|
||||
verbose_println('Error details:\n$res.output')
|
||||
return error('Error while checking latest commits for "$name".')
|
||||
return error('Error while checking latest commits for "$name" .')
|
||||
}
|
||||
if vcs[0] == 'hg' {
|
||||
if res.exit_code == 1 {
|
||||
@@ -462,7 +465,7 @@ fn vpm_remove(module_names []string) {
|
||||
}
|
||||
for name in module_names {
|
||||
final_module_path := valid_final_path_of_existing_module(name) or { continue }
|
||||
println('Removing module "$name"...')
|
||||
println('Removing module "$name" ...')
|
||||
verbose_println('removing folder $final_module_path')
|
||||
os.rmdir_all(final_module_path) or {
|
||||
verbose_println('error while removing "$final_module_path": $err.msg')
|
||||
@@ -482,7 +485,11 @@ fn vpm_remove(module_names []string) {
|
||||
}
|
||||
}
|
||||
|
||||
fn valid_final_path_of_existing_module(name string) ?string {
|
||||
fn valid_final_path_of_existing_module(modulename string) ?string {
|
||||
mut name := modulename
|
||||
if mod := get_mod_by_url(name) {
|
||||
name = mod.name
|
||||
}
|
||||
mod_name_as_path := name.replace('.', os.path_separator).replace('-', '_').to_lower()
|
||||
name_of_vmodules_folder := os.join_path(settings.vmodules_path, mod_name_as_path)
|
||||
final_module_path := os.real_path(name_of_vmodules_folder)
|
||||
@@ -503,7 +510,7 @@ fn valid_final_path_of_existing_module(name string) ?string {
|
||||
|
||||
fn ensure_vmodules_dir_exist() {
|
||||
if !os.is_dir(settings.vmodules_path) {
|
||||
println('Creating $settings.vmodules_path/ ...')
|
||||
println('Creating "$settings.vmodules_path/" ...')
|
||||
os.mkdir(settings.vmodules_path) or { panic(err) }
|
||||
}
|
||||
}
|
||||
@@ -600,7 +607,7 @@ fn resolve_dependencies(name string, module_path string, module_names []string)
|
||||
}
|
||||
}
|
||||
if deps.len > 0 {
|
||||
println('Resolving $deps.len dependencies for module "$name"...')
|
||||
println('Resolving $deps.len dependencies for module "$name" ...')
|
||||
verbose_println('Found dependencies: $deps')
|
||||
vpm_install(deps, Source.vpm)
|
||||
}
|
||||
@@ -662,28 +669,44 @@ fn verbose_println(s string) {
|
||||
}
|
||||
}
|
||||
|
||||
fn get_mod_by_url(name string) ?Mod {
|
||||
if purl := urllib.parse(name) {
|
||||
verbose_println('purl: $purl')
|
||||
mod := Mod{
|
||||
name: purl.path.trim_left('/').trim_right('/').replace('/', '.')
|
||||
url: name
|
||||
}
|
||||
verbose_println(mod.str())
|
||||
return mod
|
||||
}
|
||||
return error('invalid url: $name')
|
||||
}
|
||||
|
||||
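A quick sketch of how get_mod_by_url() maps an install URL to a module name (the URL below is hypothetical):

```v
// get_mod_by_url('https://github.com/someuser/somemodule') returns roughly:
// Mod{
// 	name: 'someuser.somemodule' // the URL path, with '/' replaced by '.'
// 	url: 'https://github.com/someuser/somemodule'
// }
```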
fn get_module_meta_info(name string) ?Mod {
|
||||
if mod := get_mod_by_url(name) {
|
||||
return mod
|
||||
}
|
||||
mut errors := []string{}
|
||||
for server_url in default_vpm_server_urls {
|
||||
modurl := server_url + '/jsmod/$name'
|
||||
verbose_println('Retrieving module metadata from: $modurl ...')
|
||||
verbose_println('Retrieving module metadata from: "$modurl" ...')
|
||||
r := http.get(modurl) or {
|
||||
errors << 'Http server did not respond to our request for ${modurl}.'
|
||||
errors << 'Http server did not respond to our request for "$modurl" .'
|
||||
errors << 'Error details: $err'
|
||||
continue
|
||||
}
|
||||
if r.status_code == 404 || r.text.trim_space() == '404' {
|
||||
errors << 'Skipping module "$name", since $server_url reported that "$name" does not exist.'
|
||||
errors << 'Skipping module "$name", since "$server_url" reported that "$name" does not exist.'
|
||||
continue
|
||||
}
|
||||
if r.status_code != 200 {
|
||||
errors << 'Skipping module "$name", since $server_url responded with $r.status_code http status code. Please try again later.'
|
||||
errors << 'Skipping module "$name", since "$server_url" responded with $r.status_code http status code. Please try again later.'
|
||||
continue
|
||||
}
|
||||
s := r.text
|
||||
if s.len > 0 && s[0] != `{` {
|
||||
errors << 'Invalid json data'
|
||||
errors << s.trim_space().limit(100) + '...'
|
||||
errors << s.trim_space().limit(100) + ' ...'
|
||||
continue
|
||||
}
|
||||
mod := json.decode(Mod, s) or {
|
||||
|
||||
@@ -17,13 +17,15 @@ mut:
|
||||
in_func bool // are we inside a new custom user function
|
||||
line string // the current line entered by the user
|
||||
//
|
||||
modules []string // all the import modules
|
||||
includes []string // all the #include statements
|
||||
functions []string // all the user function declarations
|
||||
functions_name []string // all the user function names
|
||||
lines []string // all the other lines/statements
|
||||
temp_lines []string // all the temporary expressions/printlns
|
||||
vstartup_lines []string // lines in the `VSTARTUP` file
|
||||
modules []string // all the import modules
|
||||
alias map[string]string // all the alias used in the import
|
||||
includes []string // all the #include statements
|
||||
functions []string // all the user function declarations
|
||||
functions_name []string // all the user function names
|
||||
lines []string // all the other lines/statements
|
||||
temp_lines []string // all the temporary expressions/printlns
|
||||
vstartup_lines []string // lines in the `VSTARTUP` file
|
||||
eval_func_lines []string // same lines as the `VSTARTUP` file, but used to test the fn type
|
||||
}
|
||||
|
||||
const is_stdin_a_pipe = (os.is_atty(0) == 0)
|
||||
@@ -32,14 +34,43 @@ const vexe = os.getenv('VEXE')
|
||||
|
||||
const vstartup = os.getenv('VSTARTUP')
|
||||
|
||||
enum FnType {
|
||||
@none
|
||||
void
|
||||
fn_type
|
||||
}
|
||||
|
||||
fn new_repl() Repl {
|
||||
return Repl{
|
||||
readline: readline.Readline{}
|
||||
readline: readline.Readline{
|
||||
skip_empty: true
|
||||
}
|
||||
modules: ['os', 'time', 'math']
|
||||
vstartup_lines: os.read_file(vstartup) or { '' }.trim_right('\n\r').split_into_lines()
|
||||
// Test file used to check if a function has a void return or a
|
||||
// value return.
|
||||
eval_func_lines: os.read_file(vstartup) or { '' }.trim_right('\n\r').split_into_lines()
|
||||
}
|
||||
}
|
||||
|
||||
fn endline_if_missed(line string) string {
|
||||
if line.ends_with('\n') {
|
||||
return line
|
||||
}
|
||||
return line + '\n'
|
||||
}
|
||||
|
||||
fn repl_help() {
|
||||
println(version.full_v_version(false))
|
||||
println('
|
||||
|help Displays this information.
|
||||
|list Show the program so far.
|
||||
|reset Clears the accumulated program, so you can start afresh.
|
||||
|Ctrl-C, Ctrl-D, exit Exits the REPL.
|
||||
|clear Clears the screen.
|
||||
'.strip_margin())
|
||||
}
|
||||
|
||||
fn (mut r Repl) checks() bool {
|
||||
mut in_string := false
|
||||
was_indent := r.indent > 0
|
||||
@@ -67,21 +98,55 @@ fn (mut r Repl) checks() bool {
|
||||
return r.in_func || (was_indent && r.indent <= 0) || r.indent > 0
|
||||
}

fn (r &Repl) function_call(line string) bool {
fn (r &Repl) function_call(line string) (bool, FnType) {
for function in r.functions_name {
is_function_definition := line.replace(' ', '').starts_with('$function:=')
if line.starts_with(function) && !is_function_definition {
return true
// TODO(vincenzopalazzo) store the type of the function here
fntype := r.check_fn_type_kind(line)
return true, fntype
}
}
return false

if line.contains(':=') {
// an assignment to a variable:
// `z := abc()`
return false, FnType.@none
}

// Check if it is a Vlib call
// TODO(vincenzopalazzo): auto import the module?
if r.is_function_call(line) {
fntype := r.check_fn_type_kind(line)
return true, fntype
}
return false, FnType.@none
}

// TODO(vincenzopalazzo) Remove this fancy check and add a regex
fn (r &Repl) is_function_call(line string) bool {
return !line.starts_with('[') && line.contains('.') && line.contains('(')
&& (line.ends_with(')') || line.ends_with('?'))
}

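A few illustrative inputs for the is_function_call() heuristic above (hypothetical REPL lines; note it only recognises dotted, module-style calls):

```v
// 'os.getwd()'        -> true  (dotted call ending in ')')
// 'time.now().unix()' -> true  (chained call)
// "os.ls('.')?"       -> true  (ends with '?', the option-propagation form)
// 'println(123)'      -> false (no '.' in the line)
// '[1, 2, 3].len'     -> false (starts with '[')
```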
// Convert the list of modules that we parsed already,
|
||||
// to a sequence of V source code lines
|
||||
fn (r &Repl) import_to_source_code() []string {
|
||||
mut imports_line := []string{}
|
||||
for mod in r.modules {
|
||||
mut import_str := 'import $mod'
|
||||
if mod in r.alias {
|
||||
import_str += ' as ${r.alias[mod]}'
|
||||
}
|
||||
imports_line << endline_if_missed(import_str)
|
||||
}
|
||||
return imports_line
|
||||
}
|
||||
|
||||
fn (r &Repl) current_source_code(should_add_temp_lines bool, not_add_print bool) string {
|
||||
mut all_lines := []string{}
|
||||
for mod in r.modules {
|
||||
all_lines << 'import $mod\n'
|
||||
}
|
||||
all_lines.insert(0, r.import_to_source_code())
|
||||
|
||||
if vstartup != '' {
|
||||
mut lines := []string{}
|
||||
if !not_add_print {
|
||||
@@ -101,21 +166,92 @@ fn (r &Repl) current_source_code(should_add_temp_lines bool, not_add_print bool)
|
||||
return all_lines.join('\n')
|
||||
}
|
||||
|
||||
fn repl_help() {
|
||||
println(version.full_v_version(false))
|
||||
println('
|
||||
|help Displays this information.
|
||||
|list Show the program so far.
|
||||
|reset Clears the accumulated program, so you can start afresh.
|
||||
|Ctrl-C, Ctrl-D, exit Exits the REPL.
|
||||
|clear Clears the screen.
|
||||
'.strip_margin())
|
||||
// the new_line is probably a function call, but some function calls
|
||||
// do not return anything, while others return results.
|
||||
// This function checks which one we have:
|
||||
fn (r &Repl) check_fn_type_kind(new_line string) FnType {
|
||||
source_code := r.current_source_code(true, false) + '\nprintln($new_line)'
|
||||
check_file := os.join_path(os.temp_dir(), '${rand.ulid()}.vrepl.check.v')
|
||||
os.write_file(check_file, source_code) or { panic(err) }
|
||||
defer {
|
||||
os.rm(check_file) or {}
|
||||
}
|
||||
// -w suppresses the unused import warnings
|
||||
// -check just does syntax and checker analysis without generating/running code
|
||||
os_response := os.execute('${os.quoted_path(vexe)} -w -check ${os.quoted_path(check_file)}')
|
||||
str_response := convert_output(os_response)
|
||||
if os_response.exit_code != 0 && str_response.contains('can not print void expressions') {
|
||||
return FnType.void
|
||||
}
|
||||
return FnType.fn_type
|
||||
}
|
||||
|
||||
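The void-vs-value classification above works by asking the compiler whether println(<expr>) of the new line type-checks. A hedged sketch of how callers are expected to use it (the example expressions are hypothetical; 'os' is one of the REPL's default imports):

```v
// r.check_fn_type_kind('os.getwd()')   // println(os.getwd()) compiles       -> FnType.fn_type
// r.check_fn_type_kind("println('x')") // 'can not print void expressions'   -> FnType.void
```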
// parse the import statement in `line`, updating the Repl alias maps
|
||||
fn (mut r Repl) parse_import(line string) {
|
||||
if !line.contains('import') {
|
||||
eprintln("the line doesn't contain an `import` keyword")
|
||||
return
|
||||
}
|
||||
tokens := r.line.fields()
|
||||
// module name
|
||||
mod := tokens[1]
|
||||
if mod !in r.modules {
|
||||
r.modules << mod
|
||||
}
|
||||
// Check if the import contains an alias
|
||||
// import mod_name as alias_mod
|
||||
if line.contains('as ') && tokens.len >= 4 {
|
||||
alias := tokens[3]
|
||||
if mod !in r.alias {
|
||||
r.alias[mod] = alias
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
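A quick illustration of what parse_import() records for an aliased import (hypothetical REPL input):

```v
// r.line = 'import math.stats as st'
// tokens -> ['import', 'math.stats', 'as', 'st']
// result -> r.modules contains 'math.stats', and r.alias['math.stats'] == 'st',
//           so import_to_source_code() later emits 'import math.stats as st'
```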
fn print_welcome_screen() {
|
||||
cmd_exit := term.highlight_command('exit')
|
||||
cmd_help := term.highlight_command('v help')
|
||||
file_main := term.highlight_command('main.v')
|
||||
cmd_run := term.highlight_command('v run main.v')
|
||||
vbar := term.bright_green('|')
|
||||
width, _ := term.get_terminal_size() // get the size of the terminal
|
||||
vlogo := [
|
||||
term.bright_blue(r' ____ ____ '),
|
||||
term.bright_blue(r' \ \ / / '),
|
||||
term.bright_blue(r' \ \/ / '),
|
||||
term.bright_blue(r' \ / '),
|
||||
term.bright_blue(r' \ / '),
|
||||
term.bright_blue(r' \__/ '),
|
||||
]
|
||||
help_text := [
|
||||
'Welcome to the V REPL (for help with V itself, type $cmd_exit, then run $cmd_help).',
|
||||
'NB: the REPL is highly experimental. For best V experience, use a text editor, ',
|
||||
'save your code in a $file_main file and execute: $cmd_run',
|
||||
version.full_v_version(false),
|
||||
'Use Ctrl-C or ${term.highlight_command('exit')} to exit, or ${term.highlight_command('help')} to see other available commands',
|
||||
]
|
||||
if width >= 97 {
|
||||
eprintln('${vlogo[0]}')
|
||||
eprintln('${vlogo[1]} $vbar ${help_text[0]}')
|
||||
eprintln('${vlogo[2]} $vbar ${help_text[1]}')
|
||||
eprintln('${vlogo[3]} $vbar ${help_text[2]}')
|
||||
eprintln('${vlogo[4]} $vbar ${help_text[3]}')
|
||||
eprintln('${vlogo[5]} $vbar ${help_text[4]}')
|
||||
eprintln('')
|
||||
} else {
|
||||
if width >= 14 {
|
||||
left_margin := ' '.repeat(int(width / 2 - 7))
|
||||
for l in vlogo {
|
||||
println(left_margin + l)
|
||||
}
|
||||
}
|
||||
println(help_text.join('\n'))
|
||||
}
|
||||
}
|
||||
|
||||
fn run_repl(workdir string, vrepl_prefix string) {
|
||||
if !is_stdin_a_pipe {
|
||||
println(version.full_v_version(false))
|
||||
println('Use Ctrl-C or ${term.highlight_command('exit')} to exit, or ${term.highlight_command('help')} to see other available commands')
|
||||
print_welcome_screen()
|
||||
}
|
||||
|
||||
if vstartup != '' {
|
||||
@@ -127,7 +263,6 @@ fn run_repl(workdir string, vrepl_prefix string) {
|
||||
print('\n')
|
||||
print_output(result)
|
||||
}
|
||||
|
||||
file := os.join_path(workdir, '.${vrepl_prefix}vrepl.v')
|
||||
temp_file := os.join_path(workdir, '.${vrepl_prefix}vrepl_temp.v')
|
||||
mut prompt := '>>> '
|
||||
@@ -196,9 +331,7 @@ fn run_repl(workdir string, vrepl_prefix string) {
|
||||
}
|
||||
if r.line == 'list' {
|
||||
source_code := r.current_source_code(true, true)
|
||||
println('//////////////////////////////////////////////////////////////////////////////////////')
|
||||
println(source_code)
|
||||
println('//////////////////////////////////////////////////////////////////////////////////////')
|
||||
println('\n${source_code.replace('\n\n', '\n')}')
|
||||
continue
|
||||
}
|
||||
// Save the source only if the user is printing something,
|
||||
@@ -215,7 +348,7 @@ fn run_repl(workdir string, vrepl_prefix string) {
|
||||
} else {
|
||||
mut temp_line := r.line
|
||||
mut temp_flag := false
|
||||
func_call := r.function_call(r.line)
|
||||
func_call, fntype := r.function_call(r.line)
|
||||
filter_line := r.line.replace(r.line.find_between("'", "'"), '').replace(r.line.find_between('"',
|
||||
'"'), '')
|
||||
possible_statement_patterns := [
|
||||
@@ -241,6 +374,7 @@ fn run_repl(workdir string, vrepl_prefix string) {
|
||||
'sort',
|
||||
'clear',
|
||||
'trim',
|
||||
'as',
|
||||
]
|
||||
mut is_statement := false
|
||||
if filter_line.count('=') % 2 == 1 {
|
||||
@@ -257,7 +391,7 @@ fn run_repl(workdir string, vrepl_prefix string) {
|
||||
if oline.starts_with(' ') {
|
||||
is_statement = true
|
||||
}
|
||||
if !is_statement && !func_call && r.line != '' {
|
||||
if !is_statement && (!func_call || fntype == FnType.fn_type) && r.line != '' {
|
||||
temp_line = 'println($r.line)'
|
||||
temp_flag = true
|
||||
}
|
||||
@@ -288,10 +422,7 @@ fn run_repl(workdir string, vrepl_prefix string) {
|
||||
r.temp_lines.delete(0)
|
||||
}
|
||||
if r.line.starts_with('import ') {
|
||||
mod := r.line.fields()[1]
|
||||
if mod !in r.modules {
|
||||
r.modules << mod
|
||||
}
|
||||
r.parse_import(r.line)
|
||||
} else if r.line.starts_with('#include ') {
|
||||
r.includes << r.line
|
||||
} else {
|
||||
@@ -307,26 +438,33 @@ fn run_repl(workdir string, vrepl_prefix string) {
|
||||
}
|
||||
}
|
||||
|
||||
fn print_output(s os.Result) {
|
||||
lines := s.output.trim_right('\n\r').split_into_lines()
|
||||
fn convert_output(os_result os.Result) string {
|
||||
lines := os_result.output.trim_right('\n\r').split_into_lines()
|
||||
mut content := ''
|
||||
for line in lines {
|
||||
if line.contains('.vrepl_temp.v:') {
|
||||
// Hide the temporary file name
|
||||
sline := line.all_after('.vrepl_temp.v:')
|
||||
idx := sline.index(' ') or {
|
||||
println(sline)
|
||||
return
|
||||
content += endline_if_missed(sline)
|
||||
return content
|
||||
}
|
||||
println(sline[idx + 1..])
|
||||
content += endline_if_missed(sline[idx + 1..])
|
||||
} else if line.contains('.vrepl.v:') {
|
||||
// Ensure that .vrepl.v: is at the start, ignore the path
|
||||
// This is needed to have stable .repl tests.
|
||||
idx := line.index('.vrepl.v:') or { return }
|
||||
println(line[idx..])
|
||||
idx := line.index('.vrepl.v:') or { panic(err) }
|
||||
content += endline_if_missed(line[idx..])
|
||||
} else {
|
||||
println(line)
|
||||
content += endline_if_missed(line)
|
||||
}
|
||||
}
|
||||
return content
|
||||
}
|
||||
|
||||
fn print_output(os_result os.Result) {
|
||||
content := convert_output(os_result)
|
||||
print(content)
|
||||
}
|
||||
|
||||
fn main() {
|
||||
@@ -381,7 +519,7 @@ fn repl_run_vfile(file string) ?os.Result {
|
||||
$if trace_repl_temp_files ? {
|
||||
eprintln('>> repl_run_vfile file: $file')
|
||||
}
|
||||
s := os.execute('"$vexe" -repl run "$file"')
|
||||
s := os.execute('${os.quoted_path(vexe)} -repl run ${os.quoted_path(file)}')
|
||||
if s.exit_code < 0 {
|
||||
rerror(s.output)
|
||||
return error(s.output)
|
||||
|
||||
@@ -8,16 +8,20 @@ import v.util.recompilation
|
||||
const is_debug = os.args.contains('-debug')
|
||||
|
||||
fn main() {
|
||||
// support a renamed `v` executable too:
|
||||
vexe := pref.vexe_path()
|
||||
vroot := os.dir(vexe)
|
||||
recompilation.must_be_enabled(vroot, 'Please install V from source, to use `v self` .')
|
||||
vexe_name := os.file_name(vexe)
|
||||
short_v_name := vexe_name.all_before('.')
|
||||
//
|
||||
recompilation.must_be_enabled(vroot, 'Please install V from source, to use `$vexe_name self` .')
|
||||
os.chdir(vroot) ?
|
||||
os.setenv('VCOLORS', 'always', true)
|
||||
args := os.args[1..].filter(it != 'self')
|
||||
jargs := args.join(' ')
|
||||
obinary := cmdline.option(args, '-o', '')
|
||||
sargs := if obinary != '' { jargs } else { '$jargs -o v2' }
|
||||
cmd := '$vexe $sargs cmd/v'
|
||||
cmd := '${os.quoted_path(vexe)} $sargs ${os.quoted_path('cmd/v')}'
|
||||
options := if args.len > 0 { '($sargs)' } else { '' }
|
||||
println('V self compiling ${options}...')
|
||||
compile(vroot, cmd)
|
||||
@@ -26,8 +30,8 @@ fn main() {
|
||||
// The user just wants an independent copy of v, and so we are done.
|
||||
return
|
||||
}
|
||||
backup_old_version_and_rename_newer() or { panic(err.msg) }
|
||||
println('V built successfully!')
|
||||
backup_old_version_and_rename_newer(short_v_name) or { panic(err.msg) }
|
||||
println('V built successfully as executable "$vexe_name".')
|
||||
}
|
||||
|
||||
fn compile(vroot string, cmd string) {
|
||||
@@ -41,7 +45,7 @@ fn compile(vroot string, cmd string) {
|
||||
}
|
||||
}
|
||||
|
||||
fn list_folder(bmessage string, message string) {
|
||||
fn list_folder(short_v_name string, bmessage string, message string) {
|
||||
if !is_debug {
|
||||
return
|
||||
}
|
||||
@@ -49,37 +53,37 @@ fn list_folder(bmessage string, message string) {
|
||||
println(bmessage)
|
||||
}
|
||||
if os.user_os() == 'windows' {
|
||||
os.system('dir v*.exe')
|
||||
os.system('dir $short_v_name*.exe')
|
||||
} else {
|
||||
os.system('ls -lartd v*')
|
||||
os.system('ls -lartd $short_v_name*')
|
||||
}
|
||||
println(message)
|
||||
}
|
||||
|
||||
fn backup_old_version_and_rename_newer() ?bool {
|
||||
fn backup_old_version_and_rename_newer(short_v_name string) ?bool {
|
||||
mut errors := []string{}
|
||||
short_v_file := if os.user_os() == 'windows' { 'v.exe' } else { 'v' }
|
||||
short_v_file := if os.user_os() == 'windows' { '${short_v_name}.exe' } else { '$short_v_name' }
|
||||
short_v2_file := if os.user_os() == 'windows' { 'v2.exe' } else { 'v2' }
|
||||
short_bak_file := if os.user_os() == 'windows' { 'v_old.exe' } else { 'v_old' }
|
||||
v_file := os.real_path(short_v_file)
|
||||
v2_file := os.real_path(short_v2_file)
|
||||
bak_file := os.real_path(short_bak_file)
|
||||
|
||||
list_folder('before:', 'removing $bak_file ...')
|
||||
list_folder(short_v_name, 'before:', 'removing $bak_file ...')
|
||||
if os.exists(bak_file) {
|
||||
os.rm(bak_file) or { errors << 'failed removing $bak_file: $err.msg' }
|
||||
}
|
||||
|
||||
list_folder('', 'moving $v_file to $bak_file ...')
|
||||
list_folder(short_v_name, '', 'moving $v_file to $bak_file ...')
|
||||
os.mv(v_file, bak_file) or { errors << err.msg }
|
||||
|
||||
list_folder('', 'removing $v_file ...')
|
||||
list_folder(short_v_name, '', 'removing $v_file ...')
|
||||
os.rm(v_file) or {}
|
||||
|
||||
list_folder('', 'moving $v2_file to $v_file ...')
|
||||
list_folder(short_v_name, '', 'moving $v2_file to $v_file ...')
|
||||
os.mv_by_cp(v2_file, v_file) or { panic(err.msg) }
|
||||
|
||||
list_folder('after:', '')
|
||||
list_folder(short_v_name, 'after:', '')
|
||||
|
||||
if errors.len > 0 {
|
||||
eprintln('backup errors:\n >> ' + errors.join('\n >> '))
|
||||
|
||||
@@ -18,9 +18,10 @@ import flag
|
||||
import net.http
|
||||
|
||||
const (
|
||||
tool_name = os.file_name(os.executable())
|
||||
shdc_full_hash = '33d2e4cc26088c6c28eaef5467990f8940d15aab'
|
||||
tool_version = '0.0.1'
|
||||
tool_description = "Compile shaders in sokol's annotated GLSL format to C headers for use with sokol based apps"
|
||||
tool_name = os.file_name(os.executable())
|
||||
cache_dir = os.join_path(os.cache_dir(), 'v', tool_name)
|
||||
runtime_os = os.user_os()
|
||||
)
|
||||
@@ -50,11 +51,11 @@ const (
|
||||
'wgpu',
|
||||
]
|
||||
|
||||
shdc_version = '33d2e4cc'
|
||||
shdc_version = shdc_full_hash[0..8]
|
||||
shdc_urls = {
|
||||
'windows': 'https://github.com/floooh/sokol-tools-bin/raw/33d2e4cc26088c6c28eaef5467990f8940d15aab/bin/win32/sokol-shdc.exe'
|
||||
'macos': 'https://github.com/floooh/sokol-tools-bin/raw/33d2e4cc26088c6c28eaef5467990f8940d15aab/bin/osx/sokol-shdc'
|
||||
'linux': 'https://github.com/floooh/sokol-tools-bin/raw/33d2e4cc26088c6c28eaef5467990f8940d15aab/bin/linux/sokol-shdc'
|
||||
'windows': 'https://github.com/floooh/sokol-tools-bin/raw/$shdc_full_hash/bin/win32/sokol-shdc.exe'
|
||||
'macos': 'https://github.com/floooh/sokol-tools-bin/raw/$shdc_full_hash/bin/osx/sokol-shdc'
|
||||
'linux': 'https://github.com/floooh/sokol-tools-bin/raw/$shdc_full_hash/bin/linux/sokol-shdc'
|
||||
}
|
||||
shdc_version_file = os.join_path(cache_dir, 'sokol-shdc.version')
|
||||
shdc = shdc_exe()
|
||||
@@ -192,7 +193,9 @@ fn compile_shader(opt CompileOptions, shader_file string) ? {
|
||||
eprintln('$tool_name generating shader code for $slangs in header "$header_name" in "$path" from $shader_file')
|
||||
}
|
||||
|
||||
cmd := '$shdc --input "$shader_file" --output "$out_file" --slang "' + slangs.join(':') + '"'
|
||||
cmd :=
|
||||
'${os.quoted_path(shdc)} --input ${os.quoted_path(shader_file)} --output ${os.quoted_path(out_file)} --slang ' +
|
||||
os.quoted_path(slangs.join(':'))
|
||||
if opt.verbose {
|
||||
eprintln('$tool_name executing:\n$cmd')
|
||||
}
|
||||
|
||||
@@ -8,13 +8,26 @@ $if windows {
|
||||
#flag -luser32
|
||||
}
|
||||
}
|
||||
|
||||
fn main() {
|
||||
C.atexit(cleanup_vtmp_folder)
|
||||
|
||||
if os.args.len > 3 {
|
||||
print('usage: v symlink [OPTIONS]')
|
||||
exit(1)
|
||||
}
|
||||
|
||||
ci_mode := '-githubci' in os.args
|
||||
|
||||
vexe := os.real_path(pref.vexe_path())
|
||||
$if windows {
|
||||
setup_symlink_windows(vexe)
|
||||
} $else {
|
||||
setup_symlink_unix(vexe)
|
||||
if ci_mode {
|
||||
setup_symlink_github()
|
||||
} else {
|
||||
$if windows {
|
||||
setup_symlink_windows(vexe)
|
||||
} $else {
|
||||
setup_symlink_unix(vexe)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -22,6 +35,20 @@ fn cleanup_vtmp_folder() {
|
||||
os.rmdir_all(util.get_vtmp_folder()) or {}
|
||||
}
|
||||
|
||||
fn setup_symlink_github() {
|
||||
// We append V's install location (which should
|
||||
// be the current directory) to the PATH environment variable.
|
||||
|
||||
// Resources:
|
||||
// 1. https://docs.github.com/en/actions/using-workflows/workflow-commands-for-github-actions#environment-files
|
||||
// 2. https://docs.github.com/en/actions/using-workflows/workflow-commands-for-github-actions#setting-an-environment-variable
|
||||
mut content := os.read_file(os.getenv('GITHUB_PATH')) or {
|
||||
panic('Failed to read GITHUB_PATH.')
|
||||
}
|
||||
content += '\n$os.getwd()\n'
|
||||
os.write_file(os.getenv('GITHUB_PATH'), content) or { panic('Failed to write to GITHUB_PATH.') }
|
||||
}
|
||||
|
||||
fn setup_symlink_unix(vexe string) {
|
||||
mut link_path := '/data/data/com.termux/files/usr/bin/v'
|
||||
if !os.is_dir('/data/data/com.termux/files') {
|
||||
|
||||
@@ -4,9 +4,11 @@ import os
|
||||
import term
|
||||
import time
|
||||
|
||||
const vexe = os.getenv('VEXE')
|
||||
const vexe_path = os.getenv('VEXE')
|
||||
|
||||
const vroot = os.dir(vexe)
|
||||
const vroot = os.dir(vexe_path)
|
||||
|
||||
const vexe = os.quoted_path(vexe_path)
|
||||
|
||||
const args_string = os.args[1..].join(' ')
|
||||
|
||||
@@ -208,8 +210,9 @@ fn get_all_commands() []Command {
|
||||
rmfile: 'examples/tetris/tetris'
|
||||
}
|
||||
$if macos || linux {
|
||||
ipath := '$vroot/thirdparty/stdatomic/nix'
|
||||
res << Command{
|
||||
line: '$vexe -o v.c cmd/v && cc -Werror -I "$vroot/thirdparty/stdatomic/nix" v.c -lpthread -lm && rm -rf a.out'
|
||||
line: '$vexe -o v.c cmd/v && cc -Werror -I ${os.quoted_path(ipath)} v.c -lpthread -lm && rm -rf a.out'
|
||||
label: 'v.c should be buildable with no warnings...'
|
||||
okmsg: 'v.c can be compiled without warnings. This is good :)'
|
||||
rmfile: 'v.c'
|
||||
|
||||
@@ -25,6 +25,7 @@ const (
|
||||
'examples/sokol/04_multi_shader_glsl/rt_glsl.v',
|
||||
'examples/sokol/05_instancing_glsl/rt_glsl.v',
|
||||
'examples/sokol/06_obj_viewer/show_obj.v',
|
||||
'vlib/v/checker/tests/modules/deprecated_module/main.v' /* adds deprecated_module. module prefix to imports, even though the folder has v.mod */,
|
||||
'vlib/gg/m4/graphic.v',
|
||||
'vlib/gg/m4/m4_test.v',
|
||||
'vlib/gg/m4/matrix.v',
|
||||
@@ -75,9 +76,14 @@ fn tsession(vargs string, tool_source string, tool_cmd string, tool_args string,
|
||||
|
||||
fn v_test_vetting(vargs string) {
|
||||
expanded_vet_list := util.find_all_v_files(vet_folders) or { return }
|
||||
vet_session := tsession(vargs, 'vvet', 'v vet', 'vet', expanded_vet_list, vet_known_failing_exceptions)
|
||||
vet_session := tsession(vargs, 'vvet', '${os.quoted_path(vexe)} vet', 'vet', expanded_vet_list,
|
||||
vet_known_failing_exceptions)
|
||||
//
|
||||
fmt_cmd, fmt_args := if is_fix { 'v fmt -w', 'fmt -w' } else { 'v fmt -verify', 'fmt -verify' }
|
||||
fmt_cmd, fmt_args := if is_fix {
|
||||
'${os.quoted_path(vexe)} fmt -w', 'fmt -w'
|
||||
} else {
|
||||
'${os.quoted_path(vexe)} fmt -verify', 'fmt -verify'
|
||||
}
|
||||
vfmt_list := util.find_all_v_files(vfmt_verify_list) or { return }
|
||||
exceptions := util.find_all_v_files(vfmt_known_failing_exceptions) or { return }
|
||||
verify_session := tsession(vargs, 'vfmt.v', fmt_cmd, fmt_args, vfmt_list, exceptions)
|
||||
|
||||
@@ -212,7 +212,7 @@ fn (mut context Context) process_whole_file_in_worker(path string) (int, int) {
|
||||
for i in 0 .. len {
|
||||
verbosity := if context.is_verbose { '-v' } else { '' }
|
||||
context.cut_index = i // needed for the progress bar
|
||||
cmd := '"$context.myself" $verbosity --worker --timeout_ms ${context.timeout_ms:5} --cut_index ${i:5} --path "$path" '
|
||||
cmd := '${os.quoted_path(context.myself)} $verbosity --worker --timeout_ms ${context.timeout_ms:5} --cut_index ${i:5} --path ${os.quoted_path(path)} '
|
||||
context.log(cmd)
|
||||
mut res := os.execute(cmd)
|
||||
context.log('worker exit_code: $res.exit_code | worker output:\n$res.output')
|
||||
|
||||
@@ -45,6 +45,7 @@ const (
|
||||
'vlib/sqlite/sqlite_orm_test.v',
|
||||
'vlib/v/tests/orm_sub_struct_test.v',
|
||||
'vlib/v/tests/orm_sub_array_struct_test.v',
|
||||
'vlib/v/tests/sql_statement_inside_fn_call_test.v',
|
||||
'vlib/vweb/tests/vweb_test.v',
|
||||
'vlib/vweb/request_test.v',
|
||||
'vlib/net/http/request_test.v',
|
||||
@@ -83,6 +84,7 @@ const (
|
||||
'vlib/orm/orm_test.v',
|
||||
'vlib/v/tests/orm_sub_struct_test.v',
|
||||
'vlib/v/tests/orm_sub_array_struct_test.v',
|
||||
'vlib/v/tests/sql_statement_inside_fn_call_test.v',
|
||||
'vlib/clipboard/clipboard_test.v',
|
||||
'vlib/vweb/tests/vweb_test.v',
|
||||
'vlib/vweb/request_test.v',
|
||||
|
||||
@@ -13,5 +13,5 @@ fn main() {
|
||||
args_str := args.join(' ')
|
||||
options := if args.len > 0 { '($args_str)' } else { '' }
|
||||
println('Compiling a `tracev` executable ${options}...')
|
||||
os.system('"$vexe" -cg -d trace_parser -d trace_checker -d trace_gen -o tracev $args_str cmd/v')
|
||||
os.system('${os.quoted_path(vexe)} -cg -d trace_parser -d trace_checker -d trace_gen -o tracev $args_str cmd/v')
|
||||
}
|
||||
|
||||
@@ -69,7 +69,7 @@ fn (app App) update_from_master() {
|
||||
fn (app App) recompile_v() {
|
||||
// NB: app.vexe is more reliable than just v (which may be a symlink)
|
||||
opts := if app.is_prod { '-prod' } else { '' }
|
||||
vself := '"$app.vexe" $opts self'
|
||||
vself := '${os.quoted_path(app.vexe)} $opts self'
|
||||
app.vprintln('> recompiling v itself with `$vself` ...')
|
||||
self_result := os.execute(vself)
|
||||
if self_result.exit_code == 0 {
|
||||
@@ -83,7 +83,7 @@ fn (app App) recompile_v() {
|
||||
}
|
||||
|
||||
fn (app App) recompile_vup() {
|
||||
vup_result := os.execute('"$app.vexe" -g cmd/tools/vup.v')
|
||||
vup_result := os.execute('${os.quoted_path(app.vexe)} -g cmd/tools/vup.v')
|
||||
if vup_result.exit_code != 0 {
|
||||
eprintln('recompiling vup.v failed:')
|
||||
eprintln(vup_result.output)
|
||||
@@ -106,7 +106,7 @@ fn (app App) make(vself string) {
|
||||
}
|
||||
|
||||
fn (app App) show_current_v_version() {
|
||||
vout := os.execute('"$app.vexe" version')
|
||||
vout := os.execute('${os.quoted_path(app.vexe)} version')
|
||||
if vout.exit_code >= 0 {
|
||||
mut vversion := vout.output.trim_space()
|
||||
if vout.exit_code == 0 {
|
||||
@@ -153,7 +153,7 @@ fn (app App) get_git() {
|
||||
eprintln('Unable to install git automatically: please install git manually')
|
||||
panic(res_download.output)
|
||||
}
|
||||
res_git32 := os.execute('$os.getwd()/git32.exe')
|
||||
res_git32 := os.execute(os.quoted_path(os.join_path_single(os.getwd(), 'git32.exe')))
|
||||
if res_git32.exit_code != 0 {
|
||||
eprintln('Unable to install git automatically: please install git manually')
|
||||
panic(res_git32.output)
|
||||
|
||||
@@ -35,7 +35,7 @@ fn check_path(vexe string, dir string, tests []string) int {
|
||||
program := path
|
||||
print(path + ' ')
|
||||
// -force is needed so that `v vet` would not skip the regression files
|
||||
res := os.execute('$vexe vet -force -nocolor $program')
|
||||
res := os.execute('${os.quoted_path(vexe)} vet -force -nocolor ${os.quoted_path(program)}')
|
||||
if res.exit_code < 0 {
|
||||
panic(res.output)
|
||||
}
|
||||
|
||||
@@ -222,7 +222,7 @@ fn (vt &Vet) e2string(err vet.Error) string {
|
||||
}
|
||||
|
||||
fn (mut vt Vet) error(msg string, line int, fix vet.FixKind) {
|
||||
pos := token.Position{
|
||||
pos := token.Pos{
|
||||
line_nr: line + 1
|
||||
}
|
||||
vt.errors << vet.Error{
|
||||
@@ -236,7 +236,7 @@ fn (mut vt Vet) error(msg string, line int, fix vet.FixKind) {
|
||||
}
|
||||
|
||||
fn (mut vt Vet) warn(msg string, line int, fix vet.FixKind) {
|
||||
pos := token.Position{
|
||||
pos := token.Pos{
|
||||
line_nr: line + 1
|
||||
}
|
||||
mut w := vet.Error{
|
||||
|
||||
@@ -190,7 +190,11 @@ see also `v help build`.
|
||||
this loop is very short usually.
|
||||
|
||||
-compress
|
||||
Strip the compiled executable to compress it.
|
||||
Compress the compiled executable with UPX.
|
||||
Note: `upx` should be installed beforehand.
|
||||
In most Linux distros it is in a package named `upx-ucl`.
|
||||
On macOS, you can install it with `brew install upx`.
|
||||
On Windows, you can download it from https://upx.github.io/ .
|
||||
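For example (assuming `upx` is already installed and available on your PATH):
    v -prod -compress -o hello examples/hello_world.v
Here `hello` is just a placeholder name for the output executable.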
|
||||
-live
|
||||
Build the executable with live capabilities (`[live]`).
|
||||
|
||||
@@ -77,15 +77,43 @@ NB: the build flags are shared with the run command too:
|
||||
to fix them first.
|
||||
|
||||
-prof, -profile <file.txt>
|
||||
Compile the executable with all functions profiled.
|
||||
Compile the executable with all functions profiled.
|
||||
The profile results will be stored in `file.txt`.
|
||||
The format is 4 fields, separated by a space, for each V function:
|
||||
a) how many times it was called
|
||||
b) how many *nanoseconds in total* it took
|
||||
c) an average for each function (i.e. (b) / (a) )
|
||||
d) the function name
|
||||
|
||||
NB: you can also combine this command with the `run` command.
|
||||
For example - `v -prof prof.txt run main.v`
|
||||
|
||||
NB: the profiler is *NOT* currently thread safe, so look at the profile results of
|
||||
multithreaded programs very sceptically!
|
||||
|
||||
NB: if you want to output the profile info to stdout, use `-profile -`.
|
||||
|
||||
NB: you can use `import v.profile`, and then calls to `profile.on(false)`
|
||||
and `profile.on(true)` to temporarily turn it off and on again.
|
||||
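For example, a minimal sketch (`do_setup` and `do_work` are placeholder names):
    import v.profile
    fn main() {
        profile.on(false) // pause collecting profile data
        do_setup() // placeholder: excluded from the profile
        profile.on(true) // resume collecting profile data
        do_work() // placeholder: included in the profile
    }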
|
||||
NB: if you do NOT want the profile to contain information from before your
|
||||
program's `fn main()` starts, pass `-d no_profile_startup` too.
|
||||
(V constants, and module init() functions are evaluated before `main()` is called)
|
||||
|
||||
NB: You can also select specific functions for profiling. For example:
|
||||
v -profile-fns println,i64_str -profile - run examples/hanoi.v
|
||||
In this case, the profile counters will be updated only for them, *and* for the functions that they call.
|
||||
The profile result (after the program finishes), will look similar to this:
|
||||
127 0.721ms 5680ns println
|
||||
127 0.693ms 5456ns _writeln_to_fd
|
||||
127 0.565ms 4449ns _write_buf_to_fd
|
||||
127 0.045ms 353ns _v_malloc
|
||||
127 0.017ms 131ns malloc_noscan
|
||||
127 0.017ms 133ns _v_free
|
||||
127 0.014ms 113ns vmemmove
|
||||
127 0.110ms 866ns i64_str
|
||||
127 0.016ms 127ns tos
|
||||
|
||||
-message-limit <limit>
|
||||
The maximum number of warnings / errors / notices that will be accumulated (defaults to 100).
|
||||
The checker will abort prematurely once this limit has been reached.
|
||||
|
||||
@@ -13,6 +13,7 @@ Flags:
|
||||
NB: There are several special keywords, which you can put after the code fences for v.
|
||||
These are:
|
||||
compile - Default, can be omitted. The example will be compiled and formatting is verified.
|
||||
cgen - The example produces C code, which may not be compilable (when external libs are not installed). Formatting is verified.
|
||||
live - Compile hot reload examples with the `-live` flag set and verify formatting.
|
||||
ignore - Ignore the example, useful for examples that just use the syntax highlighting
|
||||
failcompile - Known failing compilation. Useful for examples demonstrating compiler errors.
|
||||
|
||||
28
cmd/v/help/complete.txt
Normal file
@@ -0,0 +1,28 @@
|
||||
Usage:
|
||||
v complete [options] [SUBCMD] QUERY...
|
||||
|
||||
Description:
|
||||
Tool for bridging auto completion between various shells and v
|
||||
|
||||
Supported shells:
|
||||
bash, fish, zsh, powershell
|
||||
|
||||
Examples:
|
||||
Echo auto-detected shell install script to STDOUT
|
||||
v complete
|
||||
Echo specific shell install script to STDOUT
|
||||
v complete setup bash
|
||||
Auto complete the input `v tes` *USER PUSHES TAB* (in Bash compatible format).
|
||||
This is not meant for manual invocation - it's called by the relevant
|
||||
shell via the script installed with `v complete` or `v complete setup SHELL`.
|
||||
v complete bash v tes
|
||||
|
||||
Options:
|
||||
-h, --help Show this help text.
|
||||
|
||||
SUBCMD:
|
||||
setup : setup [SHELL] - returns the code for completion setup for SHELL
|
||||
bash : [QUERY] - returns Bash compatible completion code with completions computed from QUERY
|
||||
fish : [QUERY] - returns Fish compatible completion code with completions computed from QUERY
|
||||
zsh : [QUERY] - returns ZSH compatible completion code with completions computed from QUERY
|
||||
powershell: [QUERY] - returns PowerShell compatible completion code with completions computed from QUERY
|
||||
@@ -1,22 +1,21 @@
|
||||
import os
|
||||
|
||||
const vexe = os.getenv('VEXE')
|
||||
|
||||
fn test_help() {
|
||||
vexe := os.getenv('VEXE')
|
||||
res := os.execute('"$vexe" help')
|
||||
res := os.execute('${os.quoted_path(vexe)} help')
|
||||
assert res.exit_code == 0
|
||||
assert res.output.starts_with('V is a tool for managing V source code.')
|
||||
}
|
||||
|
||||
fn test_help_as_short_option() {
|
||||
vexe := os.getenv('VEXE')
|
||||
res := os.execute('"$vexe" -h')
|
||||
res := os.execute('${os.quoted_path(vexe)} -h')
|
||||
assert res.exit_code == 0
|
||||
assert res.output.starts_with('V is a tool for managing V source code.')
|
||||
}
|
||||
|
||||
fn test_help_as_long_option() {
|
||||
vexe := os.getenv('VEXE')
|
||||
res := os.execute('"$vexe" --help')
|
||||
res := os.execute('${os.quoted_path(vexe)} --help')
|
||||
assert res.exit_code == 0
|
||||
assert res.output.starts_with('V is a tool for managing V source code.')
|
||||
}
|
||||
|
||||
@@ -1,5 +1,7 @@
|
||||
Usage: v symlink
|
||||
Usage: v symlink [OPTIONS]
|
||||
|
||||
This command adds a symlink for the V compiler executable.
|
||||
|
||||
Note that on Unix systems this command requires write permissions to /usr/local/bin to work.
|
||||
|
||||
For GitHub Actions, the option -githubci needs to be specified.
|
||||
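For example, a GitHub Actions workflow step could run:
    v symlink -githubci
This appends V's install directory to the GITHUB_PATH environment file, so that later steps can invoke `v` directly.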
12
cmd/v/v.v
@@ -34,7 +34,7 @@ const (
|
||||
'shader',
|
||||
'symlink',
|
||||
'test',
|
||||
'test-all', /* runs most of the tests and other checking tools, that will be run by the CI */
|
||||
'test-all', // runs most of the tests and other checking tools, that will be run by the CI
|
||||
'test-cleancode',
|
||||
'test-fmt',
|
||||
'test-parser',
|
||||
@@ -65,15 +65,7 @@ fn main() {
|
||||
if args.len == 0 || args[0] in ['-', 'repl'] {
|
||||
if args.len == 0 {
|
||||
// Running `./v` without args launches repl
|
||||
if os.is_atty(0) != 0 {
|
||||
cmd_exit := term.highlight_command('exit')
|
||||
cmd_help := term.highlight_command('v help')
|
||||
file_main := term.highlight_command('main.v')
|
||||
cmd_run := term.highlight_command('v run main.v')
|
||||
println('Welcome to the V REPL (for help with V itself, type $cmd_exit, then run $cmd_help).')
|
||||
eprintln(' NB: the REPL is highly experimental. For best V experience, use a text editor,')
|
||||
eprintln(' save your code in a $file_main file and execute: $cmd_run')
|
||||
} else {
|
||||
if os.is_atty(0) == 0 {
|
||||
mut args_and_flags := util.join_env_vflags_and_os_args()[1..].clone()
|
||||
args_and_flags << ['run', '-']
|
||||
pref.parse_args_and_show_errors(external_tools, args_and_flags, true)
|
||||
|
||||
512
doc/docs.md
@@ -113,12 +113,15 @@ For more details and troubleshooting, please visit the [vab GitHub repository](h
|
||||
* [Sum types](#sum-types)
|
||||
* [Type aliases](#type-aliases)
|
||||
* [Option/Result types & error handling](#optionresult-types-and-error-handling)
|
||||
* [Custom error types](#custom-error-types)
|
||||
* [Generics](#generics)
|
||||
* [Concurrency](#concurrency)
|
||||
* [Spawning Concurrent Tasks](#spawning-concurrent-tasks)
|
||||
* [Channels](#channels)
|
||||
* [Shared Objects](#shared-objects)
|
||||
* [Decoding JSON](#decoding-json)
|
||||
* [JSON](#json)
|
||||
* [Decoding JSON](#decoding-json)
|
||||
* [Encoding JSON](#encoding-json)
|
||||
* [Testing](#testing)
|
||||
* [Memory management](#memory-management)
|
||||
* [Stack and Heap](#stack-and-heap)
|
||||
@@ -163,7 +166,7 @@ For more details and troubleshooting, please visit the [vab GitHub repository](h
|
||||
|
||||
<!--
|
||||
NB: there are several special keywords, which you can put after the code fences for v:
|
||||
compile, live, ignore, failcompile, oksyntax, badsyntax, wip, nofmt
|
||||
compile, cgen, live, ignore, failcompile, oksyntax, badsyntax, wip, nofmt
|
||||
For more details, do: `v check-md`
|
||||
-->
|
||||
|
||||
@@ -476,16 +479,33 @@ d := b + x // d is of type `f64` - automatic promotion of `x`'s value
|
||||
|
||||
### Strings
|
||||
|
||||
```v
|
||||
```v nofmt
|
||||
name := 'Bob'
|
||||
println(name.len)
|
||||
println(name[0]) // indexing gives a byte B
|
||||
println(name[1..3]) // slicing gives a string 'ob'
|
||||
windows_newline := '\r\n' // escape special characters like in C
|
||||
assert name.len == 3 // will print 3
|
||||
assert name[0] == byte(66) // indexing gives a byte, byte(66) == `B`
|
||||
assert name[1..3] == 'ob' // slicing gives a string 'ob'
|
||||
|
||||
// escape codes
|
||||
windows_newline := '\r\n' // escape special characters like in C
|
||||
assert windows_newline.len == 2
|
||||
|
||||
// arbitrary bytes can be directly specified using `\x##` notation where `#` is
|
||||
// a hex digit
aardvark_str := '\x61ardvark'
assert aardvark_str == 'aardvark'
|
||||
assert '\xc0'[0] == byte(0xc0)
|
||||
|
||||
// or using octal escape `\###` notation where `#` is an octal digit
|
||||
aardvark_str2 := '\141ardvark'
|
||||
assert aardvark_str2 == 'aardvark'
|
||||
|
||||
// Unicode can be specified directly as `\u####` where # is a hex digit
|
||||
// and will be converted internally to its UTF-8 representation
|
||||
star_str := '\u2605' // ★
|
||||
assert star_str == '★'
|
||||
assert star_str == '\xe2\x98\x85' // UTF-8 can be specified this way too.
|
||||
```
|
||||
|
||||
In V, a string is a read-only array of bytes. String data is encoded using UTF-8:
|
||||
In V, a string is a read-only array of bytes. All Unicode characters are encoded using UTF-8:
|
||||
|
||||
```v
|
||||
s := 'hello 🌎' // emoji takes 4 bytes
|
||||
assert s.len == 10
|
||||
@@ -503,11 +523,12 @@ String values are immutable. You cannot mutate elements:
|
||||
mut s := 'hello 🌎'
|
||||
s[0] = `H` // not allowed
|
||||
```
|
||||
|
||||
> error: cannot assign to `s[i]` since V strings are immutable
|
||||
|
||||
Note that indexing a string will produce a `byte`, not a `rune` nor another `string`.
|
||||
Indexes correspond to bytes in the string, not Unicode code points. If you want to
|
||||
convert the `byte` to a `string`, use the `ascii_str()` method:
|
||||
Note that indexing a string will produce a `byte`, not a `rune` nor another `string`. Indexes
|
||||
correspond to _bytes_ in the string, not Unicode code points. If you want to convert the `byte` to a
|
||||
`string`, use the `.ascii_str()` method on the `byte`:
|
||||
|
||||
```v
|
||||
country := 'Netherlands'
|
||||
@@ -515,20 +536,13 @@ println(country[0]) // Output: 78
|
||||
println(country[0].ascii_str()) // Output: N
|
||||
```
|
||||
|
||||
Character literals have type `rune`. To denote them, use `
|
||||
Both single and double quotes can be used to denote strings. For consistency, `vfmt` converts double
|
||||
quotes to single quotes unless the string contains a single quote character.
|
||||
|
||||
For raw strings, prepend `r`. Escape handling is not done for raw strings:
|
||||
|
||||
```v
|
||||
rocket := `🚀`
|
||||
assert 'aloha!'[0] == `a`
|
||||
```
|
||||
|
||||
Both single and double quotes can be used to denote strings. For consistency,
|
||||
`vfmt` converts double quotes to single quotes unless the string contains a single quote character.
|
||||
|
||||
For raw strings, prepend `r`. Raw strings are not escaped:
|
||||
|
||||
```v
|
||||
s := r'hello\nworld'
|
||||
s := r'hello\nworld' // the `\n` will be preserved as two characters
|
||||
println(s) // "hello\nworld"
|
||||
```
|
||||
|
||||
@@ -537,41 +551,79 @@ Strings can be easily converted to integers:
|
||||
```v
|
||||
s := '42'
|
||||
n := s.int() // 42
|
||||
|
||||
// all int literals are supported
|
||||
assert '0xc3'.int() == 195
|
||||
assert '0o10'.int() == 8
|
||||
assert '0b1111_0000_1010'.int() == 3850
|
||||
assert '-0b1111_0000_1010'.int() == -3850
|
||||
```
|
||||
|
||||
### Runes
|
||||
A `rune` represents a unicode character and is an alias for `u32`. Runes can be created like this:
|
||||
```v
|
||||
x := `🚀`
|
||||
```
|
||||
|
||||
A string can be converted to runes by the `.runes()` method.
|
||||
```v
|
||||
hello := 'Hello World 👋'
|
||||
hello_runes := hello.runes() // [`H`, `e`, `l`, `l`, `o`, ` `, `W`, `o`, `r`, `l`, `d`, ` `, `👋`]
|
||||
```
|
||||
For more advanced `string` processing and conversions, refer to the
|
||||
[vlib/strconv](https://modules.vlang.io/strconv.html) module.
|
||||
|
||||
### String interpolation
|
||||
|
||||
Basic interpolation syntax is pretty simple - use `$` before a variable name.
|
||||
The variable will be converted to a string and embedded into the literal:
|
||||
Basic interpolation syntax is pretty simple - use `$` before a variable name. The variable will be
|
||||
converted to a string and embedded into the literal:
|
||||
|
||||
```v
|
||||
name := 'Bob'
|
||||
println('Hello, $name!') // Hello, Bob!
|
||||
```
|
||||
It also works with fields: `'age = $user.age'`.
|
||||
If you need more complex expressions, use `${}`: `'can register = ${user.age > 13}'`.
|
||||
|
||||
Format specifiers similar to those in C's `printf()` are also supported.
|
||||
`f`, `g`, `x`, etc. are optional and specify the output format.
|
||||
The compiler takes care of the storage size, so there is no `hd` or `llu`.
|
||||
It also works with fields: `'age = $user.age'`. If you need more complex expressions, use `${}`:
|
||||
`'can register = ${user.age > 13}'`.
|
||||
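A small, self-contained illustration of both forms (the `User` struct here is just an example):

```v
struct User {
	name string
	age  int
}

fn main() {
	user := User{'Bob', 20}
	println('age = $user.age') // age = 20
	println('can register = ${user.age > 13}') // can register = true
}
```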
|
||||
Format specifiers similar to those in C's `printf()` are also supported. `f`, `g`, `x`, `o`, `b`,
|
||||
etc. are optional and specify the output format. The compiler takes care of the storage size, so
|
||||
there is no `hd` or `llu`.
|
||||
|
||||
To use a format specifier, follow this pattern:
|
||||
|
||||
`${varname:[flags][width][.precision][type]}`
|
||||
|
||||
- flags: may be zero or more of the following: `-` to left-align output within the field, `0` to use
|
||||
`0` as the padding character instead of the default `space` character. (Note: V does not currently
|
||||
support the use of `'` or `#` as format flags, and V supports but doesn't need `+` to right-align
|
||||
since that's the default.)
|
||||
- width: may be an integer value describing the minimum width of the total field to output.
|
||||
- precision: an integer value preceded by a `.` will guarantee that many digits after the decimal
|
||||
point, if the input variable is a float. It is ignored if the variable is an integer.
|
||||
- type: `f` and `F` specify the input is a float and should be rendered as such, `e` and `E` specify
|
||||
the input is a float and should be rendered as an exponent (partially broken), `g` and `G` specify
|
||||
the input is a float--the renderer will use floating point notation for small values and exponent
|
||||
notation for large values, `d` specifies the input is an integer and should be rendered in base-10
|
||||
digits, `x` and `X` require an integer and will render it as hexadecimal digits, `o` requires an
|
||||
integer and will render it as octal digits, `b` requires an integer and will render it as binary
|
||||
digits, `s` requires a string (almost never used).
|
||||
|
||||
Note: when a numeric type can render alphabetic characters, such as hex strings or special values
|
||||
like `infinity`, the lowercase version of the type forces lowercase alphabetics and the uppercase
|
||||
version forces uppercase alphabetics.
|
||||
|
||||
Also note: in most cases, it's best to leave the format type empty. Floats will be rendered by
|
||||
default as `g`, integers will be rendered by default as `d`, and `s` is almost always redundant.
|
||||
There are only three cases where specifying a type is recommended:
|
||||
|
||||
- format strings are parsed at compile time, so specifying a type can help detect errors at compile time
|
||||
- format strings default to using lowercase letters for hex digits and the `e` in exponents. Use an
|
||||
uppercase type to force the use of uppercase hex digits and an uppercase `E` in exponents.
|
||||
- format strings are the most convenient way to get hex, binary or octal strings from an integer.
|
||||
|
||||
See
|
||||
[Format Placeholder Specification](https://en.wikipedia.org/wiki/Printf_format_string#Format_placeholder_specification)
|
||||
for more information.
|
||||
|
||||
```v
|
||||
x := 123.4567
|
||||
println('x = ${x:4.2f}')
|
||||
println('[${x:10}]') // pad with spaces on the left => [ 123.457]
|
||||
println('[${int(x):-10}]') // pad with spaces on the right => [123 ]
|
||||
println('[${x:.2}]') // round to two decimal places => [123.46]
|
||||
println('[${x:10}]') // right-align with spaces on the left => [ 123.457]
|
||||
println('[${int(x):-10}]') // left-align with spaces on the right => [123 ]
|
||||
println('[${int(x):010}]') // pad with zeros on the left => [0000000123]
|
||||
println('[${int(x):b}]') // output as binary => [1111011]
|
||||
println('[${int(x):o}]') // output as octal => [173]
|
||||
println('[${int(x):X}]') // output as uppercase hex => [7B]
|
||||
```
|
||||
|
||||
### String operators
|
||||
@@ -585,13 +637,14 @@ s += 'world' // `+=` is used to append to a string
|
||||
println(s) // "hello world"
|
||||
```
|
||||
|
||||
All operators in V must have values of the same type on both sides.
|
||||
You cannot concatenate an integer to a string:
|
||||
All operators in V must have values of the same type on both sides. You cannot concatenate an
|
||||
integer to a string:
|
||||
|
||||
```v failcompile
|
||||
age := 10
|
||||
println('age = ' + age) // not allowed
|
||||
```
|
||||
|
||||
> error: infix expr: cannot use `int` (right expression) as `string`
|
||||
|
||||
We have to either convert `age` to a `string`:
|
||||
@@ -608,6 +661,63 @@ age := 12
|
||||
println('age = $age')
|
||||
```
|
||||
|
||||
### Runes
|
||||
|
||||
A `rune` represents a single Unicode character and is an alias for `u32`. To denote them, use `
|
||||
(backticks):
|
||||
|
||||
```v
|
||||
rocket := `🚀`
|
||||
```
|
||||
|
||||
A `rune` can be converted to a UTF-8 string by using the `.str()` method.
|
||||
|
||||
```v
|
||||
rocket := `🚀`
|
||||
assert rocket.str() == '🚀'
|
||||
```
|
||||
|
||||
A `rune` can be converted to UTF-8 bytes by using the `.bytes()` method.
|
||||
|
||||
```v
|
||||
rocket := `🚀`
|
||||
assert rocket.bytes() == [byte(0xf0), 0x9f, 0x9a, 0x80]
|
||||
```
|
||||
|
||||
Hex, Unicode, and Octal escape sequences also work in a `rune` literal:
|
||||
|
||||
```v
|
||||
assert `\x61` == `a`
|
||||
assert `\141` == `a`
|
||||
assert `\u0061` == `a`
|
||||
|
||||
// multibyte literals work too
|
||||
assert `\u2605` == `★`
|
||||
assert `\u2605`.bytes() == [byte(0xe2), 0x98, 0x85]
|
||||
assert `\xe2\x98\x85`.bytes() == [byte(0xe2), 0x98, 0x85]
|
||||
assert `\342\230\205`.bytes() == [byte(0xe2), 0x98, 0x85]
|
||||
```
|
||||
|
||||
Note that `rune` literals use the same escape syntax as strings, but they can only hold one Unicode
|
||||
character. Therefore, if your code does not specify a single Unicode character, you will receive an
|
||||
error at compile time.
|
||||
|
||||
Also remember that strings are indexed as bytes, not runes, so beware:
|
||||
|
||||
```v
|
||||
rocket_string := '🚀'
|
||||
assert rocket_string[0] != `🚀`
|
||||
assert 'aloha!'[0] == `a`
|
||||
```
|
||||
|
||||
A string can be converted to runes by the `.runes()` method.
|
||||
|
||||
```v
|
||||
hello := 'Hello World 👋'
|
||||
hello_runes := hello.runes() // [`H`, `e`, `l`, `l`, `o`, ` `, `W`, `o`, `r`, `l`, `d`, ` `, `👋`]
|
||||
assert hello_runes.string() == hello
|
||||
```
|
||||
|
||||
### Numbers
|
||||
|
||||
```v
|
||||
@@ -676,12 +786,12 @@ println(nums[1]) // `2`
|
||||
nums[1] = 5
|
||||
println(nums) // `[1, 5, 3]`
|
||||
```
|
||||
#### Array Properties
|
||||
There are two properties that control the "size" of an array:
|
||||
#### Array Fields
|
||||
There are two fields that control the "size" of an array:
|
||||
* `len`: *length* - the number of pre-allocated and initialized elements in the array
|
||||
* `cap`: *capacity* - the amount of memory space which has been reserved for elements,
|
||||
but not initialized or counted as elements. The array can grow up to this size without
|
||||
being reallocated. Usually, V takes care of this property automatically but there are
|
||||
being reallocated. Usually, V takes care of this field automatically but there are
|
||||
cases where the user may want to do manual optimizations (see [below](#array-initialization)).
|
||||
|
||||
```v
|
||||
@@ -692,7 +802,7 @@ nums = [] // The array is now empty
|
||||
println(nums.len) // "0"
|
||||
```
|
||||
|
||||
Note that the properties are read-only fields and can't be modified by the user.
|
||||
Note that fields are read-only and can't be modified by the user.
|
||||
|
||||
#### Array Initialization
|
||||
The basic initialization syntax is as described [above](#basic-array-concepts).
|
||||
@@ -1287,7 +1397,7 @@ println(s)
|
||||
You can check the current type of a sum type using `is` and its negated form `!is`.
|
||||
|
||||
You can do it either in an `if`:
|
||||
```v
|
||||
```v cgen
|
||||
struct Abc {
|
||||
val string
|
||||
}
|
||||
@@ -1990,8 +2100,7 @@ fn main() {
|
||||
}
|
||||
```
|
||||
|
||||
This means that defining public readonly fields is very easy in V,
|
||||
no need in getters/setters or properties.
|
||||
This means that defining public readonly fields is very easy in V.
|
||||
|
||||
## Methods
|
||||
|
||||
@@ -2539,20 +2648,20 @@ particularly useful for initializing a C library.
|
||||
## Type Declarations
|
||||
|
||||
### Interfaces
|
||||
|
||||
```v
|
||||
// interface-example.1
|
||||
struct Dog {
|
||||
breed string
|
||||
}
|
||||
|
||||
struct Cat {
|
||||
breed string
|
||||
}
|
||||
|
||||
fn (d Dog) speak() string {
|
||||
return 'woof'
|
||||
}
|
||||
|
||||
struct Cat {
|
||||
breed string
|
||||
}
|
||||
|
||||
fn (c Cat) speak() string {
|
||||
return 'meow'
|
||||
}
|
||||
@@ -2563,14 +2672,16 @@ interface Speaker {
|
||||
speak() string
|
||||
}
|
||||
|
||||
dog := Dog{'Leonberger'}
|
||||
cat := Cat{'Siamese'}
|
||||
fn main() {
|
||||
dog := Dog{'Leonberger'}
|
||||
cat := Cat{'Siamese'}
|
||||
|
||||
mut arr := []Speaker{}
|
||||
arr << dog
|
||||
arr << cat
|
||||
for item in arr {
|
||||
println('a $item.breed says: $item.speak()')
|
||||
mut arr := []Speaker{}
|
||||
arr << dog
|
||||
arr << cat
|
||||
for item in arr {
|
||||
println('a $item.breed says: $item.speak()')
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
@@ -2583,6 +2694,7 @@ An interface can have a `mut:` section. Implementing types will need
|
||||
to have a `mut` receiver, for methods declared in the `mut:` section
|
||||
of an interface.
|
||||
```v
|
||||
// interface-example.2
|
||||
module main
|
||||
|
||||
pub interface Foo {
|
||||
@@ -2626,54 +2738,119 @@ fn fn1(s Foo) {
|
||||
|
||||
We can test the underlying type of an interface using dynamic cast operators:
|
||||
```v oksyntax
|
||||
// interface-example.3 (continued from interface-example.1)
|
||||
interface Something {}
|
||||
|
||||
fn announce(s Something) {
|
||||
if s is Dog {
|
||||
println('a $s.breed dog') // `s` is automatically cast to `Dog` (smart cast)
|
||||
} else if s is Cat {
|
||||
println('a $s.breed cat')
|
||||
println('a cat speaks $s.speak()')
|
||||
} else {
|
||||
println('something else')
|
||||
}
|
||||
}
|
||||
|
||||
fn main() {
|
||||
dog := Dog{'Leonberger'}
|
||||
cat := Cat{'Siamese'}
|
||||
announce(dog)
|
||||
announce(cat)
|
||||
}
|
||||
```
|
||||
|
||||
```v
|
||||
// interface-example.4
|
||||
interface IFoo {
|
||||
foo()
|
||||
}
|
||||
|
||||
interface IBar {
|
||||
bar()
|
||||
}
|
||||
|
||||
// implements only IFoo
|
||||
struct SFoo {}
|
||||
|
||||
fn (sf SFoo) foo() {}
|
||||
|
||||
// implements both IFoo and IBar
|
||||
struct SFooBar {}
|
||||
|
||||
fn (sfb SFooBar) foo() {}
|
||||
|
||||
fn (sfb SFooBar) bar() {
|
||||
dump('This implements IBar')
|
||||
}
|
||||
|
||||
fn main() {
|
||||
mut arr := []IFoo{}
|
||||
arr << SFoo{}
|
||||
arr << SFooBar{}
|
||||
|
||||
for a in arr {
|
||||
dump(a)
|
||||
// In order to call `bar()` on instances that implement IBar:
|
||||
if a is IBar {
|
||||
// a.bar() // Error.
|
||||
b := a as IBar
|
||||
dump(b)
|
||||
b.bar()
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
For more information, see [Dynamic casts](#dynamic-casts).
|
||||
|
||||
#### Interface method definitions
|
||||
|
||||
Also unlike Go, an interface may implement a method.
|
||||
These methods are not implemented by structs which implement that interface.
|
||||
Also unlike Go, an interface can have its own methods, similar to how
|
||||
structs can have their methods. These 'interface methods' do not have
|
||||
to be implemented by structs which implement that interface.
|
||||
They are just a convenient way to write `i.some_function()` instead of
|
||||
`some_function(i)`, similar to how struct methods can be seen as
|
||||
a convenience for writing `s.xyz()` instead of `xyz(s)`.
|
||||
|
||||
When a struct is wrapped in an interface that has implemented a method
|
||||
with the same name as one implemented by this struct, only the method
|
||||
implemented on the interface is called.
|
||||
N.B. This feature is NOT a "default implementation" like in C#.
|
||||
|
||||
For example, if a struct `cat` is wrapped in an interface `a` that has
|
||||
implemented a method with the same name `speak` as a method implemented by
|
||||
the struct, and you do `a.speak()`, *only* the interface method is called:
|
||||
|
||||
```v
|
||||
struct Cat {}
|
||||
|
||||
fn (c Cat) speak() string {
|
||||
return 'meow!'
|
||||
}
|
||||
|
||||
interface Adoptable {}
|
||||
|
||||
fn (a Adoptable) speak() string {
|
||||
return 'adopt me!'
|
||||
}
|
||||
|
||||
fn new_adoptable() Adoptable {
|
||||
return Cat{}
|
||||
struct Cat {}
|
||||
|
||||
fn (c Cat) speak() string {
|
||||
return 'meow!'
|
||||
}
|
||||
|
||||
struct Dog {}
|
||||
|
||||
fn main() {
|
||||
cat := Cat{}
|
||||
assert cat.speak() == 'meow!'
|
||||
a := new_adoptable()
|
||||
assert a.speak() == 'adopt me!'
|
||||
assert dump(cat.speak()) == 'meow!'
|
||||
//
|
||||
a := Adoptable(cat)
|
||||
assert dump(a.speak()) == 'adopt me!' // call Adoptable's `speak`
|
||||
if a is Cat {
|
||||
println(a.speak()) // meow!
|
||||
// Inside this `if` however, V knows that `a` is not just any
|
||||
// kind of Adoptable, but actually a Cat, so it will use the
|
||||
// Cat `speak`, NOT the Adoptable `speak`:
|
||||
dump(a.speak()) // meow!
|
||||
}
|
||||
//
|
||||
b := Adoptable(Dog{})
|
||||
assert dump(b.speak()) == 'adopt me!' // call Adoptable's `speak`
|
||||
// if b is Dog {
|
||||
// dump(b.speak()) // error: unknown method or field: Dog.speak
|
||||
// }
|
||||
}
|
||||
```
|
||||
|
||||
@@ -3175,6 +3352,39 @@ if resp := http.get('https://google.com') {
|
||||
Above, `http.get` returns a `?http.Response`. `resp` is only in scope for the first
|
||||
`if` branch. `err` is only in scope for the `else` branch.
|
||||
|
||||
|
||||
## Custom error types
|
||||
|
||||
V gives you the ability to define custom error types through the `IError` interface.
|
||||
The interface requires two methods: `msg() string` and `code() int`. Every type that
|
||||
implements these methods can be used as an error.
|
||||
|
||||
When defining a custom error type it is recommended to embed the builtin `Error` default
|
||||
implementation. This provides an empty default implementation for both required methods,
|
||||
so you only have to implement what you really need, and may provide additional utility
|
||||
functions in the future.
|
||||
|
||||
```v
|
||||
struct PathError {
|
||||
Error
|
||||
path string
|
||||
}
|
||||
|
||||
fn (err PathError) msg() string {
|
||||
return 'Failed to open path: $err.path'
|
||||
}
|
||||
|
||||
fn try_open(path string) ? {
|
||||
return IError(PathError{
|
||||
path: path
|
||||
})
|
||||
}
|
||||
|
||||
fn main() {
|
||||
try_open('/tmp') or { panic(err) }
|
||||
}
|
||||
```
|
||||
|
||||
## Generics
|
||||
|
||||
```v wip
|
||||
@@ -3356,12 +3566,12 @@ fn main() {
|
||||
```
|
||||
|
||||
### Channels
|
||||
Channels are the preferred way to communicate between coroutines. V's channels work basically like
|
||||
Channels are the preferred way to communicate between threads. V's channels work basically like
|
||||
those in Go. You can push objects into a channel on one end and pop objects from the other end.
|
||||
Channels can be buffered or unbuffered and it is possible to `select` from multiple channels.
|
||||
|
||||
#### Syntax and Usage
|
||||
Channels have the type `chan objtype`. An optional buffer length can specified as the `cap` property
|
||||
Channels have the type `chan objtype`. An optional buffer length can be specified as the `cap` field
|
||||
in the declaration:
|
||||
|
||||
```v
|
||||
@@ -3370,7 +3580,7 @@ ch2 := chan f64{cap: 100} // buffer length 100
|
||||
```
|
||||
|
||||
Channels do not have to be declared as `mut`. The buffer length is not part of the type but
|
||||
a property of the individual channel object. Channels can be passed to coroutines like normal
|
||||
a field of the individual channel object. Channels can be passed to threads like normal
|
||||
variables:
|
||||
|
||||
```v
|
||||
@@ -3491,7 +3701,7 @@ if select {
|
||||
|
||||
#### Special Channel Features
|
||||
|
||||
For special purposes there are some builtin properties and methods:
|
||||
For special purposes there are some builtin fields and methods:
|
||||
```v
|
||||
struct Abc {
|
||||
x int
|
||||
@@ -3514,7 +3724,7 @@ res2 := ch2.try_pop(mut b) // try to perform `b = <-ch2`
|
||||
The `try_push/pop()` methods will return immediately with one of the results
|
||||
`.success`, `.not_ready` or `.closed` - dependent on whether the object has been transferred or
|
||||
the reason why not.
|
||||
Usage of these methods and properties in production is not recommended -
|
||||
Usage of these methods and fields in production is not recommended -
|
||||
algorithms based on them are often subject to race conditions. Especially `.len` and
|
||||
`.closed` should not be used to make decisions.
|
||||
Use `or` branches, error propagation or `select` instead (see [Syntax and Usage](#syntax-and-usage)
|
||||
@@ -3522,8 +3732,8 @@ and [Channel Select](#channel-select) above).
|
||||
|
||||
### Shared Objects
|
||||
|
||||
Data can be exchanged between a coroutine and the calling thread via a shared variable.
|
||||
Such variables should be created as `shared` and passed to the coroutine as such, too.
|
||||
Data can be exchanged between a thread and the calling thread via a shared variable.
|
||||
Such variables should be created as `shared` and passed to the thread as such, too.
|
||||
The underlying `struct` contains a hidden *mutex* that allows locking concurrent access
|
||||
using `rlock` for read-only and `lock` for read/write access.
|
||||
|
||||
@@ -3552,7 +3762,14 @@ fn main() {
|
||||
```
|
||||
Shared variables must be structs, arrays or maps.
|
||||
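A minimal sketch of this pattern, using a hypothetical `Counter` struct:

```v
struct Counter {
mut:
	n int
}

fn (shared c Counter) add(x int) {
	lock c { // read/write access requires `lock`
		c.n += x
	}
}

fn main() {
	shared c := Counter{}
	t := go c.add(5)
	t.wait()
	rlock c { // read-only access requires only `rlock`
		println(c.n) // 5
	}
}
```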
|
||||
## Decoding JSON
|
||||
## JSON
|
||||
|
||||
Because of the ubiquitous nature of JSON, support for it is built directly into V.
|
||||
|
||||
V generates code for JSON encoding and decoding.
|
||||
No runtime reflection is used. This results in much better performance.
|
||||
|
||||
### Decoding JSON
|
||||
|
||||
```v
|
||||
import json
|
||||
@@ -3590,14 +3807,32 @@ println(foos[0].x)
|
||||
println(foos[1].x)
|
||||
```
|
||||
|
||||
Because of the ubiquitous nature of JSON, support for it is built directly into V.
|
||||
|
||||
The `json.decode` function takes two arguments:
|
||||
the first is the type into which the JSON value should be decoded and
|
||||
the second is a string containing the JSON data.
|
||||
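A minimal decode example (the `Point` struct is hypothetical):

```v
import json

struct Point {
	x int
	y int
}

fn main() {
	p := json.decode(Point, '{"x": 1, "y": 2}') or { panic(err) }
	println(p.x) // 1
}
```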
|
||||
V generates code for JSON encoding and decoding.
|
||||
No runtime reflection is used. This results in much better performance.
|
||||
### Encoding JSON
|
||||
|
||||
```v
|
||||
import json
|
||||
|
||||
struct User {
|
||||
name string
|
||||
score i64
|
||||
}
|
||||
|
||||
mut data := map[string]int{}
|
||||
user := &User{
|
||||
name: 'Pierre'
|
||||
score: 1024
|
||||
}
|
||||
|
||||
data['x'] = 42
|
||||
data['y'] = 360
|
||||
|
||||
println(json.encode(data)) // {"x":42,"y":360}
|
||||
println(json.encode(user)) // {"name":"Pierre","score":1024}
|
||||
```
|
||||
|
||||
## Testing
|
||||
|
||||
@@ -3698,7 +3933,7 @@ file, can easily run *other* test files like this:
|
||||
import os
|
||||
|
||||
fn test_subtest() {
|
||||
res := os.execute('${@VEXE} other_test.v')
|
||||
res := os.execute('${os.quoted_path(@VEXE)} other_test.v')
|
||||
assert res.exit_code == 1
|
||||
assert res.output.contains('other_test.v does not exist')
|
||||
}
|
||||
@@ -3741,6 +3976,11 @@ memory manually. (See [attributes](#attributes)).
|
||||
_Note: right now autofree is hidden behind the -autofree flag. It will be enabled by
|
||||
default in V 0.3. If autofree is not used, V programs will leak memory._
|
||||
|
||||
Note 2: Autofree is still WIP. Until it stabilises and becomes the default, please
|
||||
compile your long running processes with `-gc boehm`, which will use the
|
||||
Boehm-Demers-Weiser conservative garbage collector, to free the memory that your
|
||||
programs leak at runtime.
|
||||
|
||||
### Examples
|
||||
|
||||
```v
|
||||
@@ -4101,7 +4341,7 @@ sql db {
|
||||
}
|
||||
```
|
||||
|
||||
For more examples and the docs, see <a href='https://github.com/vlang/v/tree/master/vlib/orm'>vlib/orm</a>.
|
||||
For more examples and the docs, see [vlib/orm](https://github.com/vlang/v/tree/master/vlib/orm).
|
||||
|
||||
## Writing Documentation
|
||||
|
||||
@@ -4950,40 +5190,42 @@ For all supported options check the latest help:
|
||||
|
||||
#### `$if` condition
|
||||
```v
|
||||
// Support for multiple conditions in one branch
|
||||
$if ios || android {
|
||||
println('Running on a mobile device!')
|
||||
}
|
||||
$if linux && x64 {
|
||||
println('64-bit Linux.')
|
||||
}
|
||||
// Usage as expression
|
||||
os := $if windows { 'Windows' } $else { 'UNIX' }
|
||||
println('Using $os')
|
||||
// $else-$if branches
|
||||
$if tinyc {
|
||||
println('tinyc')
|
||||
} $else $if clang {
|
||||
println('clang')
|
||||
} $else $if gcc {
|
||||
println('gcc')
|
||||
} $else {
|
||||
println('different compiler')
|
||||
}
|
||||
$if test {
|
||||
println('testing')
|
||||
}
|
||||
// v -cg ...
|
||||
$if debug {
|
||||
println('debugging')
|
||||
}
|
||||
// v -prod ...
|
||||
$if prod {
|
||||
println('production build')
|
||||
}
|
||||
// v -d option ...
|
||||
$if option ? {
|
||||
println('custom option')
|
||||
fn main() {
|
||||
// Support for multiple conditions in one branch
|
||||
$if ios || android {
|
||||
println('Running on a mobile device!')
|
||||
}
|
||||
$if linux && x64 {
|
||||
println('64-bit Linux.')
|
||||
}
|
||||
// Usage as expression
|
||||
os := $if windows { 'Windows' } $else { 'UNIX' }
|
||||
println('Using $os')
|
||||
// $else-$if branches
|
||||
$if tinyc {
|
||||
println('tinyc')
|
||||
} $else $if clang {
|
||||
println('clang')
|
||||
} $else $if gcc {
|
||||
println('gcc')
|
||||
} $else {
|
||||
println('different compiler')
|
||||
}
|
||||
$if test {
|
||||
println('testing')
|
||||
}
|
||||
// v -cg ...
|
||||
$if debug {
|
||||
println('debugging')
|
||||
}
|
||||
// v -prod ...
|
||||
$if prod {
|
||||
println('production build')
|
||||
}
|
||||
// v -d option ...
|
||||
$if option ? {
|
||||
println('custom option')
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
@@ -5394,6 +5636,12 @@ fn main() {
|
||||
```
|
||||
|
||||
Build this example with `v -live message.v`.
|
||||
|
||||
You can also run this example with `v -live run message.v`.
|
||||
Make sure that in the command you use a path to a V file,
|
||||
**not** a path to a folder (like `v -live run .`) -
|
||||
in that case you would need to modify the contents of the folder (add a new file, for example),
|
||||
because changes in *message.v* will have no effect.
|
||||
|
||||
Functions that you want to be reloaded must have the `[live]` attribute
|
||||
before their definition.
|
||||
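For reference, a reloadable function could look like this (a fragment, not a complete program):

```v oksyntax
[live]
fn print_message() {
	println('Hello! Modify this text while the program is running.')
}
```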
@@ -5401,7 +5649,7 @@ before their definition.
|
||||
Right now it's not possible to modify types while the program is running.
|
||||
|
||||
More examples, including a graphical application:
|
||||
[github.com/vlang/v/tree/master/examples/hot_code_reload](https://github.com/vlang/v/tree/master/examples/hot_reload).
|
||||
[github.com/vlang/v/tree/master/examples/hot_reload](https://github.com/vlang/v/tree/master/examples/hot_reload).
|
||||
|
||||
## Cross compilation
|
||||
|
||||
|
||||
5
examples/call_v_from_python/README.md
Normal file
@@ -0,0 +1,5 @@
|
||||
A simple example that shows how to call a function written in V from Python.
|
||||
|
||||
Step 1: Compile the V code to a shared library using ``v -shared -prod test.v``
|
||||
|
||||
Step 2: Run the Python file using ``python3 test.py``
|
||||
8
examples/call_v_from_python/test.py
Normal file
@@ -0,0 +1,8 @@
|
||||
from ctypes import *
|
||||
import os
|
||||
|
||||
so_file="./test.so"
|
||||
if os.name=="nt":
|
||||
so_file="./test.dll"
|
||||
my_functions = CDLL(so_file)
|
||||
print(my_functions.square(10))
|
||||
6
examples/call_v_from_python/test.v
Normal file
@@ -0,0 +1,6 @@
|
||||
module test
|
||||
|
||||
[export: 'square']
|
||||
fn square(i int) int {
|
||||
return i * i
|
||||
}
|
||||
@@ -47,7 +47,7 @@ fn test_can_compile_and_use_library_with_skip_unused() {
|
||||
}
|
||||
|
||||
fn v_compile(vopts string) os.Result {
|
||||
cmd := '"$vexe" -showcc $vopts'
|
||||
cmd := '${os.quoted_path(vexe)} -showcc $vopts'
|
||||
// dump(cmd)
|
||||
res := os.execute_or_exit(cmd)
|
||||
// dump(res)
|
||||
|
||||
@@ -21,8 +21,8 @@ struct AppState {
|
||||
mut:
|
||||
gg &gg.Context = 0
|
||||
iidx int
|
||||
pixels [pheight][pwidth]u32
|
||||
npixels [pheight][pwidth]u32 // all drawing happens here, results are copied with vmemcpy to pixels at the end
|
||||
pixels []u32 = []u32{len: pwidth * pheight}
|
||||
npixels []u32 = []u32{len: pwidth * pheight} // all drawing happens here, results are copied at the end
|
||||
view ViewRect = ViewRect{-2.7610033817025625, 1.1788897130338223, -1.824584023871934, 2.1153096311072788}
|
||||
ntasks int = runtime.nr_jobs()
|
||||
}
|
||||
@@ -46,7 +46,7 @@ fn (mut state AppState) update() {
|
||||
threads << go state.recalc_lines(cview, start, start + sheight)
|
||||
}
|
||||
threads.wait()
|
||||
unsafe { vmemcpy(&state.pixels[0], &state.npixels[0], int(sizeof(state.pixels))) }
|
||||
state.pixels = state.npixels
|
||||
println('$state.ntasks threads; $sw.elapsed().milliseconds() ms / frame')
|
||||
oview = cview
|
||||
}
|
||||
@@ -65,14 +65,14 @@ fn (mut state AppState) recalc_lines(cview ViewRect, ymin f64, ymax f64) {
|
||||
break
|
||||
}
|
||||
}
|
||||
state.npixels[int(y_pixel)][int(x_pixel)] = u32(colors[iter % 8].abgr8())
|
||||
state.npixels[int(y_pixel) * pwidth + int(x_pixel)] = u32(colors[iter % 8].abgr8())
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn (mut state AppState) draw() {
|
||||
mut istream_image := state.gg.get_cached_image_by_idx(state.iidx)
|
||||
istream_image.update_pixel_data(&state.pixels)
|
||||
istream_image.update_pixel_data(&state.pixels[0])
|
||||
size := gg.window_size()
|
||||
state.gg.draw_image(0, 0, size.width, size.height, istream_image)
|
||||
}
|
||||
@@ -169,6 +169,7 @@ fn graphics_keydown(code gg.KeyCode, mod gg.Modifier, mut state AppState) {
|
||||
}
|
||||
}
|
||||
|
||||
[console]
|
||||
fn main() {
|
||||
mut state := &AppState{}
|
||||
state.gg = gg.new_context(
|
||||
|
||||
@@ -36,7 +36,6 @@ fn main() {
|
||||
bg_color: bg_color
|
||||
frame_fn: frame
|
||||
init_fn: init
|
||||
font_path: gg.system_font_path()
|
||||
)
|
||||
app.gg.run()
|
||||
}
|
||||
|
||||
@@ -42,7 +42,6 @@ fn main() {
|
||||
create_window: true
|
||||
frame_fn: frame
|
||||
bg_color: gx.white
|
||||
font_path: gg.system_font_path()
|
||||
)
|
||||
// window.onkeydown(key_down)
|
||||
println('Starting the game loop...')
|
||||
|
||||
@@ -29,7 +29,6 @@ fn main() {
|
||||
frame_fn: frame
|
||||
resizable: true
|
||||
bg_color: gx.white
|
||||
font_path: gg.system_font_path()
|
||||
)
|
||||
context.gg.run()
|
||||
}
|
||||
|
||||
18
examples/pendulum-simulation/.gitignore
vendored
Normal file
@@ -0,0 +1,18 @@
|
||||
# Executables files
|
||||
test
|
||||
test.exe
|
||||
|
||||
# Temporary files
|
||||
fns.txt
|
||||
|
||||
!/bin/test
|
||||
|
||||
/docs/build
|
||||
*.ppm
|
||||
main
|
||||
parallel
|
||||
parallel_with_iw
|
||||
sequential
|
||||
animation
|
||||
full
|
||||
*.log
|
||||
21
examples/pendulum-simulation/LICENSE
Normal file
@@ -0,0 +1,21 @@
|
||||
MIT License
|
||||
|
||||
Copyright (c) 2021-2022 Ulises Jeremias Cornejo Fandos
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE.
|
||||
93
examples/pendulum-simulation/README.md
Normal file
@@ -0,0 +1,93 @@
|
||||
<div align="center">
|
||||
<p>
|
||||
<img
|
||||
style="width: 250px"
|
||||
width="250"
|
||||
src="https://user-images.githubusercontent.com/17727170/153699135-a63e9644-1a29-4c04-9de3-c9100b06001d.png"
|
||||
>
|
||||
</p>
|
||||
|
||||
<h1>Pendulum Simulation in V</h1>
|
||||
|
||||
You can see the original implementation along with some benchmarks at
|
||||
[ulises-jeremias/v-pendulum-simulation](https://github.com/ulises-jeremias/v-pendulum-simulation).
|
||||
|
||||
[vlang.io](https://vlang.io) |
|
||||
[Docs](https://ulises-jeremias.github.io/v-pendulum-simulation) |
|
||||
[Contributing](https://github.com/ulises-jeremias/v-pendulum-simulation/blob/main/CONTRIBUTING.md)
|
||||
|
||||
</div>
|
||||
<div align="center">
|
||||
|
||||
[![Build Status][workflowbadge]][workflowurl]
|
||||
[![Docs Validation][validatedocsbadge]][validatedocsurl]
|
||||
[![License: MIT][licensebadge]][licenseurl]
|
||||
|
||||
</div>
|
||||
|
||||
## Run the Simulations
|
||||
|
||||
### Sequential Simulation
|
||||
|
||||
```sh
|
||||
$ v -gc boehm -prod sequential.v
|
||||
$ ./sequential # execute ./sequential -h for more info
|
||||
```
|
||||
|
||||
### Parallel Simulation
|
||||
|
||||
```sh
|
||||
$ v -gc boehm -prod parallel.v
|
||||
$ ./parallel # execute ./parallel -h for more info
|
||||
```
|
||||
|
||||

|
||||
|
||||
### Parallel Simulation with Image Worker
|
||||
|
||||
```sh
|
||||
$ v -gc boehm -prod parallel_with_iw.v
|
||||
$ ./parallel_with_iw # execute ./parallel_with_iw -h for more info
|
||||
```
|
||||
|
||||

|
||||
|
||||
### Parallel Simulation with Graphic User Interface
|
||||
|
||||
```sh
|
||||
$ v -gc boehm -prod animation.v
|
||||
$ ./animation # execute ./animation -h for more info
|
||||
```
|
||||
|
||||
### Full Parallel Simulation with Graphic User Interface and Image Output
|
||||
|
||||
```sh
|
||||
$ v -gc boehm -prod full.v
|
||||
$ ./full # execute ./full -h for more info
|
||||
```
|
||||
|
||||
## Testing
|
||||
|
||||
To test the module, just type the following command:
|
||||
|
||||
```sh
|
||||
$ v test .
|
||||
```
|
||||
|
||||
## Benchmark
|
||||
|
||||
Check the original repository for tools to run benchmark tests. There you can execute
|
||||
the following command to run the benchmark tests and get a full comparison between the implementations:
|
||||
|
||||
```sh
|
||||
$ ./bin/run-benchmark-test --help
|
||||
```
|
||||
|
||||

|
||||
|
||||
[workflowbadge]: https://github.com/ulises-jeremias/v-pendulum-simulation/workflows/Build%20and%20Test%20with%20deps/badge.svg
|
||||
[validatedocsbadge]: https://github.com/ulises-jeremias/v-pendulum-simulation/workflows/Validate%20Docs/badge.svg
|
||||
[licensebadge]: https://img.shields.io/badge/License-MIT-blue.svg
|
||||
[workflowurl]: https://github.com/ulises-jeremias/v-pendulum-simulation/commits/main
|
||||
[validatedocsurl]: https://github.com/ulises-jeremias/v-pendulum-simulation/commits/main
|
||||
[licenseurl]: https://github.com/ulises-jeremias/v-pendulum-simulation/blob/main/LICENSE
|
||||
37
examples/pendulum-simulation/animation.v
Normal file
@@ -0,0 +1,37 @@
|
||||
module main
|
||||
|
||||
import benchmark
|
||||
import sim
|
||||
import sim.anim
|
||||
import sim.args as simargs
|
||||
|
||||
fn main() {
|
||||
args := simargs.parse_args(extra_workers: 1) ? as simargs.ParallelArgs
|
||||
|
||||
mut app := anim.new_app(args)
|
||||
mut workers := []thread{cap: args.workers}
|
||||
|
||||
mut bmark := benchmark.start()
|
||||
|
||||
defer {
|
||||
app.request_chan.close()
|
||||
sim.log('Waiting for workers to finish')
|
||||
workers.wait()
|
||||
app.result_chan.close()
|
||||
sim.log('Workers finished!')
|
||||
bmark.measure(@FN)
|
||||
sim.log('Done!')
|
||||
}
|
||||
|
||||
for id in 0 .. args.workers {
|
||||
workers << go sim.sim_worker(id, app.request_chan, [app.result_chan])
|
||||
}
|
||||
|
||||
handle_request := fn [app] (request &sim.SimRequest) ? {
|
||||
app.request_chan <- request
|
||||
}
|
||||
|
||||
go app.gg.run()
|
||||
|
||||
sim.run(args.params, grid: args.grid, on_request: sim.SimRequestHandler(handle_request))
|
||||
}
|
||||
56
examples/pendulum-simulation/full.v
Normal file
@@ -0,0 +1,56 @@
|
||||
module main
|
||||
|
||||
import benchmark
|
||||
import sim
|
||||
import sim.anim
|
||||
import sim.args as simargs
|
||||
import sim.img
|
||||
|
||||
fn main() {
|
||||
args := simargs.parse_args(extra_workers: 2) ? as simargs.ParallelArgs
|
||||
|
||||
img_settings := img.image_settings_from_grid(args.grid)
|
||||
|
||||
mut writer := img.ppm_writer_for_fname(args.filename, img_settings) ?
|
||||
|
||||
mut app := anim.new_app(args)
|
||||
mut workers := []thread{cap: args.workers + 1}
|
||||
|
||||
mut bmark := benchmark.start()
|
||||
|
||||
img_result_chan := chan &sim.SimResult{cap: args.workers}
|
||||
|
||||
defer {
|
||||
image_worker := workers.pop()
|
||||
app.request_chan.close()
|
||||
sim.log('Waiting for workers to finish')
|
||||
workers.wait()
|
||||
app.result_chan.close()
|
||||
img_result_chan.close()
|
||||
sim.log('Waiting for image writer to finish')
|
||||
image_worker.wait()
|
||||
sim.log('Workers finished!')
|
||||
bmark.measure(@FN)
|
||||
sim.log('Closing writer file')
|
||||
writer.close()
|
||||
sim.log('Done!')
|
||||
}
|
||||
|
||||
// start a worker on each core
|
||||
for id in 0 .. app.args.workers {
|
||||
workers << go sim.sim_worker(id, app.request_chan, [app.result_chan, img_result_chan])
|
||||
}
|
||||
|
||||
handle_request := fn [app] (request &sim.SimRequest) ? {
|
||||
app.request_chan <- request
|
||||
}
|
||||
|
||||
workers << go img.image_worker(mut writer, img_result_chan, img_settings)
|
||||
|
||||
go app.gg.run()
|
||||
|
||||
sim.run(app.args.params,
|
||||
grid: app.args.grid
|
||||
on_request: sim.SimRequestHandler(handle_request)
|
||||
)
|
||||
}
|
||||
64
examples/pendulum-simulation/modules/sim/anim/app.v
Normal file
@@ -0,0 +1,64 @@
|
||||
module anim
|
||||
|
||||
import gg
|
||||
import gx
|
||||
import sim
|
||||
import sim.args as simargs
|
||||
|
||||
const bg_color = gx.white
|
||||
|
||||
struct Pixel {
|
||||
x f32
|
||||
y f32
|
||||
color gx.Color
|
||||
}
|
||||
|
||||
struct App {
|
||||
pub:
|
||||
args simargs.ParallelArgs
|
||||
request_chan chan &sim.SimRequest
|
||||
result_chan chan &sim.SimResult
|
||||
pub mut:
|
||||
gg &gg.Context = 0
|
||||
iidx int
|
||||
pixels []u32
|
||||
}
|
||||
|
||||
pub fn new_app(args simargs.ParallelArgs) &App {
|
||||
total_pixels := args.grid.height * args.grid.width
|
||||
|
||||
mut app := &App{
|
||||
args: args
|
||||
pixels: []u32{len: total_pixels}
|
||||
request_chan: chan &sim.SimRequest{cap: args.grid.width}
|
||||
}
|
||||
app.gg = gg.new_context(
|
||||
width: args.grid.width
|
||||
height: args.grid.height
|
||||
create_window: true
|
||||
window_title: 'V Pendulum Simulation'
|
||||
user_data: app
|
||||
bg_color: anim.bg_color
|
||||
frame_fn: frame
|
||||
init_fn: init
|
||||
)
|
||||
return app
|
||||
}
|
||||
|
||||
fn init(mut app App) {
|
||||
app.iidx = app.gg.new_streaming_image(app.args.grid.width, app.args.grid.height, 4,
|
||||
pixel_format: .rgba8)
|
||||
go pixels_worker(mut app)
|
||||
}
|
||||
|
||||
fn frame(mut app App) {
|
||||
app.gg.begin()
|
||||
app.draw()
|
||||
app.gg.end()
|
||||
}
|
||||
|
||||
fn (mut app App) draw() {
|
||||
mut istream_image := app.gg.get_cached_image_by_idx(app.iidx)
|
||||
istream_image.update_pixel_data(&app.pixels[0])
|
||||
app.gg.draw_image(0, 0, app.args.grid.width, app.args.grid.height, istream_image)
|
||||
}
|
||||
19
examples/pendulum-simulation/modules/sim/anim/worker.v
Normal file
@@ -0,0 +1,19 @@
|
||||
module anim
|
||||
|
||||
import benchmark
|
||||
import sim
|
||||
import sim.img
|
||||
|
||||
fn pixels_worker(mut app App) {
|
||||
mut bmark := benchmark.new_benchmark()
|
||||
for {
|
||||
result := <-app.result_chan or { break }
|
||||
bmark.step()
|
||||
// find the closest magnet
|
||||
pixel_color := img.compute_pixel(result)
|
||||
app.pixels[result.id] = u32(pixel_color.abgr8())
|
||||
bmark.ok()
|
||||
}
|
||||
bmark.stop()
|
||||
println(bmark.total_message(@FN))
|
||||
}
|
||||
158
examples/pendulum-simulation/modules/sim/args/parser.v
Normal file
@@ -0,0 +1,158 @@
|
||||
module args
|
||||
|
||||
import flag
|
||||
import os
|
||||
import runtime
|
||||
import sim
|
||||
import math
|
||||
|
||||
// customisable through setting VJOBS
|
||||
const max_parallel_workers = runtime.nr_jobs()
|
||||
|
||||
[params]
|
||||
pub struct ParserSettings {
|
||||
sequential bool
|
||||
img bool
|
||||
extra_workers int
|
||||
}
|
||||
|
||||
pub struct SequentialArgs {
|
||||
pub:
|
||||
params sim.SimParams
|
||||
grid sim.GridSettings
|
||||
filename string
|
||||
}
|
||||
|
||||
pub struct ParallelArgs {
|
||||
SequentialArgs
|
||||
pub:
|
||||
workers int = args.max_parallel_workers
|
||||
}
|
||||
|
||||
pub type SimArgs = ParallelArgs | SequentialArgs
|
||||
|
||||
pub fn parse_args(config ParserSettings) ?SimArgs {
|
||||
if config.sequential {
|
||||
args := parse_sequential_args() ?
|
||||
return SimArgs(args)
|
||||
} else {
|
||||
args := parse_parallel_args(config.extra_workers) ?
|
||||
return SimArgs(args)
|
||||
}
|
||||
}
|
||||
|
||||
fn parse_sequential_args() ?SequentialArgs {
|
||||
mut fp := flag.new_flag_parser(os.args)
|
||||
fp.application('vps')
|
||||
fp.version('v0.1.0')
|
||||
fp.limit_free_args(0, 0) ?
|
||||
fp.description('This is a pendulum simulation written in pure V')
|
||||
fp.skip_executable()
|
||||
|
||||
// output parameters
|
||||
width := fp.int('width', `w`, sim.default_width, 'width of the image output. Defaults to $sim.default_width')
|
||||
height := fp.int('height', `h`, sim.default_height, 'height of the image output. Defaults to $sim.default_height')
|
||||
filename := fp.string('output', `o`, 'out.ppm', 'name of the image output. Defaults to out.ppm')
|
||||
|
||||
// simulation parameters
|
||||
rope_length := fp.float('rope-length', 0, sim.default_rope_length, 'rope length to use on simulation. Defaults to $sim.default_rope_length')
|
||||
bearing_mass := fp.float('bearing-mass', 0, sim.default_bearing_mass, 'bearing mass to use on simulation. Defaults to $sim.default_bearing_mass')
|
||||
magnet_spacing := fp.float('magnet-spacing', 0, sim.default_magnet_spacing, 'magnet spacing to use on simulation. Defaults to $sim.default_magnet_spacing')
|
||||
magnet_height := fp.float('magnet-height', 0, sim.default_magnet_height, 'magnet height to use on simulation. Defaults to $sim.default_magnet_height')
|
||||
magnet_strength := fp.float('magnet-strength', 0, sim.default_magnet_strength, 'magnet strength to use on simulation. Defaults to $sim.default_magnet_strength')
|
||||
gravity := fp.float('gravity', 0, sim.default_gravity, 'gravity to use on simulation. Defaults to $sim.default_gravity')
|
||||
|
||||
fp.finalize() or {
|
||||
println(fp.usage())
|
||||
return none
|
||||
}
|
||||
|
||||
params := sim.sim_params(
|
||||
rope_length: rope_length
|
||||
bearing_mass: bearing_mass
|
||||
magnet_spacing: magnet_spacing
|
||||
magnet_height: magnet_height
|
||||
magnet_strength: magnet_strength
|
||||
gravity: gravity
|
||||
)
|
||||
|
||||
grid := sim.new_grid_settings(
|
||||
width: width
|
||||
height: height
|
||||
)
|
||||
|
||||
args := SequentialArgs{
|
||||
params: params
|
||||
filename: filename
|
||||
grid: grid
|
||||
}
|
||||
|
||||
sim.log('$args')
|
||||
|
||||
return args
|
||||
}
|
||||
|
||||
fn parse_parallel_args(extra_workers int) ?ParallelArgs {
|
||||
mut fp := flag.new_flag_parser(os.args)
|
||||
fp.application('vps')
|
||||
fp.version('v0.1.0')
|
||||
fp.limit_free_args(0, 0) ?
|
||||
fp.description('This is a pendulum simulation written in pure V')
|
||||
fp.skip_executable()
|
||||
|
||||
workers := fp.int('workers', 0, args.max_parallel_workers, 'amount of workers to use on simulation. Defaults to $args.max_parallel_workers')
|
||||
|
||||
// output parameters
|
||||
width := fp.int('width', `w`, sim.default_width, 'width of the image output. Defaults to $sim.default_width')
|
||||
height := fp.int('height', `h`, sim.default_height, 'height of the image output. Defaults to $sim.default_height')
|
||||
filename := fp.string('output', `o`, 'out.ppm', 'name of the image output. Defaults to out.ppm')
|
||||
|
||||
// simulation parameters
|
||||
rope_length := fp.float('rope-length', 0, sim.default_rope_length, 'rope length to use on simulation. Defaults to $sim.default_rope_length')
|
||||
bearing_mass := fp.float('bearing-mass', 0, sim.default_bearing_mass, 'bearing mass to use on simulation. Defaults to $sim.default_bearing_mass')
|
||||
magnet_spacing := fp.float('magnet-spacing', 0, sim.default_magnet_spacing, 'magnet spacing to use on simulation. Defaults to $sim.default_magnet_spacing')
|
||||
magnet_height := fp.float('magnet-height', 0, sim.default_magnet_height, 'magnet height to use on simulation. Defaults to $sim.default_magnet_height')
|
||||
magnet_strength := fp.float('magnet-strength', 0, sim.default_magnet_strength, 'magnet strength to use on simulation. Defaults to $sim.default_magnet_strength')
|
||||
gravity := fp.float('gravity', 0, sim.default_gravity, 'gravity to use on simulation. Defaults to $sim.default_gravity')
|
||||
|
||||
fp.finalize() or {
|
||||
println(fp.usage())
|
||||
return none
|
||||
}
|
||||
|
||||
params := sim.sim_params(
|
||||
rope_length: rope_length
|
||||
bearing_mass: bearing_mass
|
||||
magnet_spacing: magnet_spacing
|
||||
magnet_height: magnet_height
|
||||
magnet_strength: magnet_strength
|
||||
gravity: gravity
|
||||
)
|
||||
|
||||
grid := sim.new_grid_settings(
|
||||
width: width
|
||||
height: height
|
||||
)
|
||||
|
||||
args := ParallelArgs{
|
||||
params: params
|
||||
filename: filename
|
||||
grid: grid
|
||||
workers: get_workers(workers, extra_workers)
|
||||
}
|
||||
|
||||
sim.log('$args')
|
||||
|
||||
return args
|
||||
}
|
||||
|
||||
[inline]
|
||||
fn get_workers(workers int, extra_workers int) int {
|
||||
result := if workers + extra_workers <= args.max_parallel_workers {
|
||||
workers
|
||||
} else {
|
||||
args.max_parallel_workers - extra_workers
|
||||
}
|
||||
|
||||
return math.max(1, result)
|
||||
}
|
||||
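get_workers above caps the requested worker count at runtime.nr_jobs() while reserving room for any extra (non-simulation) workers, and never returns less than one. A standalone sketch of the same clamping rule; clamp_workers is a hypothetical stand-in, not part of the args module:

module main

import math
import runtime

// same rule as the private get_workers helper above
fn clamp_workers(requested int, extra int, max int) int {
	result := if requested + extra <= max { requested } else { max - extra }
	return math.max(1, result)
}

fn main() {
	max := runtime.nr_jobs()
	println('$max jobs, 1 extra worker -> ${clamp_workers(max, 1, max)} simulation workers')
}
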
74
examples/pendulum-simulation/modules/sim/img/ppm.v
Normal file
@@ -0,0 +1,74 @@
|
||||
module img
|
||||
|
||||
import gx
|
||||
import os
|
||||
import sim
|
||||
|
||||
[params]
|
||||
pub struct ImageSettings {
|
||||
pub:
|
||||
width int = sim.default_width
|
||||
height int = sim.default_height
|
||||
cache_size int = 200
|
||||
}
|
||||
|
||||
pub fn new_image_settings(settings ImageSettings) ImageSettings {
|
||||
return ImageSettings{
|
||||
...settings
|
||||
}
|
||||
}
|
||||
|
||||
pub fn image_settings_from_grid(grid sim.GridSettings) ImageSettings {
|
||||
return ImageSettings{
|
||||
width: grid.width
|
||||
height: grid.height
|
||||
}
|
||||
}
|
||||
|
||||
pub fn (s ImageSettings) to_grid_settings() sim.GridSettings {
|
||||
return sim.GridSettings{
|
||||
width: s.width
|
||||
height: s.height
|
||||
}
|
||||
}
|
||||
|
||||
pub struct PPMWriter {
|
||||
mut:
|
||||
file os.File
|
||||
cache []byte
|
||||
cache_size int
|
||||
}
|
||||
|
||||
pub fn ppm_writer_for_fname(fname string, settings ImageSettings) ?&PPMWriter {
|
||||
mut writer := &PPMWriter{
|
||||
cache_size: settings.cache_size
|
||||
cache: []byte{cap: settings.cache_size}
|
||||
}
|
||||
writer.start_for_file(fname, settings) ?
|
||||
return writer
|
||||
}
|
||||
|
||||
pub fn (mut writer PPMWriter) start_for_file(fname string, settings ImageSettings) ? {
|
||||
writer.file = os.create(fname) ?
|
||||
writer.file.writeln('P6 $settings.width $settings.height 255') ?
|
||||
}
|
||||
|
||||
pub fn (mut writer PPMWriter) handle_pixel(p gx.Color) ? {
|
||||
if writer.cache.len >= writer.cache_size {
|
||||
writer.write() ?
|
||||
writer.flush() ?
|
||||
}
|
||||
writer.cache << [p.r, p.g, p.b]
|
||||
}
|
||||
|
||||
pub fn (mut writer PPMWriter) flush() ? {
|
||||
writer.cache.clear()
|
||||
}
|
||||
|
||||
pub fn (mut writer PPMWriter) write() ? {
|
||||
writer.file.write(writer.cache) ?
|
||||
}
|
||||
|
||||
pub fn (mut writer PPMWriter) close() {
|
||||
writer.file.close()
|
||||
}
|
||||
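PPMWriter buffers pixels in memory and writes them out in cache_size chunks. A minimal usage sketch, assuming a writable tiny.ppm output path (the two-pixel image is arbitrary):

module main

import gx
import sim.img

fn main() {
	settings := img.new_image_settings(width: 2, height: 1)
	mut writer := img.ppm_writer_for_fname('tiny.ppm', settings) or { panic(err) }
	defer {
		writer.close()
	}
	writer.handle_pixel(gx.red) or { panic(err) }
	writer.handle_pixel(gx.blue) or { panic(err) }
	// push whatever is still cached to disk
	writer.write() or { panic(err) }
}
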
40
examples/pendulum-simulation/modules/sim/img/worker.v
Normal file
@@ -0,0 +1,40 @@
|
||||
module img
|
||||
|
||||
import benchmark
|
||||
import sim
|
||||
|
||||
pub fn image_worker(mut writer PPMWriter, result_chan chan &sim.SimResult, settings ImageSettings) {
|
||||
width := settings.width
|
||||
height := settings.height
|
||||
total_pixels := width * height
|
||||
|
||||
// as new pixels come in, write them to the image file
|
||||
mut current_index := u64(0)
|
||||
mut pixel_buf := []ValidColor{len: total_pixels, init: ValidColor{
|
||||
valid: false
|
||||
}}
|
||||
|
||||
mut bmark := benchmark.new_benchmark()
|
||||
for {
|
||||
result := <-result_chan or { break }
|
||||
|
||||
// find the closest magnet
|
||||
pixel_buf[result.id].Color = compute_pixel(result)
|
||||
pixel_buf[result.id].valid = true
|
||||
|
||||
for current_index < total_pixels && pixel_buf[current_index].valid {
|
||||
bmark.step()
|
||||
writer.handle_pixel(pixel_buf[current_index].Color) or {
|
||||
bmark.fail()
|
||||
sim.log(@MOD + '.' + @FN + ': pixel handler failed. Error $err')
|
||||
break
|
||||
}
|
||||
bmark.ok()
|
||||
current_index++
|
||||
}
|
||||
}
|
||||
bmark.stop()
|
||||
println(bmark.total_message(@FN))
|
||||
|
||||
writer.write() or { panic('Could not write image') }
|
||||
}
|
||||
68
examples/pendulum-simulation/modules/sim/img/writer.v
Normal file
@@ -0,0 +1,68 @@
|
||||
module img
|
||||
|
||||
import gx
|
||||
import sim
|
||||
|
||||
pub struct ValidColor {
|
||||
gx.Color
|
||||
pub mut:
|
||||
valid bool
|
||||
}
|
||||
|
||||
pub struct ImageWriter {
|
||||
settings ImageSettings
|
||||
pub mut:
|
||||
writer PPMWriter
|
||||
current_index int
|
||||
buffer []ValidColor
|
||||
}
|
||||
|
||||
pub fn new_image_writer(mut writer PPMWriter, settings ImageSettings) &ImageWriter {
|
||||
total_pixels := settings.width * settings.height
|
||||
mut buffer := []ValidColor{len: total_pixels, init: ValidColor{
|
||||
valid: false
|
||||
}}
|
||||
return &ImageWriter{
|
||||
writer: writer
|
||||
settings: settings
|
||||
buffer: buffer
|
||||
}
|
||||
}
|
||||
|
||||
pub fn (mut iw ImageWriter) handle(result sim.SimResult) ?int {
|
||||
total_pixels := iw.settings.width * iw.settings.height
|
||||
|
||||
// find the closest magnet
|
||||
iw.buffer[result.id].Color = compute_pixel(result)
|
||||
iw.buffer[result.id].valid = true
|
||||
|
||||
for iw.current_index < total_pixels && iw.buffer[iw.current_index].valid {
|
||||
iw.writer.handle_pixel(iw.buffer[iw.current_index].Color) or {
|
||||
sim.log(@MOD + '.' + @FN + ': pixel handler failed. Error $err')
|
||||
break
|
||||
}
|
||||
iw.current_index++
|
||||
}
|
||||
|
||||
if iw.current_index == total_pixels {
|
||||
iw.writer.write() or { panic('Could not write image') }
|
||||
return none
|
||||
}
|
||||
|
||||
return iw.current_index
|
||||
}
|
||||
|
||||
pub fn compute_pixel(result sim.SimResult) gx.Color {
|
||||
closest_to_m1 := result.magnet1_distance < result.magnet2_distance
|
||||
&& result.magnet1_distance < result.magnet3_distance
|
||||
closest_to_m2 := result.magnet2_distance < result.magnet1_distance
|
||||
&& result.magnet2_distance < result.magnet3_distance
|
||||
|
||||
if closest_to_m1 {
|
||||
return gx.red
|
||||
} else if closest_to_m2 {
|
||||
return gx.green
|
||||
} else {
|
||||
return gx.blue
|
||||
}
|
||||
}
|
||||
9
examples/pendulum-simulation/modules/sim/log.v
Normal file
@@ -0,0 +1,9 @@
|
||||
module sim
|
||||
|
||||
// log is a helper function to print debug info
|
||||
[inline]
|
||||
pub fn log(info string) {
|
||||
$if verbose ? {
|
||||
println(info)
|
||||
}
|
||||
}
|
||||
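log is compiled away unless the program is built with the verbose compile-time flag (for example: v -d verbose run examples/pendulum-simulation/sequential.v), so the hot simulation loops stay silent by default. A minimal sketch:

module main

import sim

fn main() {
	// printed only when built with -d verbose
	sim.log('starting a run with default parameters')
}
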
96
examples/pendulum-simulation/modules/sim/params.v
Normal file
@@ -0,0 +1,96 @@
|
||||
module sim
|
||||
|
||||
import math
|
||||
|
||||
pub const (
|
||||
default_rope_length = 0.25
|
||||
default_bearing_mass = 0.03
|
||||
default_magnet_spacing = 0.05
|
||||
default_magnet_height = 0.03
|
||||
default_magnet_strength = 10.0
|
||||
default_gravity = 4.9
|
||||
)
|
||||
|
||||
[params]
|
||||
pub struct SimParams {
|
||||
rope_length f64 = sim.default_rope_length
|
||||
bearing_mass f64 = sim.default_bearing_mass
|
||||
magnet_spacing f64 = sim.default_magnet_spacing
|
||||
magnet_height f64 = sim.default_magnet_height
|
||||
magnet_strength f64 = sim.default_magnet_strength
|
||||
gravity f64 = sim.default_gravity
|
||||
}
|
||||
|
||||
pub fn sim_params(params SimParams) SimParams {
|
||||
return SimParams{
|
||||
...params
|
||||
}
|
||||
}
|
||||
|
||||
pub fn (params SimParams) get_rope_vector(state SimState) Vector3D {
|
||||
rope_origin := vector(z: params.rope_length)
|
||||
|
||||
return state.position + rope_origin.scale(-1)
|
||||
}
|
||||
|
||||
pub fn (params SimParams) get_forces_sum(state SimState) Vector3D {
|
||||
// force due to gravity
|
||||
f_gravity := params.get_grav_force(state)
|
||||
|
||||
// force due to magnets
|
||||
f_magnet1 := params.get_magnet1_force(state)
|
||||
f_magnet2 := params.get_magnet2_force(state)
|
||||
f_magnet3 := params.get_magnet3_force(state)
|
||||
|
||||
mut f_passive := vector(x: 0.0, y: 0.0, z: 0.0)
|
||||
for force in [f_gravity, f_magnet1, f_magnet2, f_magnet3] {
|
||||
f_passive = f_passive + force
|
||||
}
|
||||
|
||||
// force due to tension of the rope
|
||||
f_tension := params.get_tension_force(state, f_passive)
|
||||
|
||||
return f_passive + f_tension
|
||||
}
|
||||
|
||||
pub fn (params SimParams) get_grav_force(state SimState) Vector3D {
|
||||
return vector(z: -params.bearing_mass * params.gravity)
|
||||
}
|
||||
|
||||
pub fn (params SimParams) get_magnet_position(theta f64) Vector3D {
|
||||
return vector(
|
||||
x: math.cos(theta) * params.magnet_spacing
|
||||
y: math.sin(theta) * params.magnet_spacing
|
||||
z: -params.magnet_height
|
||||
)
|
||||
}
|
||||
|
||||
pub fn (params SimParams) get_magnet_force(theta f64, state SimState) Vector3D {
|
||||
magnet_position := params.get_magnet_position(theta)
|
||||
mut diff := magnet_position + state.position.scale(-1)
|
||||
distance_squared := diff.norm_squared()
|
||||
diff = diff.scale(1.0 / math.sqrt(distance_squared))
|
||||
return diff.scale(params.magnet_strength / distance_squared)
|
||||
}
|
||||
|
||||
pub fn (params SimParams) get_magnet_dist(theta f64, state SimState) f64 {
|
||||
return (params.get_magnet_position(theta) + state.position.scale(-1)).norm()
|
||||
}
|
||||
|
||||
pub fn (params SimParams) get_magnet1_force(state SimState) Vector3D {
|
||||
return params.get_magnet_force(0.0 * math.pi / 3.0, state)
|
||||
}
|
||||
|
||||
pub fn (params SimParams) get_magnet2_force(state SimState) Vector3D {
|
||||
return params.get_magnet_force(2.0 * math.pi / 3.0, state)
|
||||
}
|
||||
|
||||
pub fn (params SimParams) get_magnet3_force(state SimState) Vector3D {
|
||||
return params.get_magnet_force(4.0 * math.pi / 3.0, state)
|
||||
}
|
||||
|
||||
pub fn (params SimParams) get_tension_force(state SimState, f_passive Vector3D) Vector3D {
|
||||
rope_vector := params.get_rope_vector(state)
|
||||
rope_vector_norm := rope_vector.scale(1.0 / rope_vector.norm())
|
||||
return rope_vector_norm.scale(-1.0 * (rope_vector_norm * f_passive))
|
||||
}
|
||||
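SimParams bundles the physical constants and exposes each force term as a method; get_forces_sum adds gravity, the three magnet forces and the rope tension. A minimal sketch of querying it from another module (the position values are arbitrary):

module main

import sim

fn main() {
	params := sim.sim_params(magnet_strength: 12.0) // the other constants keep their defaults
	mut state := sim.new_state(position: sim.vector(x: 0.01, y: -0.02, z: 0.0))
	state.satisfy_rope_constraint(params)
	println('net force: ${params.get_forces_sum(state)}')
	println('distance to magnet 1: ${params.get_magnet_dist(0, state)}')
}
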
125
examples/pendulum-simulation/modules/sim/params_test.v
Normal file
@@ -0,0 +1,125 @@
|
||||
module sim
|
||||
|
||||
import math
|
||||
|
||||
const (
|
||||
params_test_mock_params = SimParams{
|
||||
rope_length: 0.25
|
||||
bearing_mass: 0.03
|
||||
magnet_spacing: 0.05
|
||||
magnet_height: 0.03
|
||||
magnet_strength: 10
|
||||
gravity: 4.9
|
||||
}
|
||||
params_test_mock_state = SimState{
|
||||
position: vector(
|
||||
x: -0.016957230930171364
|
||||
y: -0.02937078552673521
|
||||
z: 0.002311063475327252
|
||||
)
|
||||
velocity: vector(
|
||||
x: -7.251158929833104
|
||||
y: -12.559375680227724
|
||||
z: -105.91539687686381
|
||||
)
|
||||
accel: vector(
|
||||
x: -8.337034766251843e-11
|
||||
y: -2.842170943040401e-10
|
||||
z: 1.2126596023639044e-10
|
||||
)
|
||||
}
|
||||
params_test_mock_theta = 2.0 * math.pi / 3.0
|
||||
)
|
||||
|
||||
pub fn test_get_rope_vector() {
|
||||
result := sim.params_test_mock_params.get_rope_vector(sim.params_test_mock_state)
|
||||
expected := vector(
|
||||
x: -0.016957230930171364
|
||||
y: -0.02937078552673521
|
||||
z: -0.24768893652467275
|
||||
)
|
||||
assert result == expected
|
||||
}
|
||||
|
||||
pub fn test_get_forces_sum() {
|
||||
result := sim.params_test_mock_params.get_forces_sum(sim.params_test_mock_state)
|
||||
expected := vector(
|
||||
x: 3.637978807091713e-12
|
||||
y: 5.229594535194337e-12
|
||||
z: 9.094947017729282e-13
|
||||
)
|
||||
assert result == expected
|
||||
}
|
||||
|
||||
pub fn test_get_grav_force() {
|
||||
result := sim.params_test_mock_params.get_grav_force(sim.params_test_mock_state)
|
||||
expected := vector(
|
||||
z: -0.147
|
||||
)
|
||||
assert result == expected
|
||||
}
|
||||
|
||||
pub fn test_get_magnet_position() {
|
||||
result := sim.params_test_mock_params.get_magnet_position(sim.params_test_mock_theta)
|
||||
expected := vector(
|
||||
x: -0.024999999999999988
|
||||
y: 0.043301270189221946
|
||||
z: -0.03
|
||||
)
|
||||
assert result == expected
|
||||
}
|
||||
|
||||
pub fn test_get_magnet_force() {
|
||||
result := sim.params_test_mock_params.get_magnet_force(sim.params_test_mock_theta,
|
||||
sim.params_test_mock_state)
|
||||
expected := vector(
|
||||
x: -157.45722976925555
|
||||
y: 1422.736432604726
|
||||
z: -632.5695169850264
|
||||
)
|
||||
assert result == expected
|
||||
}
|
||||
|
||||
pub fn test_get_magnet_dist() {
|
||||
result := sim.params_test_mock_params.get_magnet_dist(sim.params_test_mock_theta,
|
||||
sim.params_test_mock_state)
|
||||
expected := 0.07993696666249227
|
||||
assert result == expected
|
||||
}
|
||||
|
||||
pub fn test_get_magnet1_force() {
|
||||
result := sim.params_test_mock_params.get_magnet1_force(sim.params_test_mock_state)
|
||||
expected := vector(
|
||||
x: 1310.8545084099674
|
||||
y: 575.0062553126633
|
||||
z: -632.5695169850262
|
||||
)
|
||||
assert result == expected
|
||||
}
|
||||
|
||||
pub fn test_get_magnet2_force() {
|
||||
result := sim.params_test_mock_params.get_magnet2_force(sim.params_test_mock_state)
|
||||
expected := vector(
|
||||
x: -157.45722976925555
|
||||
y: 1422.736432604726
|
||||
z: -632.5695169850264
|
||||
)
|
||||
assert result == expected
|
||||
}
|
||||
|
||||
pub fn test_get_magnet3_force() {
|
||||
result := sim.params_test_mock_params.get_magnet3_force(sim.params_test_mock_state)
|
||||
expected := vector(
|
||||
x: -1710.46541088048
|
||||
y: -2962.612996234165
|
||||
z: -6871.632889552589
|
||||
)
|
||||
assert result == expected
|
||||
}
|
||||
|
||||
pub fn test_get_tension_force() {
|
||||
result := sim.params_test_mock_params.get_tension_force(sim.params_test_mock_state,
|
||||
vector(x: 0.0, y: 0.0, z: 0.0))
|
||||
expected := vector(x: 0.0, y: 0.0, z: 0.0)
|
||||
assert result == expected
|
||||
}
|
||||
96
examples/pendulum-simulation/modules/sim/runner.v
Normal file
@@ -0,0 +1,96 @@
|
||||
module sim
|
||||
|
||||
import benchmark
|
||||
import term
|
||||
|
||||
pub type SimRequestHandler = fn (request &SimRequest) ?
|
||||
|
||||
pub type SimStartHandler = fn () ?
|
||||
|
||||
pub type SimFinishHandler = fn () ?
|
||||
|
||||
pub const (
|
||||
default_width = 600
|
||||
default_height = 600
|
||||
)
|
||||
|
||||
[params]
|
||||
pub struct GridSettings {
|
||||
pub:
|
||||
width int = sim.default_width
|
||||
height int = sim.default_height
|
||||
}
|
||||
|
||||
pub fn new_grid_settings(settings GridSettings) GridSettings {
|
||||
return GridSettings{
|
||||
...settings
|
||||
}
|
||||
}
|
||||
|
||||
[params]
|
||||
pub struct RunnerSettings {
|
||||
pub:
|
||||
grid GridSettings
|
||||
on_request SimRequestHandler
|
||||
on_start SimStartHandler
|
||||
on_finish SimFinishHandler
|
||||
}
|
||||
|
||||
pub fn run(params SimParams, settings RunnerSettings) {
|
||||
height := settings.grid.height
|
||||
width := settings.grid.width
|
||||
|
||||
if !isnil(settings.on_start) {
|
||||
settings.on_start() or {
|
||||
log(@MOD + '.' + @FN + ': Simulation start handler failed. Error $err')
|
||||
}
|
||||
}
|
||||
|
||||
mut index := 0
|
||||
log('')
|
||||
|
||||
mut bmark := benchmark.new_benchmark()
|
||||
for y in 0 .. height {
|
||||
$if verbose ? {
|
||||
term.clear_previous_line()
|
||||
}
|
||||
log(@MOD + '.' + @FN + ': y: ${y + 1}')
|
||||
for x in 0 .. width {
|
||||
bmark.step()
|
||||
// setup state conditions
|
||||
position := vector(
|
||||
x: 0.1 * ((f64(x) - 0.5 * f64(width - 1)) / f64(width - 1))
|
||||
y: 0.1 * ((f64(y) - 0.5 * f64(height - 1)) / f64(height - 1))
|
||||
z: 0.0
|
||||
)
|
||||
velocity := vector(x: 0, y: 0, z: 0)
|
||||
|
||||
mut state := new_state(
|
||||
position: position
|
||||
velocity: velocity
|
||||
)
|
||||
|
||||
state.satisfy_rope_constraint(params)
|
||||
request := &SimRequest{
|
||||
id: index
|
||||
state: state
|
||||
params: params
|
||||
}
|
||||
settings.on_request(request) or {
|
||||
log(@MOD + '.' + @FN + ': request handler failed. Error $err')
|
||||
bmark.fail()
|
||||
break
|
||||
}
|
||||
index++
|
||||
bmark.ok()
|
||||
}
|
||||
}
|
||||
bmark.stop()
|
||||
println(bmark.total_message(@FN))
|
||||
|
||||
if !isnil(settings.on_finish) {
|
||||
settings.on_finish() or {
|
||||
log(@MOD + '.' + @FN + ': Simulation stop handler failed. Error $err')
|
||||
}
|
||||
}
|
||||
}
|
||||
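run sweeps the grid row by row and hands every prepared SimRequest to the on_request callback; on_start and on_finish are optional hooks. A minimal sketch with a stand-in handler that only logs request ids (a real program forwards them to a worker channel, as parallel_with_iw.v does below):

module main

import sim

fn main() {
	handle_request := fn (request &sim.SimRequest) ? {
		// stand-in handler: just acknowledge the request
		println('queued pixel $request.id')
	}
	sim.run(sim.sim_params(),
		grid: sim.new_grid_settings(width: 4, height: 4)
		on_request: sim.SimRequestHandler(handle_request)
	)
}
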
47
examples/pendulum-simulation/modules/sim/sim.v
Normal file
@@ -0,0 +1,47 @@
|
||||
module sim
|
||||
|
||||
pub struct SimState {
|
||||
mut:
|
||||
position Vector3D
|
||||
velocity Vector3D
|
||||
accel Vector3D
|
||||
}
|
||||
|
||||
pub fn new_state(state SimState) SimState {
|
||||
return SimState{
|
||||
...state
|
||||
}
|
||||
}
|
||||
|
||||
pub fn (mut state SimState) satisfy_rope_constraint(params SimParams) {
|
||||
mut rope_vector := params.get_rope_vector(state)
|
||||
rope_vector = rope_vector.scale(params.rope_length / rope_vector.norm())
|
||||
state.position = vector(z: params.rope_length) + rope_vector
|
||||
}
|
||||
|
||||
pub fn (mut state SimState) increment(delta_t f64, params SimParams) {
|
||||
// 1. add up all forces
|
||||
// 2. get an acceleration
|
||||
// 3. add to velocity
|
||||
// 4. ensure rope constraint is satisfied
|
||||
|
||||
// sum up all forces
|
||||
forces_sum := params.get_forces_sum(state)
|
||||
|
||||
// get the acceleration
|
||||
accel := forces_sum.scale(1.0 / params.bearing_mass)
|
||||
state.accel = accel
|
||||
|
||||
// update the velocity
|
||||
state.velocity = state.velocity + accel.scale(delta_t)
|
||||
|
||||
// update the position
|
||||
state.position = state.position + state.velocity.scale(delta_t)
|
||||
|
||||
// ensure the position satisfies rope constraint
|
||||
state.satisfy_rope_constraint(params)
|
||||
}
|
||||
|
||||
pub fn (state SimState) done() bool {
|
||||
return state.velocity.norm() < 0.05 && state.accel.norm() < 0.01
|
||||
}
|
||||
64
examples/pendulum-simulation/modules/sim/sim_test.v
Normal file
@@ -0,0 +1,64 @@
|
||||
module sim
|
||||
|
||||
const (
|
||||
sim_test_mock_params = SimParams{
|
||||
rope_length: 0.25
|
||||
bearing_mass: 0.03
|
||||
magnet_spacing: 0.05
|
||||
magnet_height: 0.03
|
||||
magnet_strength: 10
|
||||
gravity: 4.9
|
||||
}
|
||||
sim_test_mock_state = SimState{
|
||||
position: vector(
|
||||
x: -0.016957230930171364
|
||||
y: -0.02937078552673521
|
||||
z: 0.002311063475327252
|
||||
)
|
||||
velocity: vector(
|
||||
x: -7.251158929833104
|
||||
y: -12.559375680227724
|
||||
z: -105.91539687686381
|
||||
)
|
||||
accel: vector(
|
||||
x: -8.337034766251843e-11
|
||||
y: -2.842170943040401e-10
|
||||
z: 1.2126596023639044e-10
|
||||
)
|
||||
}
|
||||
)
|
||||
|
||||
pub fn test_satisfy_rope_constraint() {
|
||||
mut state := SimState{
|
||||
...sim.sim_test_mock_state
|
||||
}
|
||||
|
||||
state.satisfy_rope_constraint(sim.sim_test_mock_params)
|
||||
assert state.position.x == -0.016957230930171364
|
||||
assert state.position.y == -0.02937078552673521
|
||||
assert state.position.z == 0.002311063475327252
|
||||
assert state.velocity.x == -7.251158929833104
|
||||
assert state.velocity.y == -12.559375680227724
|
||||
assert state.velocity.z == -105.91539687686381
|
||||
assert state.accel.x == -8.337034766251843e-11
|
||||
assert state.accel.y == -2.842170943040401e-10
|
||||
assert state.accel.z == 1.2126596023639044e-10
|
||||
}
|
||||
|
||||
pub fn test_increment() {
|
||||
mut state := SimState{
|
||||
...sim.sim_test_mock_state
|
||||
}
|
||||
|
||||
delta_t := 0.0005
|
||||
state.increment(delta_t, sim.sim_test_mock_params)
|
||||
assert state.position.x == -0.016957230930171364
|
||||
assert state.position.y == -0.02937078552673524
|
||||
assert state.position.z == 0.0023110634753272796
|
||||
assert state.velocity.x == -7.251158929833044
|
||||
assert state.velocity.y == -12.559375680227637
|
||||
assert state.velocity.z == -105.9153968768638
|
||||
assert state.accel.x == 1.2126596023639044e-10
|
||||
assert state.accel.y == 1.7431981783981126e-10
|
||||
assert state.accel.z == 3.031649005909761e-11
|
||||
}
|
||||
50
examples/pendulum-simulation/modules/sim/vec.v
Normal file
@@ -0,0 +1,50 @@
|
||||
module sim
|
||||
|
||||
import math
|
||||
|
||||
// Vector3D is a 3D vector
|
||||
pub struct Vector3D {
|
||||
x f64
|
||||
y f64
|
||||
z f64
|
||||
}
|
||||
|
||||
// vector creates a Vector3D passing x,y,z as parameters
|
||||
pub fn vector(data Vector3D) Vector3D {
|
||||
return Vector3D{
|
||||
...data
|
||||
}
|
||||
}
|
||||
|
||||
// addition
|
||||
pub fn (v Vector3D) + (v2 Vector3D) Vector3D {
|
||||
return Vector3D{
|
||||
x: v.x + v2.x
|
||||
y: v.y + v2.y
|
||||
z: v.z + v2.z
|
||||
}
|
||||
}
|
||||
|
||||
// dot product
|
||||
pub fn (v Vector3D) * (v2 Vector3D) f64 {
|
||||
return (v.x * v2.x) + (v.y * v2.y) + (v.z * v2.z)
|
||||
}
|
||||
|
||||
// scale gets a scaled vector
|
||||
pub fn (v Vector3D) scale(scalar f64) Vector3D {
|
||||
return Vector3D{
|
||||
x: v.x * scalar
|
||||
y: v.y * scalar
|
||||
z: v.z * scalar
|
||||
}
|
||||
}
|
||||
|
||||
// norm_squared returns the square of the norm of the vector
|
||||
pub fn (v Vector3D) norm_squared() f64 {
|
||||
return v * v
|
||||
}
|
||||
|
||||
// norm returns the norm of the vector
|
||||
pub fn (v Vector3D) norm() f64 {
|
||||
return math.sqrt(v.norm_squared())
|
||||
}
|
||||
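Vector3D overloads + for addition and * for the dot product, which is what lets the force code above read close to the underlying math. A minimal sketch (the values are arbitrary):

module main

import sim

fn main() {
	a := sim.vector(x: 1.0, y: 2.0, z: 3.0)
	b := sim.vector(x: -1.0, y: 0.5, z: 0.0)
	println('a + b = ${a + b}') // overloaded +
	println('a . b = ${a * b}') // overloaded * is the dot product
	println('|a| = ${a.norm()}')
	println('2a = ${a.scale(2.0)}')
}
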
64
examples/pendulum-simulation/modules/sim/vec_test.v
Normal file
@@ -0,0 +1,64 @@
|
||||
module sim
|
||||
|
||||
fn test_add() {
|
||||
v := vector(
|
||||
x: -0.016957230930171364
|
||||
y: -0.02937078552673521
|
||||
z: 0.002311063475327252
|
||||
)
|
||||
result := v + v
|
||||
expected := vector(
|
||||
x: -0.03391446186034273
|
||||
y: -0.05874157105347042
|
||||
z: 0.004622126950654504
|
||||
)
|
||||
assert result == expected
|
||||
}
|
||||
|
||||
fn test_dot() {
|
||||
v := vector(
|
||||
x: -0.016957230930171364
|
||||
y: -0.02937078552673521
|
||||
z: 0.002311063475327252
|
||||
)
|
||||
result := v * v
|
||||
expected := 0.0011555317376636305
|
||||
assert result == expected
|
||||
}
|
||||
|
||||
fn test_scale() {
|
||||
v := vector(
|
||||
x: -0.016957230930171364
|
||||
y: -0.02937078552673521
|
||||
z: 0.002311063475327252
|
||||
)
|
||||
result := v.scale(2.0)
|
||||
expected := vector(
|
||||
x: -0.03391446186034273
|
||||
y: -0.05874157105347042
|
||||
z: 0.004622126950654504
|
||||
)
|
||||
assert result == expected
|
||||
}
|
||||
|
||||
fn test_norm_squared() {
|
||||
v := vector(
|
||||
x: -0.016957230930171364
|
||||
y: -0.02937078552673521
|
||||
z: 0.002311063475327252
|
||||
)
|
||||
result := v.norm_squared()
|
||||
expected := 0.0011555317376636305
|
||||
assert result == expected
|
||||
}
|
||||
|
||||
fn test_norm() {
|
||||
v := vector(
|
||||
x: -0.016957230930171364
|
||||
y: -0.02937078552673521
|
||||
z: 0.002311063475327252
|
||||
)
|
||||
result := v.norm()
|
||||
expected := 0.033993113091678295
|
||||
assert result == expected
|
||||
}
|
||||
67
examples/pendulum-simulation/modules/sim/worker.v
Normal file
@@ -0,0 +1,67 @@
|
||||
module sim
|
||||
|
||||
import math
|
||||
import benchmark
|
||||
|
||||
const (
|
||||
max_iterations = 1000
|
||||
simulation_delta_t = 0.0005
|
||||
)
|
||||
|
||||
pub struct SimRequest {
|
||||
params SimParams
|
||||
state SimState
|
||||
pub:
|
||||
id int
|
||||
}
|
||||
|
||||
pub struct SimResult {
|
||||
state SimState
|
||||
pub:
|
||||
id int
|
||||
magnet1_distance f64
|
||||
magnet2_distance f64
|
||||
magnet3_distance f64
|
||||
}
|
||||
|
||||
pub fn sim_worker(id int, request_chan chan &SimRequest, result_channels []chan &SimResult) {
|
||||
mut bmark := benchmark.new_benchmark()
|
||||
for {
|
||||
request := <-request_chan or { break }
|
||||
bmark.step()
|
||||
result := compute_result(request)
|
||||
for ch in result_channels {
|
||||
ch <- result
|
||||
}
|
||||
bmark.ok()
|
||||
}
|
||||
bmark.stop()
|
||||
println(bmark.total_message(@FN + ': worker $id'))
|
||||
}
|
||||
|
||||
pub fn compute_result(request SimRequest) &SimResult {
|
||||
mut state := request.state
|
||||
params := request.params
|
||||
|
||||
for _ in 0 .. sim.max_iterations {
|
||||
state.increment(sim.simulation_delta_t, params)
|
||||
if state.done() {
|
||||
println('done!')
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
m1_dist := params.get_magnet_dist(0, state)
|
||||
m2_dist := params.get_magnet_dist(2.0 * math.pi / 3.0, state)
|
||||
m3_dist := params.get_magnet_dist(4.0 * math.pi / 3.0, state)
|
||||
|
||||
id := request.id
|
||||
|
||||
return &SimResult{
|
||||
id: id
|
||||
state: state
|
||||
magnet1_distance: m1_dist
|
||||
magnet2_distance: m2_dist
|
||||
magnet3_distance: m3_dist
|
||||
}
|
||||
}
|
||||
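compute_result runs one full simulation for a single request, stepping the state up to max_iterations times with simulation_delta_t, and reports the final distances to the three magnets. A minimal single-pixel sketch, mirroring what worker_test.v below does (the starting position is arbitrary):

module main

import sim

fn main() {
	params := sim.sim_params()
	mut state := sim.new_state(position: sim.vector(x: 0.05, y: -0.02, z: 0.0))
	state.satisfy_rope_constraint(params)
	request := sim.SimRequest{
		id: 0
		state: state
		params: params
	}
	result := sim.compute_result(request)
	println('magnet distances: $result.magnet1_distance $result.magnet2_distance $result.magnet3_distance')
}
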
63
examples/pendulum-simulation/modules/sim/worker_test.v
Normal file
@@ -0,0 +1,63 @@
|
||||
module sim
|
||||
|
||||
const (
|
||||
worker_test_mock_params = SimParams{
|
||||
rope_length: 0.25
|
||||
bearing_mass: 0.03
|
||||
magnet_spacing: 0.05
|
||||
magnet_height: 0.03
|
||||
magnet_strength: 10
|
||||
gravity: 4.9
|
||||
}
|
||||
worker_test_mock_state = SimState{
|
||||
position: vector(
|
||||
x: -0.016957230930171364
|
||||
y: -0.02937078552673521
|
||||
z: 0.002311063475327252
|
||||
)
|
||||
velocity: vector(
|
||||
x: -7.251158929833104
|
||||
y: -12.559375680227724
|
||||
z: -105.91539687686381
|
||||
)
|
||||
accel: vector(
|
||||
x: -8.337034766251843e-11
|
||||
y: -2.842170943040401e-10
|
||||
z: 1.2126596023639044e-10
|
||||
)
|
||||
}
|
||||
)
|
||||
|
||||
fn test_compute_result() {
|
||||
request := SimRequest{
|
||||
id: 0
|
||||
params: sim.worker_test_mock_params
|
||||
state: sim.worker_test_mock_state
|
||||
}
|
||||
expected_state := SimState{
|
||||
position: vector(
|
||||
x: -0.01695723093017133
|
||||
y: -0.02937078552673517
|
||||
z: 0.002311063475327252
|
||||
)
|
||||
velocity: vector(
|
||||
x: -7.251158929832518
|
||||
y: -12.559375680226692
|
||||
z: -105.91539687685668
|
||||
)
|
||||
accel: vector(
|
||||
x: -3.789561257387201e-12
|
||||
y: 3.410605131648481e-11
|
||||
z: 3.031649005909761e-11
|
||||
)
|
||||
}
|
||||
expected := &SimResult{
|
||||
state: expected_state
|
||||
id: 0
|
||||
magnet1_distance: 0.07993696666249224
|
||||
magnet2_distance: 0.07993696666249223
|
||||
magnet3_distance: 0.03609361938278009
|
||||
}
|
||||
result := compute_result(request)
|
||||
assert result == expected
|
||||
}
|
||||
84
examples/pendulum-simulation/parallel.v
Normal file
@@ -0,0 +1,84 @@
|
||||
module main
|
||||
|
||||
import benchmark
|
||||
import sim
|
||||
import sim.args as simargs
|
||||
import sim.img
|
||||
|
||||
fn main() {
|
||||
args := simargs.parse_args() ? as simargs.ParallelArgs
|
||||
|
||||
img_settings := img.image_settings_from_grid(args.grid)
|
||||
|
||||
width := img_settings.width
|
||||
height := img_settings.height
|
||||
total_pixels := width * height
|
||||
|
||||
request_chan := chan &sim.SimRequest{cap: args.workers}
|
||||
result_chan := chan &sim.SimResult{cap: args.workers}
|
||||
|
||||
mut writer := img.ppm_writer_for_fname(args.filename, img_settings) ?
|
||||
mut image_writer := img.new_image_writer(mut writer, img_settings)
|
||||
|
||||
mut workers := []thread{cap: args.workers}
|
||||
mut bmark := benchmark.start()
|
||||
|
||||
defer {
|
||||
request_chan.close()
|
||||
sim.log('Waiting for workers to finish')
|
||||
workers.wait()
|
||||
result_chan.close()
|
||||
bmark.measure(@FN)
|
||||
sim.log('Closing writer file')
|
||||
writer.close()
|
||||
sim.log('Done!')
|
||||
}
|
||||
|
||||
for id in 0 .. args.workers {
|
||||
workers << go sim.sim_worker(id, request_chan, [result_chan])
|
||||
}
|
||||
|
||||
mut x := 0
|
||||
mut y := 0
|
||||
mut request_index := 0
|
||||
|
||||
for {
|
||||
// setup state conditions
|
||||
position := sim.vector(
|
||||
x: 0.1 * ((f64(x) - 0.5 * f64(width - 1)) / f64(width - 1))
|
||||
y: 0.1 * ((f64(y) - 0.5 * f64(height - 1)) / f64(height - 1))
|
||||
z: 0.0
|
||||
)
|
||||
velocity := sim.vector(x: 0, y: 0, z: 0)
|
||||
|
||||
mut state := sim.new_state(
|
||||
position: position
|
||||
velocity: velocity
|
||||
)
|
||||
|
||||
state.satisfy_rope_constraint(args.params)
|
||||
request := &sim.SimRequest{
|
||||
id: request_index
|
||||
state: state
|
||||
params: args.params
|
||||
}
|
||||
select {
|
||||
result := <-result_chan {
|
||||
image_writer.handle(result) or { break }
|
||||
}
|
||||
else {
|
||||
if request.id == total_pixels {
|
||||
continue
|
||||
}
|
||||
request_chan <- request
|
||||
x++
|
||||
if x == width {
|
||||
x = 0
|
||||
y++
|
||||
sim.log('y: ${y + 1}')
|
||||
}
|
||||
request_index++
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
50
examples/pendulum-simulation/parallel_with_iw.v
Normal file
@@ -0,0 +1,50 @@
|
||||
module main
|
||||
|
||||
import benchmark
|
||||
import sim
|
||||
import sim.args as simargs
|
||||
import sim.img
|
||||
|
||||
fn main() {
|
||||
args := simargs.parse_args(extra_workers: 1) ? as simargs.ParallelArgs
|
||||
|
||||
img_settings := img.image_settings_from_grid(args.grid)
|
||||
|
||||
request_chan := chan &sim.SimRequest{cap: args.workers}
|
||||
result_chan := chan &sim.SimResult{cap: args.workers}
|
||||
|
||||
mut writer := img.ppm_writer_for_fname(args.filename, img_settings) ?
|
||||
|
||||
mut workers := []thread{cap: args.workers + 1}
|
||||
mut bmark := benchmark.start()
|
||||
|
||||
defer {
|
||||
image_worker := workers.pop()
|
||||
request_chan.close()
|
||||
sim.log('Waiting for workers to finish')
|
||||
workers.wait()
|
||||
result_chan.close()
|
||||
sim.log('Waiting for image writer to finish')
|
||||
image_worker.wait()
|
||||
sim.log('Workers finished!')
|
||||
bmark.measure(@FN)
|
||||
sim.log('Closing writer file')
|
||||
writer.close()
|
||||
sim.log('Done!')
|
||||
}
|
||||
|
||||
for id in 0 .. args.workers {
|
||||
workers << go sim.sim_worker(id, request_chan, [result_chan])
|
||||
}
|
||||
|
||||
workers << go img.image_worker(mut writer, result_chan, img_settings)
|
||||
|
||||
handle_request := fn [request_chan] (request &sim.SimRequest) ? {
|
||||
request_chan <- request
|
||||
}
|
||||
|
||||
sim.run(args.params,
|
||||
grid: args.grid
|
||||
on_request: sim.SimRequestHandler(handle_request)
|
||||
)
|
||||
}
|
||||
30
examples/pendulum-simulation/sequential.v
Normal file
@@ -0,0 +1,30 @@
|
||||
module main
|
||||
|
||||
import benchmark
|
||||
import sim
|
||||
import sim.args as simargs
|
||||
import sim.img
|
||||
|
||||
fn main() {
|
||||
args := simargs.parse_args(sequential: true) ? as simargs.SequentialArgs
|
||||
|
||||
mut bmark := benchmark.start()
|
||||
defer {
|
||||
bmark.measure(@FN)
|
||||
}
|
||||
|
||||
mut writer := img.ppm_writer_for_fname(args.filename, img.image_settings_from_grid(args.grid)) ?
|
||||
defer {
|
||||
writer.close()
|
||||
}
|
||||
|
||||
handle_request := fn [mut writer] (request &sim.SimRequest) ? {
|
||||
result := sim.compute_result(request)
|
||||
pixel := img.compute_pixel(result)
|
||||
return writer.handle_pixel(pixel)
|
||||
}
|
||||
|
||||
sim.run(args.params, grid: args.grid, on_request: sim.SimRequestHandler(handle_request))
|
||||
|
||||
writer.write() ?
|
||||
}
|
||||
@@ -1,366 +0,0 @@
|
||||
// * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
|
||||
// sim.v * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
|
||||
// created by: jordan bonecutter * * * * * * * * * * * * * * * * * * *
|
||||
// jpbonecutter@gmail.com * * * * * * * * * * * * * * * * * * * * * *
|
||||
// * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
|
||||
//
|
||||
// I wrote the pendulum simulator to learn V, I think it could be a
|
||||
// good addition to the examples directory.
|
||||
// Essentially, the pendulum sim runs a simulation of a pendulum with
|
||||
// a metallic tip swinging over three magnets.
|
||||
// I run this simulation with the initial position at each pixel in an
|
||||
// image and color the pixel according to the magnet over which it
|
||||
// finally rests.
|
||||
// I used some fun features in V like coroutines, channels,
|
||||
// struct embedding, mutability, methods, and the like.
|
||||
import math
|
||||
import os
|
||||
import term
|
||||
import runtime
|
||||
|
||||
// customisable through setting VJOBS
|
||||
const parallel_workers = runtime.nr_jobs()
|
||||
|
||||
const width = 800
|
||||
|
||||
const height = 600
|
||||
|
||||
struct Vec3D {
|
||||
x f64
|
||||
y f64
|
||||
z f64
|
||||
}
|
||||
|
||||
fn (v Vec3D) add(v2 Vec3D) Vec3D {
|
||||
return Vec3D{
|
||||
x: v.x + v2.x
|
||||
y: v.y + v2.y
|
||||
z: v.z + v2.z
|
||||
}
|
||||
}
|
||||
|
||||
fn (v Vec3D) dot(v2 Vec3D) f64 {
|
||||
return (v.x * v2.x) + (v.y * v2.y) + (v.z * v2.z)
|
||||
}
|
||||
|
||||
fn (v Vec3D) scale(scalar f64) Vec3D {
|
||||
return Vec3D{
|
||||
x: v.x * scalar
|
||||
y: v.y * scalar
|
||||
z: v.z * scalar
|
||||
}
|
||||
}
|
||||
|
||||
fn (v Vec3D) norm_squared() f64 {
|
||||
return v.dot(v)
|
||||
}
|
||||
|
||||
fn (v Vec3D) norm() f64 {
|
||||
return math.sqrt(v.norm_squared())
|
||||
}
|
||||
|
||||
struct SimState {
|
||||
mut:
|
||||
position Vec3D
|
||||
velocity Vec3D
|
||||
accel Vec3D
|
||||
}
|
||||
|
||||
// magnets lie at [
|
||||
// math.cos(index * 2 * math.pi / 3) * magnet_spacing
|
||||
// math.sin(index * 2 * math.pi / 3) * magnet_spacing
|
||||
// -magnet_height
|
||||
// ]
|
||||
struct SimParams {
|
||||
rope_length f64
|
||||
bearing_mass f64
|
||||
magnet_spacing f64
|
||||
magnet_height f64
|
||||
magnet_strength f64
|
||||
gravity f64
|
||||
}
|
||||
|
||||
fn (params SimParams) get_rope_vector(state SimState) Vec3D {
|
||||
rope_origin := Vec3D{
|
||||
x: 0
|
||||
y: 0
|
||||
z: params.rope_length
|
||||
}
|
||||
|
||||
return state.position.add(rope_origin.scale(-1))
|
||||
}
|
||||
|
||||
fn (mut state SimState) satisfy_rope_constraint(params SimParams) {
|
||||
mut rope_vector := params.get_rope_vector(state)
|
||||
rope_vector = rope_vector.scale(params.rope_length / rope_vector.norm())
|
||||
state.position = Vec3D{
|
||||
x: 0
|
||||
y: 0
|
||||
z: params.rope_length
|
||||
}.add(rope_vector)
|
||||
}
|
||||
|
||||
fn (params SimParams) get_grav_force(state SimState) Vec3D {
|
||||
return Vec3D{
|
||||
x: 0
|
||||
y: 0
|
||||
z: -params.bearing_mass * params.gravity
|
||||
}
|
||||
}
|
||||
|
||||
fn (params SimParams) get_magnet_position(theta f64) Vec3D {
|
||||
return Vec3D{
|
||||
x: math.cos(theta) * params.magnet_spacing
|
||||
y: math.sin(theta) * params.magnet_spacing
|
||||
z: -params.magnet_height
|
||||
}
|
||||
}
|
||||
|
||||
fn (params SimParams) get_magnet_force(theta f64, state SimState) Vec3D {
|
||||
magnet_position := params.get_magnet_position(theta)
|
||||
mut diff := magnet_position.add(state.position.scale(-1))
|
||||
distance_squared := diff.norm_squared()
|
||||
diff = diff.scale(1.0 / math.sqrt(distance_squared))
|
||||
return diff.scale(params.magnet_strength / distance_squared)
|
||||
}
|
||||
|
||||
fn (params SimParams) get_magnet_dist(theta f64, state SimState) f64 {
|
||||
return params.get_magnet_position(theta).add(state.position.scale(-1)).norm()
|
||||
}
|
||||
|
||||
fn (params SimParams) get_magnet1_force(state SimState) Vec3D {
|
||||
return params.get_magnet_force(0.0 * math.pi / 3.0, state)
|
||||
}
|
||||
|
||||
fn (params SimParams) get_magnet2_force(state SimState) Vec3D {
|
||||
return params.get_magnet_force(2.0 * math.pi / 3.0, state)
|
||||
}
|
||||
|
||||
fn (params SimParams) get_magnet3_force(state SimState) Vec3D {
|
||||
return params.get_magnet_force(4.0 * math.pi / 3.0, state)
|
||||
}
|
||||
|
||||
fn (params SimParams) get_tension_force(state SimState, f_passive Vec3D) Vec3D {
|
||||
rope_vector := params.get_rope_vector(state)
|
||||
rope_vector_norm := rope_vector.scale(1.0 / rope_vector.norm())
|
||||
return rope_vector_norm.scale(-1.0 * rope_vector_norm.dot(f_passive))
|
||||
}
|
||||
|
||||
fn (mut state SimState) increment(delta_t f64, params SimParams) {
|
||||
// basically just add up all forces =>
|
||||
// get an accelleration =>
|
||||
// add to velocity =>
|
||||
// ensure rope constraint is satisfied
|
||||
|
||||
// force due to gravity
|
||||
f_gravity := params.get_grav_force(state)
|
||||
|
||||
// force due to each magnet
|
||||
f_magnet1 := params.get_magnet1_force(state)
|
||||
|
||||
// force due to each magnet
|
||||
f_magnet2 := params.get_magnet2_force(state)
|
||||
|
||||
// force due to each magnet
|
||||
f_magnet3 := params.get_magnet3_force(state)
|
||||
|
||||
// passive forces
|
||||
f_passive := f_gravity.add(f_magnet1.add(f_magnet2.add(f_magnet3)))
|
||||
|
||||
// force due to tension of the rope
|
||||
f_tension := params.get_tension_force(state, f_passive)
|
||||
|
||||
// sum up all the fores
|
||||
f_sum := f_tension.add(f_passive)
|
||||
|
||||
// get the acceleration
|
||||
accel := f_sum.scale(1.0 / params.bearing_mass)
|
||||
state.accel = accel
|
||||
|
||||
// update the velocity
|
||||
state.velocity = state.velocity.add(accel.scale(delta_t))
|
||||
|
||||
// update the position
|
||||
state.position = state.position.add(state.velocity.scale(delta_t))
|
||||
|
||||
// ensure the position satisfies rope constraint
|
||||
state.satisfy_rope_constraint(params)
|
||||
}
|
||||
|
||||
fn (state SimState) done() bool {
|
||||
return state.velocity.norm() < 0.05 && state.accel.norm() < 0.01
|
||||
}
|
||||
|
||||
struct PPMWriter {
|
||||
mut:
|
||||
file os.File
|
||||
}
|
||||
|
||||
struct ImageSettings {
|
||||
width int
|
||||
height int
|
||||
}
|
||||
|
||||
struct Pixel {
|
||||
r byte
|
||||
g byte
|
||||
b byte
|
||||
}
|
||||
|
||||
fn (mut writer PPMWriter) start_for_file(fname string, settings ImageSettings) {
|
||||
writer.file = os.create(fname) or { panic("can't create file $fname") }
|
||||
writer.file.writeln('P6 $settings.width $settings.height 255') or {}
|
||||
}
|
||||
|
||||
fn (mut writer PPMWriter) next_pixel(p Pixel) {
|
||||
writer.file.write([p.r, p.g, p.b]) or {}
|
||||
}
|
||||
|
||||
fn (mut writer PPMWriter) finish() {
|
||||
writer.file.close()
|
||||
}
|
||||
|
||||
fn sim_runner(mut state SimState, params SimParams) Pixel {
|
||||
// do the simulation!
|
||||
for _ in 0 .. 1000 {
|
||||
state.increment(0.0005, params)
|
||||
if state.done() {
|
||||
println('done!')
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
// find the closest magnet
|
||||
m1_dist := params.get_magnet_dist(0, state)
|
||||
m2_dist := params.get_magnet_dist(2.0 * math.pi / 3.0, state)
|
||||
m3_dist := params.get_magnet_dist(4.0 * math.pi / 3.0, state)
|
||||
|
||||
if m1_dist < m2_dist && m1_dist < m3_dist {
|
||||
return Pixel{
|
||||
r: 255
|
||||
g: 0
|
||||
b: 0
|
||||
}
|
||||
} else if m2_dist < m1_dist && m2_dist < m3_dist {
|
||||
return Pixel{
|
||||
r: 0
|
||||
g: 255
|
||||
b: 0
|
||||
}
|
||||
} else {
|
||||
return Pixel{
|
||||
r: 0
|
||||
g: 0
|
||||
b: 255
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
struct SimResult {
|
||||
id u64
|
||||
p Pixel
|
||||
}
|
||||
|
||||
struct SimRequest {
|
||||
id u64
|
||||
params SimParams
|
||||
mut:
|
||||
initial SimState
|
||||
}
|
||||
|
||||
fn sim_worker(request_chan chan SimRequest, result_chan chan SimResult) {
|
||||
// serve sim requests as they come in
|
||||
for {
|
||||
mut request := <-request_chan or { break }
|
||||
|
||||
result_chan <- SimResult{
|
||||
id: request.id
|
||||
p: sim_runner(mut request.initial, request.params)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
struct ValidPixel {
|
||||
Pixel
|
||||
mut:
|
||||
valid bool
|
||||
}
|
||||
|
||||
fn image_worker(mut writer PPMWriter, result_chan chan SimResult, total_pixels u64) {
|
||||
// as new pixels come in, write them to the image file
|
||||
mut current_index := u64(0)
|
||||
mut pixel_buf := []ValidPixel{len: int(total_pixels), init: ValidPixel{
|
||||
valid: false
|
||||
}}
|
||||
for {
|
||||
result := <-result_chan or { break }
|
||||
pixel_buf[result.id].Pixel = result.p
|
||||
pixel_buf[result.id].valid = true
|
||||
|
||||
for current_index < total_pixels && pixel_buf[current_index].valid {
|
||||
writer.next_pixel(pixel_buf[current_index].Pixel)
|
||||
current_index++
|
||||
}
|
||||
|
||||
if current_index >= total_pixels {
|
||||
break
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn main() {
|
||||
params := SimParams{
|
||||
rope_length: 0.25
|
||||
bearing_mass: 0.03
|
||||
magnet_spacing: 0.05
|
||||
magnet_height: 0.03
|
||||
magnet_strength: 10.0
|
||||
gravity: 4.9
|
||||
}
|
||||
|
||||
mut writer := PPMWriter{}
|
||||
writer.start_for_file('test.ppm', ImageSettings{
|
||||
width: width
|
||||
height: height
|
||||
})
|
||||
defer {
|
||||
writer.finish()
|
||||
}
|
||||
|
||||
result_chan := chan SimResult{}
|
||||
request_chan := chan SimRequest{}
|
||||
|
||||
// start a worker on each core
|
||||
for _ in 0 .. parallel_workers {
|
||||
go sim_worker(request_chan, result_chan)
|
||||
}
|
||||
|
||||
go fn (request_chan chan SimRequest, params SimParams) {
|
||||
mut index := u64(0)
|
||||
println('')
|
||||
for y in 0 .. height {
|
||||
term.clear_previous_line()
|
||||
println('Line: $y')
|
||||
for x in 0 .. width {
|
||||
// setup initial conditions
|
||||
mut state := SimState{}
|
||||
state.position = Vec3D{
|
||||
x: 0.1 * ((f64(x) - 0.5 * f64(width - 1)) / f64(width - 1))
|
||||
y: 0.1 * ((f64(y) - 0.5 * f64(height - 1)) / f64(height - 1))
|
||||
z: 0.0
|
||||
}
|
||||
state.velocity = Vec3D{}
|
||||
state.satisfy_rope_constraint(params)
|
||||
request_chan <- SimRequest{
|
||||
id: index
|
||||
initial: state
|
||||
params: params
|
||||
}
|
||||
index++
|
||||
}
|
||||
}
|
||||
request_chan.close()
|
||||
}(request_chan, params)
|
||||
|
||||
image_worker(mut writer, result_chan, width * height)
|
||||
}
|
||||
@@ -322,7 +322,7 @@ fn my_init(mut app App) {
|
||||
|
||||
// 3d pipeline
|
||||
mut pipdesc := gfx.PipelineDesc{}
|
||||
unsafe { C.memset(&pipdesc, 0, sizeof(pipdesc)) }
|
||||
unsafe { vmemset(&pipdesc, 0, int(sizeof(pipdesc))) }
|
||||
|
||||
color_state := gfx.ColorState{
|
||||
blend: gfx.BlendState{
|
||||
@@ -379,10 +379,6 @@ fn my_init(mut app App) {
|
||||
}
|
||||
}
|
||||
|
||||
fn cleanup(mut app App) {
|
||||
gfx.shutdown()
|
||||
}
|
||||
|
||||
/******************************************************************************
|
||||
*
|
||||
* event
|
||||
@@ -424,7 +420,6 @@ fn main() {
|
||||
bg_color: bg_color
|
||||
frame_fn: frame
|
||||
init_fn: my_init
|
||||
cleanup_fn: cleanup
|
||||
event_fn: my_event_manager
|
||||
)
|
||||
|
||||
|
||||
@@ -277,7 +277,7 @@ fn init_cube_glsl(mut app App) {
|
||||
]
|
||||
|
||||
mut vert_buffer_desc := gfx.BufferDesc{label: c'cube-vertices'}
|
||||
unsafe { C.memset(&vert_buffer_desc, 0, sizeof(vert_buffer_desc)) }
|
||||
unsafe { vmemset(&vert_buffer_desc, 0, int(sizeof(vert_buffer_desc))) }
|
||||
|
||||
vert_buffer_desc.size = usize(vertices.len * int(sizeof(Vertex_t)))
|
||||
vert_buffer_desc.data = gfx.Range{
|
||||
@@ -300,7 +300,7 @@ fn init_cube_glsl(mut app App) {
|
||||
]
|
||||
|
||||
mut index_buffer_desc := gfx.BufferDesc{label: c'cube-indices'}
|
||||
unsafe { C.memset(&index_buffer_desc, 0, sizeof(index_buffer_desc)) }
|
||||
unsafe { vmemset(&index_buffer_desc, 0, int(sizeof(index_buffer_desc))) }
|
||||
|
||||
index_buffer_desc.size = usize(indices.len * int(sizeof(u16)))
|
||||
index_buffer_desc.data = gfx.Range{
|
||||
@@ -315,7 +315,7 @@ fn init_cube_glsl(mut app App) {
|
||||
shader := gfx.make_shader(C.cube_shader_desc(C.sg_query_backend()))
|
||||
|
||||
mut pipdesc := gfx.PipelineDesc{}
|
||||
unsafe { C.memset(&pipdesc, 0, sizeof(pipdesc)) }
|
||||
unsafe { vmemset(&pipdesc, 0, int(sizeof(pipdesc))) }
|
||||
|
||||
pipdesc.layout.buffers[0].stride = int(sizeof(Vertex_t))
|
||||
// the constants [C.ATTR_vs_pos, C.ATTR_vs_color0, C.ATTR_vs_texcoord0] are generated by sokol-shdc
|
||||
@@ -493,7 +493,7 @@ fn my_init(mut app App) {
|
||||
|
||||
// 3d pipeline
|
||||
mut pipdesc := gfx.PipelineDesc{}
|
||||
unsafe { C.memset(&pipdesc, 0, sizeof(pipdesc)) }
|
||||
unsafe { vmemset(&pipdesc, 0, int(sizeof(pipdesc))) }
|
||||
|
||||
color_state := gfx.ColorState{
|
||||
blend: gfx.BlendState{
|
||||
@@ -553,10 +553,6 @@ fn my_init(mut app App) {
|
||||
app.init_flag = true
|
||||
}
|
||||
|
||||
fn cleanup(mut app App) {
|
||||
gfx.shutdown()
|
||||
}
|
||||
|
||||
/******************************************************************************
|
||||
*
|
||||
* event
|
||||
@@ -601,7 +597,6 @@ fn main() {
|
||||
bg_color: bg_color
|
||||
frame_fn: frame
|
||||
init_fn: my_init
|
||||
cleanup_fn: cleanup
|
||||
event_fn: my_event_manager
|
||||
)
|
||||
|
||||
|
||||
@@ -156,7 +156,7 @@ fn init_cube_glsl(mut app App) {
|
||||
]
|
||||
|
||||
mut vert_buffer_desc := gfx.BufferDesc{label: c'cube-vertices'}
|
||||
unsafe { C.memset(&vert_buffer_desc, 0, sizeof(vert_buffer_desc)) }
|
||||
unsafe { vmemset(&vert_buffer_desc, 0, int(sizeof(vert_buffer_desc))) }
|
||||
|
||||
vert_buffer_desc.size = usize(vertices.len * int(sizeof(Vertex_t)))
|
||||
vert_buffer_desc.data = gfx.Range{
|
||||
@@ -178,7 +178,7 @@ fn init_cube_glsl(mut app App) {
|
||||
]
|
||||
|
||||
mut index_buffer_desc := gfx.BufferDesc{label: c'cube-indices'}
|
||||
unsafe {C.memset(&index_buffer_desc, 0, sizeof(index_buffer_desc))}
|
||||
unsafe {vmemset(&index_buffer_desc, 0, int(sizeof(index_buffer_desc)))}
|
||||
|
||||
index_buffer_desc.size = usize(indices.len * int(sizeof(u16)))
|
||||
index_buffer_desc.data = gfx.Range{
|
||||
@@ -193,7 +193,7 @@ fn init_cube_glsl(mut app App) {
|
||||
shader := gfx.make_shader(C.rt_shader_desc(C.sg_query_backend()))
|
||||
|
||||
mut pipdesc := gfx.PipelineDesc{}
|
||||
unsafe { C.memset(&pipdesc, 0, sizeof(pipdesc)) }
|
||||
unsafe { vmemset(&pipdesc, 0, int(sizeof(pipdesc))) }
|
||||
pipdesc.layout.buffers[0].stride = int(sizeof(Vertex_t))
|
||||
|
||||
// the constants [C.ATTR_vs_pos, C.ATTR_vs_color0, C.ATTR_vs_texcoord0] are generated by sokol-shdc
|
||||
@@ -372,10 +372,6 @@ fn my_init(mut app App) {
|
||||
app.init_flag = true
|
||||
}
|
||||
|
||||
fn cleanup(mut app App) {
|
||||
gfx.shutdown()
|
||||
}
|
||||
|
||||
/******************************************************************************
|
||||
* events handling
|
||||
******************************************************************************/
|
||||
@@ -412,7 +408,6 @@ fn main() {
|
||||
bg_color: bg_color
|
||||
frame_fn: frame
|
||||
init_fn: my_init
|
||||
cleanup_fn: cleanup
|
||||
event_fn: my_event_manager
|
||||
)
|
||||
|
||||
|
||||
@@ -158,7 +158,7 @@ fn init_cube_glsl_m(mut app App) {
|
||||
]
|
||||
|
||||
mut vert_buffer_desc := gfx.BufferDesc{label: c'cube-vertices'}
|
||||
unsafe { C.memset(&vert_buffer_desc, 0, sizeof(vert_buffer_desc)) }
|
||||
unsafe { vmemset(&vert_buffer_desc, 0, int(sizeof(vert_buffer_desc))) }
|
||||
vert_buffer_desc.size = usize(vertices.len * int(sizeof(Vertex_t)))
|
||||
vert_buffer_desc.data = gfx.Range{
|
||||
ptr: vertices.data
|
||||
@@ -180,7 +180,7 @@ fn init_cube_glsl_m(mut app App) {
|
||||
]
|
||||
|
||||
mut index_buffer_desc := gfx.BufferDesc{label: c'cube-indices'}
|
||||
unsafe { C.memset(&index_buffer_desc, 0, sizeof(index_buffer_desc)) }
|
||||
unsafe { vmemset(&index_buffer_desc, 0, int(sizeof(index_buffer_desc))) }
|
||||
index_buffer_desc.size = usize(indices.len * int(sizeof(u16)))
|
||||
index_buffer_desc.data = gfx.Range{
|
||||
ptr: indices.data
|
||||
@@ -193,7 +193,7 @@ fn init_cube_glsl_m(mut app App) {
|
||||
shader := gfx.make_shader(C.rt_march_shader_desc(C.sg_query_backend()))
|
||||
|
||||
mut pipdesc := gfx.PipelineDesc{}
|
||||
unsafe { C.memset(&pipdesc, 0, sizeof(pipdesc)) }
|
||||
unsafe { vmemset(&pipdesc, 0, int(sizeof(pipdesc))) }
|
||||
pipdesc.layout.buffers[0].stride = int(sizeof(Vertex_t))
|
||||
|
||||
// the constants [C.ATTR_vs_m_pos, C.ATTR_vs_m_color0, C.ATTR_vs_m_texcoord0] are generated by sokol-shdc
|
||||
@@ -213,7 +213,7 @@ fn init_cube_glsl_m(mut app App) {
|
||||
pipdesc.label = 'glsl_shader pipeline'.str
|
||||
|
||||
mut bind := gfx.Bindings{}
|
||||
unsafe { C.memset(&bind, 0, sizeof(bind)) }
|
||||
unsafe { vmemset(&bind, 0, int(sizeof(bind))) }
|
||||
bind.vertex_buffers[0] = vbuf
|
||||
bind.index_buffer = ibuf
|
||||
bind.fs_images[C.SLOT_tex] = app.texture
|
||||
@@ -264,7 +264,7 @@ fn init_cube_glsl_p(mut app App) {
|
||||
]
|
||||
|
||||
mut vert_buffer_desc := gfx.BufferDesc{label: c'cube-vertices'}
|
||||
unsafe { C.memset(&vert_buffer_desc, 0, sizeof(vert_buffer_desc)) }
|
||||
unsafe { vmemset(&vert_buffer_desc, 0, int(sizeof(vert_buffer_desc))) }
|
||||
vert_buffer_desc.size = usize(vertices.len * int(sizeof(Vertex_t)))
|
||||
vert_buffer_desc.data = gfx.Range{
|
||||
ptr: vertices.data
|
||||
@@ -287,7 +287,7 @@ fn init_cube_glsl_p(mut app App) {
|
||||
]
|
||||
|
||||
mut index_buffer_desc := gfx.BufferDesc{label: c'cube-indices'}
|
||||
unsafe { C.memset(&index_buffer_desc, 0, sizeof(index_buffer_desc)) }
|
||||
unsafe { vmemset(&index_buffer_desc, 0, int(sizeof(index_buffer_desc))) }
|
||||
index_buffer_desc.size = usize(indices.len * int(sizeof(u16)))
|
||||
index_buffer_desc.data = gfx.Range{
|
||||
ptr: indices.data
|
||||
@@ -300,7 +300,7 @@ fn init_cube_glsl_p(mut app App) {
|
||||
shader := gfx.make_shader(C.rt_puppy_shader_desc(C.sg_query_backend()))
|
||||
|
||||
mut pipdesc := gfx.PipelineDesc{}
|
||||
unsafe { C.memset(&pipdesc, 0, sizeof(pipdesc)) }
|
||||
unsafe { vmemset(&pipdesc, 0, int(sizeof(pipdesc))) }
|
||||
pipdesc.layout.buffers[0].stride = int(sizeof(Vertex_t))
|
||||
|
||||
// the constants [C.ATTR_vs_p_pos, C.ATTR_vs_p_color0, C.ATTR_vs_p_texcoord0] are generated by sokol-shdc
|
||||
@@ -321,7 +321,7 @@ fn init_cube_glsl_p(mut app App) {
|
||||
pipdesc.label = 'glsl_shader pipeline'.str
|
||||
|
||||
mut bind := gfx.Bindings{}
|
||||
unsafe { C.memset(&bind, 0, sizeof(bind)) }
|
||||
unsafe { vmemset(&bind, 0, int(sizeof(bind))) }
|
||||
bind.vertex_buffers[0] = vbuf
|
||||
bind.index_buffer = ibuf
|
||||
bind.fs_images[C.SLOT_tex] = app.texture
|
||||
@@ -560,10 +560,6 @@ fn my_init(mut app App) {
|
||||
app.init_flag = true
|
||||
}
|
||||
|
||||
fn cleanup(mut app App) {
|
||||
gfx.shutdown()
|
||||
}
|
||||
|
||||
/******************************************************************************
|
||||
* events handling
|
||||
******************************************************************************/
|
||||
@@ -606,7 +602,6 @@ fn main() {
|
||||
bg_color: bg_color
|
||||
frame_fn: frame
|
||||
init_fn: my_init
|
||||
cleanup_fn: cleanup
|
||||
event_fn: my_event_manager
|
||||
)
|
||||
|
||||
|
||||
@@ -173,7 +173,7 @@ fn init_cube_glsl_i(mut app App) {
]

mut vert_buffer_desc := gfx.BufferDesc{label: c'cube-vertices'}
unsafe {C.memset(&vert_buffer_desc, 0, sizeof(vert_buffer_desc))}
unsafe {vmemset(&vert_buffer_desc, 0, int(sizeof(vert_buffer_desc)))}
vert_buffer_desc.size = usize(vertices.len * int(sizeof(Vertex_t)))
vert_buffer_desc.data = gfx.Range{
ptr: vertices.data
@@ -184,7 +184,7 @@ fn init_cube_glsl_i(mut app App) {

/* create an instance buffer for the cube */
mut inst_buffer_desc := gfx.BufferDesc{label: c'instance-data'}
unsafe {C.memset(&inst_buffer_desc, 0, sizeof(inst_buffer_desc))}
unsafe {vmemset(&inst_buffer_desc, 0, int(sizeof(inst_buffer_desc)))}

inst_buffer_desc.size = usize(num_inst * int(sizeof(m4.Vec4)))
inst_buffer_desc.@type = .vertexbuffer
@@ -203,7 +203,7 @@ fn init_cube_glsl_i(mut app App) {
]

mut index_buffer_desc := gfx.BufferDesc{label: c'cube-indices'}
unsafe {C.memset(&index_buffer_desc, 0, sizeof(index_buffer_desc))}
unsafe {vmemset(&index_buffer_desc, 0, int(sizeof(index_buffer_desc)))}
index_buffer_desc.size = usize(indices.len * int(sizeof(u16)))
index_buffer_desc.data = gfx.Range{
ptr: indices.data
@@ -216,7 +216,7 @@ fn init_cube_glsl_i(mut app App) {
shader := gfx.make_shader(C.instancing_shader_desc(C.sg_query_backend()))

mut pipdesc := gfx.PipelineDesc{}
unsafe {C.memset(&pipdesc, 0, sizeof(pipdesc))}
unsafe {vmemset(&pipdesc, 0, int(sizeof(pipdesc)))}
pipdesc.layout.buffers[0].stride = int(sizeof(Vertex_t))

// the constants [C.ATTR_vs_m_pos, C.ATTR_vs_m_color0, C.ATTR_vs_m_texcoord0] are generated by sokol-shdc
@@ -246,7 +246,7 @@ fn init_cube_glsl_i(mut app App) {
pipdesc.label = "glsl_shader pipeline".str

mut bind := gfx.Bindings{}
unsafe {C.memset(&bind, 0, sizeof(bind))}
unsafe {vmemset(&bind, 0, int(sizeof(bind)))}
bind.vertex_buffers[0] = vbuf // vertex buffer
bind.vertex_buffers[1] = inst_buf // instance buffer
bind.index_buffer = ibuf
@@ -437,10 +437,6 @@ fn my_init(mut app App) {
app.init_flag = true
}

fn cleanup(mut app App) {
gfx.shutdown()
}

/******************************************************************************
* events handling
******************************************************************************/
@@ -495,7 +491,6 @@ fn main(){
bg_color: bg_color
frame_fn: frame
init_fn: my_init
cleanup_fn: cleanup
event_fn: my_event_manager
)

@@ -10,11 +10,11 @@ uniform vs_params {

in vec4 a_Position; // Per-vertex position information we will pass in.
in vec3 a_Normal; // Per-vertex normal information we will pass in.
in vec4 a_Color; // Per-vertex color information we will pass in.
//in vec4 a_Color; // Per-vertex color information we will pass in.
in vec2 a_Texcoord0;

out vec3 v_Position; // This will be passed into the fragment shader.
out vec4 v_Color; // This will be passed into the fragment shader.
//out vec4 v_Color; // This will be passed into the fragment shader.
out vec3 v_Normal; // This will be passed into the fragment shader.
out vec3 v_Normal1;
out vec2 uv; // This will be passed into the fragment shader.
@@ -25,7 +25,7 @@ void main()
// Transform the vertex into eye space.
v_Position = vec3(u_MVMatrix * a_Position);
// Pass through the color.
v_Color = a_Color;
//v_Color = a_Color;
// calc eye space normal
v_Normal = vec3(u_NMatrix * vec4(a_Normal, 1.0));
// texture coord
@@ -51,7 +51,7 @@ uniform fs_params {

};
in vec3 v_Position; // Interpolated position for this fragment.
in vec4 v_Color; // This is the color from the vertex shader interpolated across the triangle per fragment.
//in vec4 v_Color; // This is the color from the vertex shader interpolated across the triangle per fragment.
in vec3 v_Normal; // Interpolated normal for this fragment.
in vec3 v_Normal1;
in vec2 uv;
@@ -396,15 +396,15 @@ fn (mut obj_part ObjPart) load_materials() {
// vertex data struct
pub struct Vertex_pnct {
pub mut:
x f32 // poistion
y f32
z f32
nx f32 // normal
ny f32
nz f32
color u32 = 0xFFFFFFFF // color
u f32 // uv
v f32
x f32 // poistion
y f32
z f32
nx f32 // normal
ny f32
nz f32
// color u32 = 0xFFFFFFFF // color
u f32 // uv
v f32
// u u16 // for compatibility with D3D11
// v u16 // for compatibility with D3D11
}

@@ -71,7 +71,7 @@ pub fn (mut obj_part ObjPart) create_pipeline(in_part []int, shader gfx.Shader,
mut vert_buffer_desc := gfx.BufferDesc{
label: 0
}
unsafe { C.memset(&vert_buffer_desc, 0, sizeof(vert_buffer_desc)) }
unsafe { vmemset(&vert_buffer_desc, 0, int(sizeof(vert_buffer_desc))) }

vert_buffer_desc.size = usize(obj_buf.vbuf.len * int(sizeof(Vertex_pnct)))
vert_buffer_desc.data = gfx.Range{
@@ -87,7 +87,7 @@ pub fn (mut obj_part ObjPart) create_pipeline(in_part []int, shader gfx.Shader,
mut index_buffer_desc := gfx.BufferDesc{
label: 0
}
unsafe { C.memset(&index_buffer_desc, 0, sizeof(index_buffer_desc)) }
unsafe { vmemset(&index_buffer_desc, 0, int(sizeof(index_buffer_desc))) }

index_buffer_desc.size = usize(obj_buf.ibuf.len * int(sizeof(u32)))
index_buffer_desc.data = gfx.Range{
@@ -100,13 +100,13 @@ pub fn (mut obj_part ObjPart) create_pipeline(in_part []int, shader gfx.Shader,
ibuf := gfx.make_buffer(&index_buffer_desc)

mut pipdesc := gfx.PipelineDesc{}
unsafe { C.memset(&pipdesc, 0, sizeof(pipdesc)) }
unsafe { vmemset(&pipdesc, 0, int(sizeof(pipdesc))) }
pipdesc.layout.buffers[0].stride = int(sizeof(Vertex_pnct))

// the constants [C.ATTR_vs_a_Position, C.ATTR_vs_a_Color, C.ATTR_vs_a_Texcoord0] are generated by sokol-shdc
pipdesc.layout.attrs[C.ATTR_vs_a_Position].format = .float3 // x,y,z as f32
pipdesc.layout.attrs[C.ATTR_vs_a_Normal].format = .float3 // x,y,z as f32
pipdesc.layout.attrs[C.ATTR_vs_a_Color].format = .ubyte4n // color as u32
// pipdesc.layout.attrs[C.ATTR_vs_a_Color].format = .ubyte4n // color as u32
pipdesc.layout.attrs[C.ATTR_vs_a_Texcoord0].format = .float2 // u,v as f32
// pipdesc.layout.attrs[C.ATTR_vs_a_Texcoord0].format = .short2n // u,v as u16
pipdesc.index_type = .uint32
@@ -233,7 +233,6 @@ fn my_init(mut app App) {
}

fn cleanup(mut app App) {
gfx.shutdown()
/*
for _, mat in app.obj_part.texture {
obj.destroy_texture(mat)

@@ -19,6 +19,8 @@ mut:
}

pub fn (mut s System) init(sc SystemConfig) {
unsafe { s.pool.flags.set(.noslices) }
unsafe { s.bin.flags.set(.noslices) }
for i := 0; i < sc.pool; i++ {
p := new(vec2.Vec2{f32(s.width) * 0.5, f32(s.height) * 0.5})
s.bin << p

@@ -80,7 +80,7 @@ fn init(user_data voidptr) {
}
sgl.setup(&sgl_desc)
mut pipdesc := gfx.PipelineDesc{}
unsafe { C.memset(&pipdesc, 0, sizeof(pipdesc)) }
unsafe { vmemset(&pipdesc, 0, int(sizeof(pipdesc))) }

color_state := gfx.ColorState{
blend: gfx.BlendState{
examples/sokol/simple_shader_glsl/.gitignore (new vendored file, 1 line)
@@ -0,0 +1 @@
simple_shader.h

examples/sokol/simple_shader_glsl/simple_shader.glsl (new file, 36 lines)
@@ -0,0 +1,36 @@
// The following defines a vertex shader main function
@vs vs
in vec4 position;
in vec4 color0;

out vec4 color;

// You can add more functions here

void main() {
gl_Position = position;
color = color0;
}
@end

// The following defines a fragment shader main function
@fs fs
in vec4 color;
out vec4 frag_color;

// You can add more functions here

void main() {
frag_color = color;
}
@end

// The value after `@program` and before `vs fs` decides a part of the name
// of the C function you need to define in V. The value entered is suffixed `_shader_desc`
// in the generated C code. Thus the name for this becomes: `simple_shader_desc`.
// In V its signature then needs to be defined as:
// `fn C.simple_shader_desc(gfx.Backend) &gfx.ShaderDesc`. See `simple_shader.v` for the define.
//
// Running `v shader -v .` in this dir will also show you brief information
// about how to use the compiled shader.
@program simple vs fs
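As the comments above explain, the `@program simple vs fs` entry determines the name of the generated C function. A hedged sketch of the matching V side, using only declarations that also appear in `simple_shader.v` below:

import sokol.gfx

// declaration for the sokol-shdc generated function in simple_shader.h
fn C.simple_shader_desc(gfx.Backend) &gfx.ShaderDesc

// wrap shader creation for the currently active gfx backend
fn make_simple_shader() gfx.Shader {
	return gfx.make_shader(C.simple_shader_desc(gfx.query_backend()))
}

Running `v shader .` in the example directory generates `simple_shader.h` before the V program is built.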
examples/sokol/simple_shader_glsl/simple_shader.v (new file, 156 lines)
@@ -0,0 +1,156 @@
// Copyright(C) 2022 Lars Pontoppidan. All rights reserved.
// Use of this source code is governed by an MIT license file distributed with this software package
module main

// Example shader triangle adapted to V from https://github.com/floooh/sokol-samples/blob/1f2ad36/sapp/triangle-sapp.c
import sokol.sapp
import sokol.gfx

// Use `v shader` or `sokol-shdc` to generate the necessary `.h` file
// Using `v shader -v .` in this directory will show some additional
// info - and what you should include to make things work.
#flag -I @VMODROOT/.
#include "simple_shader.h"

// simple_shader_desc is a C function declaration defined by
// the `@program` entry in the `simple_shader.glsl` shader file.
// When the shader is compiled this function name is generated
// by the shader compiler for easier inclusion of universal shader code
// in C (and V) code.
fn C.simple_shader_desc(gfx.Backend) &gfx.ShaderDesc

// Vertex_t makes it possible to model vertex buffer data
// for use with the shader system
struct Vertex_t {
// Position
x f32
y f32
z f32
// Color
r f32
g f32
b f32
a f32
}

fn main() {
mut app := &App{
width: 800
height: 400
pass_action: gfx.create_clear_pass(0.0, 0.0, 0.0, 1.0) // This will create a black color as a default pass (window background color)
}
app.run()
}

struct App {
pass_action gfx.PassAction
mut:
width int
height int
shader_pipeline gfx.Pipeline
bind gfx.Bindings
}

fn (mut a App) run() {
title := 'V Simple Shader Example'
desc := sapp.Desc{
width: a.width
height: a.height
user_data: a
init_userdata_cb: init
frame_userdata_cb: frame
window_title: title.str
html5_canvas_name: title.str
cleanup_userdata_cb: cleanup
sample_count: 4 // Enables MSAA (Multisample anti-aliasing) x4 on rendered output, this can be omitted.
}
sapp.run(&desc)
}

fn init(user_data voidptr) {
mut app := &App(user_data)
mut desc := sapp.create_desc()

gfx.setup(&desc)

// `vertices` defines a vertex buffer with 3 vertices
// with 3 position fields XYZ and 4 color components RGBA -
// for drawing a multi-colored triangle.
//
// C code:
// float vertices[] = {
// // Positions // Colors
// 0.0, 0.5, 0.5, 1.0, 0.0, 0.0, 1.0,
// 0.5, -0.5, 0.5, 0.0, 1.0, 0.0, 1.0,
// -0.5, -0.5, 0.5, 0.0, 0.0, 1.0, 1.0
// };
//
// Array entries in the following V code are the equivalent
// of the C code entry described above:
vertices := [
Vertex_t{0.0, 0.5, 0.5, 1.0, 0.0, 0.0, 1.0},
Vertex_t{0.5, -0.5, 0.5, 0.0, 1.0, 0.0, 1.0},
Vertex_t{-0.5, -0.5, 0.5, 0.0, 0.0, 1.0, 1.0},
]

// Create a vertex buffer with the 3 vertices defined above.
mut vertex_buffer_desc := gfx.BufferDesc{
label: c'triangle-vertices'
}
unsafe { vmemset(&vertex_buffer_desc, 0, int(sizeof(vertex_buffer_desc))) }

vertex_buffer_desc.size = usize(vertices.len * int(sizeof(Vertex_t)))
vertex_buffer_desc.data = gfx.Range{
ptr: vertices.data
size: vertex_buffer_desc.size
}

app.bind.vertex_buffers[0] = gfx.make_buffer(&vertex_buffer_desc)

// Create shader from the code-generated sg_shader_desc (gfx.ShaderDesc in V).
// Note the function `C.simple_shader_desc()` (also defined above) - this is
// the function that returns the compiled shader code/description we have
// written in `simple_shader.glsl` and compiled with `v shader .` (`sokol-shdc`).
shader := gfx.make_shader(C.simple_shader_desc(gfx.query_backend()))

// Create a pipeline object (default render states are fine for triangle)
mut pipeline_desc := gfx.PipelineDesc{}
// This will zero the memory used to store the pipeline in.
unsafe { vmemset(&pipeline_desc, 0, int(sizeof(pipeline_desc))) }

// Populate the essential struct fields
pipeline_desc.shader = shader
// The vertex shader (`simple_shader.glsl`) takes 2 inputs:
// ```glsl
// in vec4 position;
// in vec4 color0;
// ```
// Also note the naming of the C.ATTR_* used as indices.
// They are the prefixed versions of the names of the input variables in the shader code.
// If they change in the shader code they will also change here.
pipeline_desc.layout.attrs[C.ATTR_vs_position].format = .float3 // x,y,z as f32
pipeline_desc.layout.attrs[C.ATTR_vs_color0].format = .float4 // r, g, b, a as f32
// The .label is optional but can aid debugging sokol shader related issues
// When things get complex - and you get tired :)
pipeline_desc.label = c'triangle-pipeline'

app.shader_pipeline = gfx.make_pipeline(&pipeline_desc)
}

fn cleanup(user_data voidptr) {
gfx.shutdown()
}

fn frame(user_data voidptr) {
mut app := &App(user_data)

gfx.begin_default_pass(&app.pass_action, sapp.width(), sapp.height())

gfx.apply_pipeline(app.shader_pipeline)
gfx.apply_bindings(&app.bind)

gfx.draw(0, 3, 1)

gfx.end_pass()
gfx.commit()
}
examples/sokol/simple_shader_glsl/v.mod (new empty file)
@@ -1,11 +1,42 @@
// Copyright (c) 2020 Lars Pontoppidan. All rights reserved.
// Use of this source code is governed by the MIT license distributed with this software.
// Don't use this editor for any serious work.
// A lot of funtionality is missing compared to your favourite editor :)
// A lot of functionality is missing compared to your favourite editor :)
import strings
import os
import math
import term.ui as tui
import encoding.utf8
import encoding.utf8.east_asian

const (
rune_digits = [`0`, `1`, `2`, `3`, `4`, `5`, `6`, `7`, `8`, `9`]

zero_width_unicode = [
`\u034f`, // U+034F COMBINING GRAPHEME JOINER
`\u061c`, // U+061C ARABIC LETTER MARK
`\u17b4`, // U+17B4 KHMER VOWEL INHERENT AQ
`\u17b5`, // U+17B5 KHMER VOWEL INHERENT AA
`\u200a`, // U+200A HAIR SPACE
`\u200b`, // U+200B ZERO WIDTH SPACE
`\u200c`, // U+200C ZERO WIDTH NON-JOINER
`\u200d`, // U+200D ZERO WIDTH JOINER
`\u200e`, // U+200E LEFT-TO-RIGHT MARK
`\u200f`, // U+200F RIGHT-TO-LEFT MARK
`\u2060`, // U+2060 WORD JOINER
`\u2061`, // U+2061 FUNCTION APPLICATION
`\u2062`, // U+2062 INVISIBLE TIMES
`\u2063`, // U+2063 INVISIBLE SEPARATOR
`\u2064`, // U+2064 INVISIBLE PLUS
`\u206a`, // U+206A INHIBIT SYMMETRIC SWAPPING
`\u206b`, // U+206B ACTIVATE SYMMETRIC SWAPPING
`\u206c`, // U+206C INHIBIT ARABIC FORM SHAPING
`\u206d`, // U+206D ACTIVATE ARABIC FORM SHAPING
`\u206e`, // U+206E NATIONAL DIGIT SHAPES
`\u206f`, // U+206F NOMINAL DIGIT SHAPES
`\ufeff`, // U+FEFF ZERO WIDTH NO-BREAK SPACE
]
)

enum Movement {
up
@@ -131,7 +162,7 @@ fn (b Buffer) raw() string {
}

fn (b Buffer) view(from int, to int) View {
l := b.cur_line()
l := b.cur_line().runes()
mut x := 0
for i := 0; i < b.cursor.pos_x && i < l.len; i++ {
if l[i] == `\t` {
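The pattern throughout this file's hunks is to convert a line to `[]rune` via `.runes()` before doing any cursor arithmetic, because V's `string.len` and string indexing are byte based while the editor's cursor positions count codepoints. A small hedged illustration of the difference, using only the builtin `runes()`/`string()` conversions the diff itself relies on:

line := 'héllo wörld'
println(line.len) // 13: byte length - the two accented letters take 2 bytes each
runes := line.runes()
println(runes.len) // 11: codepoint count, which is what the cursor logic needs
println(runes[..5].string()) // 'héllo': slice by codepoints, then convert back to a string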
@@ -167,6 +198,14 @@ fn (b Buffer) cur_line() string {
return b.line(b.cursor.pos_y)
}

fn (b Buffer) cur_slice() string {
line := b.line(b.cursor.pos_y).runes()
if b.cursor.pos_x == 0 || b.cursor.pos_x > line.len {
return ''
}
return line[..b.cursor.pos_x].string()
}

fn (b Buffer) cursor_index() int {
mut i := 0
for y, line in b.lines {
@@ -174,7 +213,7 @@ fn (b Buffer) cursor_index() int {
i += b.cursor.pos_x
break
}
i += line.len + 1
i += line.runes().len + 1
}
return i
}
@@ -185,22 +224,22 @@ fn (mut b Buffer) put(s string) {
if b.lines.len == 0 {
b.lines.prepend('')
}
line := b.lines[y]
l, r := line[..x], line[x..]
line := b.lines[y].runes()
l, r := line[..x].string(), line[x..].string()
if has_line_ending {
mut lines := s.split('\n')
lines[0] = l + lines[0]
lines[lines.len - 1] += r
b.lines.delete(y)
b.lines.insert(y, lines)
last := lines[lines.len - 1]
last := lines[lines.len - 1].runes()
b.cursor.set(last.len, y + lines.len - 1)
if s == '\n' {
b.cursor.set(0, b.cursor.pos_y)
}
} else {
b.lines[y] = l + s + r
b.cursor.set(x + s.len, y)
b.cursor.set(x + s.runes().len, y)
}
$if debug {
flat := s.replace('\n', r'\n')
@@ -217,24 +256,35 @@ fn (mut b Buffer) del(amount int) string {
if x == 0 && y == 0 {
return ''
}
} else if x >= b.cur_line().len && y >= b.lines.len - 1 {
} else if x >= b.cur_line().runes().len && y >= b.lines.len - 1 {
return ''
}
mut removed := ''
if amount < 0 { // backspace (backward)
i := b.cursor_index()
removed = b.raw()[i + amount..i]
raw_runes := b.raw().runes()
removed = raw_runes[i + amount..i].string()
mut left := amount * -1
for li := y; li >= 0 && left > 0; li-- {
ln := b.lines[li]
if left > ln.len {
ln := b.lines[li].runes()
if left == ln.len + 1 { // All of the line + 1 - since we're going backwards the "+1" is the line break delimiter.
b.lines.delete(li)
left = 0
if y == 0 {
return ''
}
line_above := b.lines[li - 1].runes()
b.cursor.pos_x = line_above.len
b.cursor.pos_y--
break
} else if left > ln.len {
b.lines.delete(li)
if ln.len == 0 { // line break delimiter
left--
if y == 0 {
return ''
}
line_above := b.lines[li - 1]
line_above := b.lines[li - 1].runes()
b.cursor.pos_x = line_above.len
} else {
left -= ln.len
@@ -245,22 +295,23 @@ fn (mut b Buffer) del(amount int) string {
if y == 0 {
return ''
}
line_above := b.lines[li - 1]
line_above := b.lines[li - 1].runes()
if ln.len == 0 { // at line break
b.lines.delete(li)
b.cursor.pos_y--
b.cursor.pos_x = line_above.len
} else {
b.lines[li - 1] = line_above + ln
b.lines[li - 1] = line_above.string() + ln.string()
b.lines.delete(li)
b.cursor.pos_y--
b.cursor.pos_x = line_above.len
}
} else if x == 1 {
b.lines[li] = b.lines[li][left..]
runes := b.lines[li].runes()
b.lines[li] = runes[left..].string()
b.cursor.pos_x = 0
} else {
b.lines[li] = ln[..x - left] + ln[x..]
b.lines[li] = ln[..x - left].string() + ln[x..].string()
b.cursor.pos_x -= left
}
left = 0
@@ -269,13 +320,20 @@ fn (mut b Buffer) del(amount int) string {
}
} else { // delete (forward)
i := b.cursor_index() + 1
removed = b.raw()[i - amount..i]
raw_buffer := b.raw().runes()
from_i := i
mut to_i := i + amount

if to_i > raw_buffer.len {
to_i = raw_buffer.len
}
removed = raw_buffer[from_i..to_i].string()
mut left := amount
for li := y; li >= 0 && left > 0; li++ {
ln := b.lines[li]
ln := b.lines[li].runes()
if x == ln.len { // at line end
if y + 1 <= b.lines.len {
b.lines[li] = ln + b.lines[y + 1]
b.lines[li] = ln.string() + b.lines[y + 1]
b.lines.delete(y + 1)
left--
b.del(left)
@@ -284,7 +342,7 @@ fn (mut b Buffer) del(amount int) string {
b.lines.delete(li)
left -= ln.len
} else {
b.lines[li] = ln[..x] + ln[x + left..]
b.lines[li] = ln[..x].string() + ln[x + left..].string()
left = 0
}
}
@@ -309,7 +367,7 @@ fn (mut b Buffer) free() {
fn (mut b Buffer) move_updown(amount int) {
b.cursor.move(0, amount)
// Check the move
line := b.cur_line()
line := b.cur_line().runes()
if b.cursor.pos_x > line.len {
b.cursor.set(line.len, b.cursor.pos_y)
}
@@ -317,7 +375,7 @@ fn (mut b Buffer) move_updown(amount int) {

// move_cursor will navigate the cursor within the buffer bounds
fn (mut b Buffer) move_cursor(amount int, movement Movement) {
cur_line := b.cur_line()
cur_line := b.cur_line().runes()
match movement {
.up {
if b.cursor.pos_y - amount >= 0 {
@@ -341,7 +399,7 @@ fn (mut b Buffer) move_cursor(amount int, movement Movement) {
if b.cursor.pos_x - amount >= 0 {
b.cursor.move(-amount, 0)
} else if b.cursor.pos_y > 0 {
b.cursor.set(b.line(b.cursor.pos_y - 1).len, b.cursor.pos_y - 1)
b.cursor.set(b.line(b.cursor.pos_y - 1).runes().len, b.cursor.pos_y - 1)
}
}
.right {
@@ -362,25 +420,26 @@ fn (mut b Buffer) move_cursor(amount int, movement Movement) {

fn (mut b Buffer) move_to_word(movement Movement) {
a := if movement == .left { -1 } else { 1 }
mut line := b.cur_line()

mut line := b.cur_line().runes()
mut x, mut y := b.cursor.pos_x, b.cursor.pos_y
if x + a < 0 && y > 0 {
y--
line = b.line(b.cursor.pos_y - 1)
line = b.line(b.cursor.pos_y - 1).runes()
x = line.len
} else if x + a >= line.len && y + 1 < b.lines.len {
y++
line = b.line(b.cursor.pos_y + 1)
line = b.line(b.cursor.pos_y + 1).runes()
x = 0
}
// first, move past all non-`a-zA-Z0-9_` characters
for x + a >= 0 && x + a < line.len && !(line[x + a].is_letter()
|| line[x + a].is_digit() || line[x + a] == `_`) {
for x + a >= 0 && x + a < line.len && !(utf8.is_letter(line[x + a])
|| line[x + a] in rune_digits || line[x + a] == `_`) {
x += a
}
// then, move past all the letters and numbers
for x + a >= 0 && x + a < line.len && (line[x + a].is_letter()
|| line[x + a].is_digit() || line[x + a] == `_`) {
for x + a >= 0 && x + a < line.len && (utf8.is_letter(line[x + a])
|| line[x + a] in rune_digits || line[x + a] == `_`) {
x += a
}
// if the cursor is out of bounds, move it to the next/previous line
@@ -457,7 +516,7 @@ fn (a &App) view_height() int {
fn (mut a App) magnet_cursor_x() {
mut buffer := a.ed
if buffer.cursor.pos_x < a.magnet_x {
if a.magnet_x < buffer.cur_line().len {
if a.magnet_x < buffer.cur_line().runes().len {
move_x := a.magnet_x - buffer.cursor.pos_x
buffer.move_cursor(move_x, .right)
}
@@ -478,7 +537,17 @@ fn frame(x voidptr) {
view := ed.view(a.viewport, scroll_limit + a.viewport)
a.tui.draw_text(0, 0, view.raw)
a.footer()
a.tui.set_cursor_position(view.cursor.pos_x + 1, ed.cursor.pos_y + 1 - a.viewport)

// Unicode: Handle correct mapping of cursor X position in terminal.
mut ch_x := view.cursor.pos_x
mut sl := ed.cur_slice().replace('\t', ' '.repeat(ed.tab_width))
if sl.len > 0 {
// Strip out any zero-width codepoints.
sl = sl.runes().filter(it !in zero_width_unicode).string()
ch_x = east_asian.display_width(sl, 1)
}

a.tui.set_cursor_position(ch_x + 1, ed.cursor.pos_y + 1 - a.viewport)
a.tui.flush()
}
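The new cursor mapping above first strips zero-width codepoints and then asks `encoding.utf8.east_asian` for the on-screen width, so that wide CJK glyphs (two terminal cells) and zero-width marks no longer desynchronise the cursor column. A hedged sketch of that calculation on its own, reusing the `zero_width_unicode` list and the `display_width(s, 1)` call from the diff:

import encoding.utf8.east_asian

// returns how many terminal columns `s` occupies after dropping
// zero-width codepoints; the `1` mirrors the second argument used
// in the frame() change above
fn terminal_columns(s string, zero_width []rune) int {
	visible := s.runes().filter(it !in zero_width).string()
	return east_asian.display_width(visible, 1)
}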

@@ -555,7 +624,8 @@ fn event(e &tui.Event, x voidptr) {
return
}
}
buffer.put(e.utf8.bytes().bytestr())

buffer.put(e.utf8)
}
}
} else if e.typ == .mouse_scroll {
Some files were not shown because too many files have changed in this diff.