Mirror of https://github.com/vlang/v.git (synced 2023-08-10 21:13:21 +03:00)
examples: fix typos (#18229)
parent caee3935a5
commit 993546a0a2
@@ -76,7 +76,7 @@ fn (tree Tree[T]) min[T]() T {
 }
 }

-// delete a value in BST (if nonexistant do nothing)
+// delete a value in BST (if nonexistent do nothing)
 fn (tree Tree[T]) delete[T](x T) Tree[T] {
 return match tree {
 Empty {

@@ -1,11 +1,11 @@
 /*
 A V program for Bellman-Ford's single source
 shortest path algorithm.
-literaly adapted from:
+literally adapted from:
 https://www.geeksforgeeks.org/bellman-ford-algorithm-dp-23/
 // Adapted from this site... from C++ and Python codes

-For Portugese reference
+For Portuguese reference
 http://rascunhointeligente.blogspot.com/2010/10/o-algoritmo-de-bellman-ford-um.html

 code by CCS

@@ -24,7 +24,7 @@ mut:
 // building a map of with all edges etc of a graph, represented from a matrix adjacency
 // Input: matrix adjacency --> Output: edges list of src, dest and weight
 fn build_map_edges_from_graph[T](g [][]T) map[T]EDGE {
-n := g.len // TOTAL OF NODES for this graph -- its dimmension
+n := g.len // TOTAL OF NODES for this graph -- its dimensions
 mut edges_map := map[int]EDGE{} // a graph represented by map of edges

 mut edge := 0 // a counter of edges

@@ -61,8 +61,8 @@ fn bellman_ford[T](graph [][]T, src int) {
 // Step 1: Initialize distances from src to all other
 // vertices as INFINITE
 n_vertex := graph.len // adjc matrix ... n nodes or vertex
-mut dist := []int{len: n_vertex, init: large} // dist with -1 instead of INIFINITY
-// mut path := []int{len: n , init:-1} // previous node of each shortest paht
+mut dist := []int{len: n_vertex, init: large} // dist with -1 instead of INFINITY
+// mut path := []int{len: n , init:-1} // previous node of each shortest path
 dist[src] = 0

 // Step 2: Relax all edges |V| - 1 times. A simple

@@ -152,7 +152,7 @@ fn main() {
 // for index, g_value in [graph_01, graph_02, graph_03] {
 for index, g_value in [graph_01, graph_02, graph_03] {
 graph = g_value.clone() // graphs_sample[g].clone() // choice your SAMPLE
-// allways starting by node 0
+// always starting by node 0
 start_node := 0
 println('\n\n Graph ${index + 1} using Bellman-Ford algorithm (source node: ${start_node})')
 bellman_ford(graph, start_node)

@@ -79,7 +79,7 @@ fn build_path_reverse(graph map[string][]string, start string, final string, vis
 for i in array_of_nodes {
 if current in graph[i] && visited[i] == true {
 current = i
-break // the first ocurrence is enough
+break // the first occurrence is enough
 }
 }
 path << current // update the path tracked

@@ -44,7 +44,7 @@ fn depth_first_search_path(graph map[string][]string, start string, target strin

 // check if this node is already visited
 if visited[node] == false {
-// if no ... test it searchin for a final node
+// if no ... test it and search for a final node
 visited[node] = true // means: node visited
 if node == target {
 path = build_path_reverse(graph, start, node, visited)

@@ -93,7 +93,7 @@ fn build_path_reverse(graph map[string][]string, start string, final string, vis
 for i in array_of_nodes {
 if current in graph[i] && visited[i] == true {
 current = i
-break // the first ocurrence is enough
+break // the first occurrence is enough
 }
 }
 path << current // updating the path tracked

@@ -22,7 +22,7 @@ $ ./an_executable.EXE
 Code based from : Data Structures and Algorithms Made Easy: Data Structures and Algorithmic Puzzles, Fifth Edition (English Edition)
 pseudo code written in C
 This idea is quite different: it uses a priority queue to store the current
-shortest path evaluted
+shortest path evaluated
 The priority queue structure built using a list to simulate
 the queue. A heap is not used in this case.
 */

@@ -38,17 +38,17 @@ mut:
 // The "push" always sorted in pq
 fn push_pq[T](mut prior_queue []T, data int, priority int) {
 mut temp := []T{}
-lenght_pq := prior_queue.len
+pq_len := prior_queue.len

 mut i := 0
-for i < lenght_pq && priority > prior_queue[i].priority {
+for i < pq_len && priority > prior_queue[i].priority {
 temp << prior_queue[i]
 i++
 }
 // INSERTING SORTED in the queue
 temp << NODE{data, priority} // do the copy in the right place
 // copy the another part (tail) of original prior_queue
-for i < lenght_pq {
+for i < pq_len {
 temp << prior_queue[i]
 i++
 }

@@ -59,16 +59,16 @@ fn push_pq[T](mut prior_queue []T, data int, priority int) {
 // Change the priority of a value/node ... exist a value, change its priority
 fn updating_priority[T](mut prior_queue []T, search_data int, new_priority int) {
 mut i := 0
-mut lenght_pq := prior_queue.len
+mut pq_len := prior_queue.len

-for i < lenght_pq {
+for i < pq_len {
 if search_data == prior_queue[i].data {
 prior_queue[i] = NODE{search_data, new_priority} // do the copy in the right place
 break
 }
 i++
 // all the list was examined
-if i >= lenght_pq {
+if i >= pq_len {
 print('\n This data ${search_data} does exist ... PRIORITY QUEUE problem\n')
 exit(1) // panic(s string)
 }

@@ -126,7 +126,7 @@ fn dijkstra(g [][]int, s int) {
 mut n := g.len

 mut dist := []int{len: n, init: -1} // dist with -1 instead of INIFINITY
-mut path := []int{len: n, init: -1} // previous node of each shortest paht
+mut path := []int{len: n, init: -1} // previous node of each shortest path

 // Distance of source vertex from itself is always 0
 dist[s] = 0

@@ -223,13 +223,13 @@ fn main() {
 [5, 15, 4, 0],
 ]

-// To find number of coluns
+// To find number of columns
 // mut cols := an_array[0].len
 mut graph := [][]int{} // the graph: adjacency matrix
 // for index, g_value in [graph_01, graph_02, graph_03] {
 for index, g_value in [graph_01, graph_02, graph_03] {
 graph = g_value.clone() // graphs_sample[g].clone() // choice your SAMPLE
-// allways starting by node 0
+// always starting by node 0
 start_node := 0
 println('\n\n Graph ${index + 1} using Dijkstra algorithm (source node: ${start_node})')
 dijkstra(graph, start_node)

@@ -16,7 +16,7 @@ $ ./an_executable.EXE
 Code based from : Data Structures and Algorithms Made Easy: Data Structures and Algorithmic Puzzles, Fifth Edition (English Edition)
 pseudo code written in C
 This idea is quite different: it uses a priority queue to store the current
-shortest path evaluted
+shortest path evaluated
 The priority queue structure built using a list to simulate
 the queue. A heap is not used in this case.
 */

@@ -32,17 +32,17 @@ mut:
 // The "push" always sorted in pq
 fn push_pq[T](mut prior_queue []T, data int, priority int) {
 mut temp := []T{}
-lenght_pq := prior_queue.len
+pg_len := prior_queue.len

 mut i := 0
-for i < lenght_pq && priority > prior_queue[i].priority {
+for i < pg_len && priority > prior_queue[i].priority {
 temp << prior_queue[i]
 i++
 }
 // INSERTING SORTED in the queue
 temp << NODE{data, priority} // do the copy in the right place
 // copy the another part (tail) of original prior_queue
-for i < lenght_pq {
+for i < pg_len {
 temp << prior_queue[i]
 i++
 }

@@ -52,16 +52,16 @@ fn push_pq[T](mut prior_queue []T, data int, priority int) {
 // Change the priority of a value/node ... exist a value, change its priority
 fn updating_priority[T](mut prior_queue []T, search_data int, new_priority int) {
 mut i := 0
-mut lenght_pq := prior_queue.len
+mut pg_len := prior_queue.len

-for i < lenght_pq {
+for i < pg_len {
 if search_data == prior_queue[i].data {
 prior_queue[i] = NODE{search_data, new_priority} // do the copy in the right place
 break
 }
 i++
 // all the list was examined
-if i >= lenght_pq {
+if i >= pg_len {
 // print('\n Priority Queue: ${prior_queue}')
 // print('\n These data ${search_data} and ${new_priority} do not exist ... PRIORITY QUEUE problem\n')
 // if it does not find ... then push it

@@ -118,7 +118,7 @@ fn prim_mst(g [][]int, s int) {
 mut n := g.len

 mut dist := []int{len: n, init: -1} // dist with -1 instead of INIFINITY
-mut path := []int{len: n, init: -1} // previous node of each shortest paht
+mut path := []int{len: n, init: -1} // previous node of each shortest path

 // Distance of source vertex from itself is always 0
 dist[s] = 0

@@ -216,7 +216,7 @@ fn main() {
 for index, g_value in [graph_01, graph_02, graph_03] {
 println('\n Minimal Spanning Tree of graph ${index + 1} using PRIM algorithm')
 graph = g_value.clone() // graphs_sample[g].clone() // choice your SAMPLE
-// starting by node x ... see the graphs dimmension
+// starting by node x ... see the graphs dimensions
 start_node := 0
 prim_mst(graph, start_node)
 }

@@ -11,9 +11,9 @@ fn topog_sort_greedy(graph map[string][]string) []string {
 mut top_order := []string{} // a vector with sequence of nodes visited
 mut count := 0
 /*
-IDEA ( a greedy algorythm ):
+IDEA ( a greedy algorithm ):

-1. choose allways the node with smallest input degree
+1. choose always the node with smallest input degree
 2. visit it
 3. put it in the output vector
 4. remove it from graph

@@ -197,7 +197,7 @@ fn gen_table_info(attribute_names []string, framework_platform map[string][]int)
 // qtd. of values in 10 % of arrays
 ten_perc := int(framework_platform[name].len / 10)

-// get 10% highter
+// get 10% higher
 mut min_ten_array := framework_platform[name].clone()
 min_ten_array.sort()
 min_ten_array.trim(ten_perc)

@@ -161,7 +161,7 @@ fn (sp Sphere) intersect(r Ray) f64 {
 * 0) Cornell Box with 2 spheres
 * 1) Sunset
 * 2) Psychedelic
-* The sphere fileds are: Sphere{radius, position, emission, color, material}
+* The sphere fields are: Sphere{radius, position, emission, color, material}
 ******************************************************************************/
 const (
 cen = Vec{50, 40.8, -860} // used by scene 1

@@ -361,7 +361,7 @@ fn rand_f64() f64 {
 }

 const (
-cache_len = 65536 // the 2*pi angle will be splitted in 65536 part
+cache_len = 65536 // the 2*pi angle will be split in 2^16 parts
 cache_mask = cache_len - 1 // mask to speed-up the module process
 )

@@ -9,7 +9,7 @@ pub mut:
 valid bool
 }

-pub struct ImageWritter {
+pub struct ImageWriter {
 settings ImageSettings
 pub mut:
 writer PPMWriter

@@ -17,19 +17,19 @@ pub mut:
 buffer []ValidColor
 }

-pub fn new_image_writer(mut writer PPMWriter, settings ImageSettings) &ImageWritter {
+pub fn new_image_writer(mut writer PPMWriter, settings ImageSettings) &ImageWriter {
 total_pixels := settings.width * settings.height
 mut buffer := []ValidColor{len: total_pixels, init: ValidColor{
 valid: false
 }}
-return &ImageWritter{
+return &ImageWriter{
 writer: writer
 settings: settings
 buffer: buffer
 }
 }

-pub fn (mut iw ImageWritter) handle(result sim.SimResult) !int {
+pub fn (mut iw ImageWriter) handle(result sim.SimResult) !int {
 total_pixels := iw.settings.width * iw.settings.height

 // find the closest magnet

@@ -21,7 +21,7 @@ pub fn (mut state SimState) satisfy_rope_constraint(params SimParams) {

 pub fn (mut state SimState) increment(delta_t f64, params SimParams) {
 // 1. add up all forces
-// 2. get an accelleration
+// 2. get an acceleration
 // 3. add to velocity
 // 4. ensure rope constraint is satisfied

@@ -9,7 +9,7 @@ pub struct Vector3D {
 z f64
 }

-// vector creates a Vector3D passing x,y,z as parameteres
+// vector creates a Vector3D passing x,y,z as parameters
 pub fn vector(data Vector3D) Vector3D {
 return Vector3D{
 ...data

@@ -6,7 +6,7 @@ import pcre

 fn example() {
 r := pcre.new_regex('Match everything after this: (.+)', 0) or {
-println('An error occured!')
+println('An error occurred!')
 return
 }

@@ -49,7 +49,7 @@ fn main() {
 regex := r'(\[[a-z\.\! ]*\]\( *\w*\:*\w* *\))*'

 r := pcre.new_regex(regex, 0) or {
-println('An error occured!')
+println('An error occurred!')
 return
 }

@@ -8,7 +8,7 @@
 *
 * TODO:
 * - add instancing
-* - add an exampel with shaders
+* - add an example with shaders
 **********************************************************************/
 import gg
 import gx

@@ -53,7 +53,7 @@ fn create_texture(w int, h int, buf &u8) gfx.Image {
 label: &u8(0)
 d3d11_texture: 0
 }
-// commen if .dynamic is enabled
+// comment, if .dynamic is enabled
 img_desc.data.subimage[0][0] = gfx.Range{
 ptr: buf
 size: usize(sz)

@@ -394,7 +394,7 @@ fn draw_cube_glsl(app App) {
 tr_matrix := m4.calc_tr_matrices(dw, dh, rot[0], rot[1], 2.0)
 gfx.apply_viewport(ws.width / 2, 0, ws.width / 2, ws.height / 2, true)

-// apply the pipline and bindings
+// apply the pipeline and bindings
 gfx.apply_pipeline(app.cube_pip_glsl)
 gfx.apply_bindings(app.cube_bind)

@@ -440,7 +440,7 @@ fn draw_cube_glsl_p(app App) {
 rot := [f32(app.mouse_y), f32(app.mouse_x)]
 tr_matrix := calc_tr_matrices(dw, dh, rot[0], rot[1], 2.3)

-// apply the pipline and bindings
+// apply the pipeline and bindings
 gfx.apply_pipeline(app.pipe['puppy'])
 gfx.apply_bindings(app.bind['puppy'])

@@ -255,7 +255,7 @@ pub fn (mut obj_part ObjPart) parse_obj_buffer(rows []string, single_material bo
 // println("Vertex line: $c")
 break
 }
-// parameteres uvw
+// parameters uvw
 `p` {
 obj_part.vp << parse_3f(row, i + 2)
 // println("Vertex line: ${obj_part.vp.len}")

@@ -350,7 +350,7 @@ fn (mut obj_part ObjPart) load_materials() {
 break
 }
 }
-// trasparency
+// transparency
 `d` {
 if row[i + 1] == ` ` {
 value, _ := get_float(row, i + 2)

@@ -396,7 +396,7 @@ fn (mut obj_part ObjPart) load_materials() {
 // vertex data struct
 pub struct Vertex_pnct {
 pub mut:
-x f32 // poistion
+x f32 // position
 y f32
 z f32
 nx f32 // normal

@@ -143,7 +143,7 @@ pub fn (mut obj_part ObjPart) create_pipeline(in_part []int, shader gfx.Shader,
 /******************************************************************************
 * Render functions
 ******************************************************************************/
-// agregate all the part by materials
+// aggregate all the part by materials
 pub fn (mut obj_part ObjPart) init_render_data(texture gfx.Image) {
 // create shader
 // One shader for all the model

@@ -188,12 +188,12 @@ pub fn (mut obj_part ObjPart) init_render_data(texture gfx.Image) {
 }

 pub fn (obj_part ObjPart) bind_and_draw(rend_data_index int, in_data Shader_data) u32 {
-// apply the pipline and bindings
+// apply the pipeline and bindings
 mut part_render_data := obj_part.rend_data[rend_data_index]

 // pass light position
 mut tmp_fs_params := Tmp_fs_param{}
-tmp_fs_params.ligth = in_data.fs_data.ligth
+tmp_fs_params.light = in_data.fs_data.light

 if part_render_data.material in obj_part.mat_map {
 mat_index := obj_part.mat_map[part_render_data.material]

@@ -215,7 +215,7 @@ pub fn (obj_part ObjPart) bind_and_draw(rend_data_index int, in_data Shader_data
 if 'Ns' in mat.ns {
 tmp_fs_params.ks.e[3] = mat.ns['Ns'] / 1000.0
 } else {
-// defautl value is 10
+// default value is 10
 tmp_fs_params.ks.e[3] = f32(10) / 1000.0
 }

@@ -13,7 +13,7 @@ module obj
 import gg.m4
 import sokol.gfx

-// part struct mantain the fae indexes list
+// part struct maintaining the face indices
 pub struct Part {
 pub mut:
 faces [][][3]int // v n t index order, if -1 not available

@@ -21,7 +21,7 @@ pub mut:
 material string
 }

-// materias struct, all Ks and Ns are stored as maps of string
+// material struct, all Ks and Ns are stored as maps of string
 pub struct Material {
 pub mut:
 name string

@@ -50,14 +50,14 @@ pub mut:
 name string
 part []Part // parts of the ObjPart
 mat []Material // list of the materials of the ObjPart
-mat_map map[string]int // maping material name to its material index
+mat_map map[string]int // mapping material name to its material index
 texture map[string]gfx.Image // GPU loaded texture map
 material_file string // .mtl file name for the .obj

 rend_data []Render_data // render data used for the rendering

 t_m m4.Mat4 = m4.unit_m4() // transform matrix for this ObjPart
-// child []ObjPart // childs
+// child []ObjPart
 // stats
 min m4.Vec4 // min 3d position in the ObjPart
 max m4.Vec4 // max 3d position in the ObjPart

@@ -83,7 +83,7 @@ pub mut:
 // data passed to the pixel shader
 pub struct Tmp_fs_param {
 pub mut:
-ligth m4.Vec4
+light m4.Vec4
 ka m4.Vec4 = m4.Vec4{
 e: [f32(0.1), 0.0, 0.0, 1.0]!
 }

@@ -133,7 +133,7 @@ fn draw_model(app App, model_pos m4.Vec4) u32 {
 z_light := f32(math.sin(time_ticks) * radius_light)

 mut tmp_fs_params := obj.Tmp_fs_param{}
-tmp_fs_params.ligth = m4.vec3(x_light, radius_light, z_light)
+tmp_fs_params.light = m4.vec3(x_light, radius_light, z_light)

 sd := obj.Shader_data{
 vs_data: unsafe { &tmp_vs_param }

@@ -166,7 +166,7 @@ fn frame(mut app App) {
 // render the data
 draw_start_glsl(app)
 draw_model(app, m4.Vec4{})
-// uncoment if you want a raw benchmark mode
+// uncomment if you want a raw benchmark mode
 /*
 mut n_vertex_drawn := u32(0)
 n_x_obj := 20

@@ -125,7 +125,7 @@ fn init(user_data voidptr) {
 // in vec4 position;
 // in vec4 color0;
 // ```
-// Also note the naming of the C.ATTR_* used as indicies.
+// Also note the naming of the C.ATTR_* used as indices.
 // They are the prefixed versions of the names of the input variables in the shader code.
 // If they change in the shader code they will also change here.
 pipeline_desc.layout.attrs[C.ATTR_vs_position].format = .float3 // x,y,z as f32

@@ -131,7 +131,7 @@ fn read_wav_file_samples(fpath string) ![]f32 {
 return error('WAV should have `WAVE` form type')
 }
 if rh.file_size + 8 != bytes.len {
-return error('WAV should have valid lenght')
+return error('WAV should have valid length')
 }
 offset += sizeof(RIFFHeader)
 mut rf := &RIFFFormat(0)

@@ -11,7 +11,7 @@
 "www.threefold.io",
 "www.threefold.me"
 ],
-"descr": "is our entry point for everyone, redirect to the detailed websites underneith."
+"descr": "is our entry point for everyone, redirect to the detailed websites underneath."
 },
 {
 "name": "www_threefold_cloud",

@@ -139,7 +139,7 @@
 "sdk.threefold.io",
 "sdk_info.threefold.io"
 ],
-"descr": "for IAC, devops, how to do Infrastruture As Code, 3bot, Ansible, tfgrid-sdk, ..."
+"descr": "for IAC, devops, how to do Infrastructure As Code, 3bot, Ansible, tfgrid-sdk, ..."
 },
 {
 "name": "info_legal",

@@ -33,7 +33,7 @@ fn data_get() []SiteConfig {
 alias: 'tf'
 path_code: '/Users/despiegk/codewww/github/threefoldfoundation/www_threefold_io'
 domains: ['www.threefold.io', 'www.threefold.me']
-descr: 'is our entry point for everyone, redirect to the detailed websites underneith.'
+descr: 'is our entry point for everyone, redirect to the detailed websites underneath.'
 },
 SiteConfig{
 name: 'www_threefold_cloud'

@@ -134,7 +134,7 @@ fn data_get() []SiteConfig {
 alias: 'sdk'
 path_code: '/Users/despiegk/codewww/github/threefoldfoundation/info_sdk'
 domains: ['sdk.threefold.io', 'sdk_info.threefold.io']
-descr: 'for IAC, devops, how to do Infrastruture As Code, 3bot, Ansible, tfgrid-sdk, ...'
+descr: 'for IAC, devops, how to do Infrastructure As Code, 3bot, Ansible, tfgrid-sdk, ...'
 },
 SiteConfig{
 name: 'info_legal'

@@ -67,7 +67,7 @@ fn draw_frame(mut app App_data) {
 txt1.draw_text_bmp(app.gg, 30, 60)
 // block test
 block_txt := "Today it is a good day!
-Tommorow I'm not so sure :(
+Tomorrow I'm not so sure :(
 Frame: ${app.frame_c}
 But Vwill prevail for sure, V is the way!!
 òàèì@ò!£$%&

@@ -22,7 +22,7 @@ import strings
 // Help text
 const (
 help_text_rows = [
-'Image Viwer 0.9 help.',
+'Image Viewer 0.9 help.',
 '',
 'ESC/q - Quit',
 'cur. right - Next image',

@@ -35,7 +35,7 @@ const (
 '',
 'mouse wheel - next/previous images',
 'keep pressed left Mouse button - Pan on the image',
-'keep pressed rigth Mouse button - Zoom on the image',
+'keep pressed right Mouse button - Zoom on the image',
 ]
 )

@@ -94,7 +94,7 @@ mut:
 show_help_flag bool
 // zip container
 zip &szip.Zip = unsafe { nil } // pointer to the szip structure
-zip_index int = -1 // index of the zip contaire item
+zip_index int = -1 // index of the zip container item
 // memory buffer
 mem_buf voidptr // buffer used to load items from files/containers
 mem_buf_size int // size of the buffer

@@ -392,7 +392,7 @@ fn frame(mut app App) {
 sgl.translate(tr_x, tr_y, 0.0)
 // scaling/zoom
 sgl.scale(2.0 * app.scale, 2.0 * app.scale, 0.0)
-// roation
+// rotation
 mut rotation := 0
 if app.state == .show && app.item_list.n_item > 0 {
 rotation = app.item_list.lst[app.item_list.item_index].rotation

@@ -767,7 +767,7 @@ fn main() {
 font_path := os.join_path(os.temp_dir(), font_name)
 println('Temporary path for the font file: [${font_path}]')

-// if the font doesn't exist create it from the ebedded one
+// if the font doesn't exist create it from the embedded one
 if os.exists(font_path) == false {
 println('Write font [${font_name}] in temp folder.')
 embedded_file := $embed_file('../assets/fonts/RobotoMono-Regular.ttf')

@@ -781,7 +781,7 @@ fn main() {
 logo_name := 'logo.png'
 logo_path := os.join_path(os.temp_dir(), logo_name)
 println('Temporary path for the logo: [${logo_path}]')
-// if the logo doesn't exist create it from the ebedded one
+// if the logo doesn't exist create it from the embedded one
 if os.exists(logo_path) == false {
 println('Write logo [${logo_name}] in temp folder.')
 embedded_file := $embed_file('../assets/logo.png')

@@ -104,6 +104,6 @@ fn middleware_early(mut ctx vweb.Context) bool {
 println('4')
 ctx.text(':(')

-// returns false, so the middleware propogation is stopped and the user will see the text ":("
+// returns false, so the middleware propagation is stopped and the user will see the text ":("
 return false
 }

@@ -31,17 +31,17 @@ fn main() {

 // Creates an array of words from a given string
 fn extract_words(contents string) []string {
-mut splitted := []string{}
-for space_splitted in contents.to_lower().split(' ') {
-if space_splitted.contains('\n') {
-splitted << space_splitted.split('\n')
+mut splits := []string{}
+for space_split in contents.to_lower().split(' ') {
+if space_split.contains('\n') {
+splits << space_split.split('\n')
 } else {
-splitted << space_splitted
+splits << space_split
 }
 }

 mut results := []string{}
-for s in splitted {
+for s in splits {
 result := filter_word(s)
 if result == '' {
 continue