import gg
import gx
import sokol.audio

const credits = 'Based on the ByteBeat formula from: https://www.youtube.com/watch?v=V4GfkFbDojc \n "Techno" by Gabriel Miceli'

struct AppState {
mut:
	gframe  int         // the current graphical frame
	frame_0 int         // offset of the current audio frames, relative to the start of the music
	frames  [2048]f32   // a copy of the last rendered audio frames
	gg      &gg.Context // used for drawing
}
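
// my_audio_stream_callback is called by sokol.audio whenever more samples are
// needed (usually from a separate audio thread); it has to fill `buffer` with
// num_frames * num_channels f32 samples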
fn my_audio_stream_callback(buffer &f32, num_frames int, num_channels int, mut acontext AppState) {
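	// take a writable alias of the incoming `buffer` pointer, so the samples can be stored into it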
	mut soundbuffer := unsafe { buffer }
	for frame := 0; frame < num_frames; frame++ {
		
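		// t is the bytebeat time counter, derived from the running audio frame index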
		t := int(f32(acontext.frame_0 + frame) * 0.245)
		// "Techno" by Gabriel Miceli
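		// bytebeat expression; only the low 8 bits of y are used (see u8(y) below)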
		y := (t * (((t / 10 | 0) ^ ((t / 10 | 0) - 1280)) % 11) / 2 & 127) +
			(t * (((t / 640 | 0) ^ ((t / 640 | 0) - 2)) % 13) / 2 & 127)
		for ch := 0; ch < num_channels; ch++ {
			idx := frame * num_channels + ch
			unsafe {
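				// map the 0..255 byte value to a float sample in roughly [-0.5, 0.5]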
				a := f32(u8(y) - 127) / 255.0
				soundbuffer[idx] = a
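				// keep a copy of the sample, so that the graphics code can visualise it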
				acontext.frames[idx & 2047] = a
			}
		}
	}
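	// advance the audio frame offset, so that t keeps increasing across callback calls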
	acontext.frame_0 += num_frames
}

fn main() {
	println(credits)
	mut state := &AppState{
		gg: 0
	}
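	// start the sokol audio backend; it will call my_audio_stream_callback to get samples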
	audio.setup(
		stream_userdata_cb: my_audio_stream_callback
		user_data: state
	)
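	// create the window and the graphics context used for drawing the waveform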
	state.gg = gg.new_context(
		bg_color: gx.rgb(50, 50, 50)
		width: 1024
		height: 400
		create_window: true
		window_title: 'ByteBeat Music'
		frame_fn: graphics_frame
		user_data: state
	)
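	// enter the event/render loop; this blocks until the window is closed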
	state.gg.run()
	audio.shutdown()
}
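
// graphics_frame is called by gg for every frame that has to be drawn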
fn graphics_frame(mut state AppState) {
	state.gframe++
	state.gg.begin()
	state.draw()
	state.gg.end()
}
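
// bsample maps a stored audio sample back into a byte,
// which draw() then uses as a colour channel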
[inline]
fn (mut state AppState) bsample(idx int) u8 {
	return u8(127 + state.frames[(state.gframe + idx) & 2047] * 128)
}
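
// draw visualises the copied audio frames: one coloured vertical line per screen column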
fn (mut state AppState) draw() {
	// the height of each line is proportional to a pair of consecutive audio samples
	for x in 0 .. 1024 {
		mut y := 100 * (state.frames[2 * x] + state.frames[2 * x + 1])
		state.gg.draw_line(x, 200, x, 200 + y, gx.rgba(state.bsample(x), state.bsample(x + 300),
			state.bsample(x + 700), 255))
	}
}