Mirror of https://github.com/vlang/v.git (synced 2023-08-10 21:13:21 +03:00)
vfmt: voidptr(0) => unsafe { nil } (p.1)
parent be9f8cc777
commit caa0c2f153
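This commit is the first part of a vfmt change that rewrites the old `voidptr(0)` null-pointer idiom as `unsafe { nil }` (or as plain `nil` when the surrounding code is already inside an `unsafe` block), and applies the new spelling across the standard library, tooling and tests. A minimal hand-written sketch of the rewrite, with an invented struct name, in the spirit of the hunks below:

// before this commit
struct State {
mut:
	user_ptr voidptr = voidptr(0) // null user pointer, old spelling
}

// after running the updated vfmt
struct State {
mut:
	user_ptr voidptr = unsafe { nil } // same default, new spelling
}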
@@ -160,7 +160,7 @@ fn get_reg_sys_env_handle() ?voidptr {
 	$if windows { // wrap for cross-compile compat
 		// open the registry key
 		reg_key_path := 'Environment'
-		reg_env_key := voidptr(0) // or HKEY (HANDLE)
+		reg_env_key := unsafe { nil } // or HKEY (HANDLE)
 		if C.RegOpenKeyEx(os.hkey_current_user, reg_key_path.to_wide(), 0, 1 | 2, &reg_env_key) != 0 {
 			return error('Could not open "$reg_key_path" in the registry')
 		}
@@ -28,7 +28,7 @@ fn main() {
 	pass_action.colors[0] = color_action
 	state := &AppState{
 		pass_action: pass_action
-		font_context: voidptr(0) // &fontstash.Context(0)
+		font_context: unsafe { nil } // &fontstash.Context(0)
 	}
 	title := 'V Metal/GL Text Rendering'
 	desc := sapp.Desc{
@@ -76,7 +76,7 @@ fn main() {
 	pass_action.colors[0] = color_action
 	state := &AppState{
 		pass_action: pass_action
-		fons: voidptr(0) // &fontstash.Context(0)
+		fons: unsafe { nil } // &fontstash.Context(0)
 	}
 	title := 'V Metal/GL Text Rendering'
 	desc := sapp.Desc{
@@ -130,7 +130,7 @@ fn print_backtrace_skipping_top_frames_linux(skipframes int) bool {
 fn break_if_debugger_attached() {
 	unsafe {
 		mut ptr := &voidptr(0)
-		*ptr = voidptr(0)
+		*ptr = nil
 		//_ = ptr
 	}
 }
@@ -17,6 +17,10 @@ pub fn ptr_str(ptr voidptr) string {
 	return buf1
 }
 
+// pub fn nil_str(x voidptr) string {
+// return 'nil'
+//}
+
 // str returns string equivalent of x
 pub fn (x isize) str() string {
 	return i64(x).str()
@@ -89,12 +89,12 @@ pub fn (mut list DoublyLinkedList<T>) pop_back() ?T {
 	if list.len == 1 {
 		// head == tail
 		value := list.tail.data
-		list.head = voidptr(0)
-		list.tail = voidptr(0)
+		list.head = unsafe { nil }
+		list.tail = unsafe { nil }
 		return value
 	}
 	value := list.tail.data
-	list.tail.prev.next = voidptr(0) // unlink tail
+	list.tail.prev.next = unsafe { nil } // unlink tail
 	list.tail = list.tail.prev
 	return value
 }
@@ -110,12 +110,12 @@ pub fn (mut list DoublyLinkedList<T>) pop_front() ?T {
 	if list.len == 1 {
 		// head == tail
 		value := list.head.data
-		list.head = voidptr(0)
-		list.tail = voidptr(0)
+		list.head = unsafe { nil }
+		list.tail = unsafe { nil }
 		return value
 	}
 	value := list.head.data
-	list.head.next.prev = voidptr(0) // unlink head
+	list.head.next.prev = unsafe { nil } // unlink head
 	list.head = list.head.next
 	return value
 }
@@ -261,15 +261,15 @@ pub fn (list DoublyLinkedList<T>) str() string {
 // next implements the iter interface to use DoublyLinkedList with
 // V's for loop syntax.
 pub fn (mut list DoublyLinkedList<T>) next() ?T {
-	if list.iter == voidptr(0) {
+	if list.iter == unsafe { nil } {
 		// initialize new iter object
 		list.iter = &DoublyListIter<T>{
 			node: list.head
 		}
 		return list.next()
 	}
-	if list.iter.node == voidptr(0) {
-		list.iter = voidptr(0)
+	if list.iter.node == unsafe { nil } {
+		list.iter = unsafe { nil }
 		return none
 	}
 	defer {
@@ -229,8 +229,8 @@ mut:
 	treebins [n_tree_bins]&TreeChunk
 	dvsize usize
 	topsize usize
-	dv &Chunk = voidptr(0)
-	top &Chunk = voidptr(0)
+	dv &Chunk = unsafe { nil }
+	top &Chunk = unsafe { nil }
 	footprint usize
 	max_footprint usize
 	seg Segment
@@ -247,13 +247,13 @@ pub fn new(system_allocator Allocator) Dlmalloc {
 		treebins: unsafe { [dlmalloc.n_tree_bins]&TreeChunk{} }
 		dvsize: 0
 		topsize: 0
-		dv: voidptr(0)
-		top: voidptr(0)
+		dv: unsafe { nil }
+		top: unsafe { nil }
 		footprint: 0
 		max_footprint: 0
-		seg: Segment{voidptr(0), 0, voidptr(0), 0}
+		seg: Segment{unsafe { nil }, 0, unsafe { nil }, 0}
 		trim_check: 0
-		least_addr: voidptr(0)
+		least_addr: unsafe { nil }
 		release_checks: 0
 		system_allocator: system_allocator
 		max_request: 4294901657
@@ -498,7 +498,7 @@ fn (mut dl Dlmalloc) unlink_large_chunk(chunk_ &TreeChunk) {
 	unsafe {
 		mut chunk := chunk_
 		mut xp := &TreeChunk(chunk.parent)
-		mut r := &TreeChunk(voidptr(0))
+		mut r := &TreeChunk(unsafe { nil })
 		if voidptr(chunk.next()) != voidptr(chunk) {
 			mut f := chunk.prev()
 			r = chunk.next()
@@ -523,7 +523,7 @@ fn (mut dl Dlmalloc) unlink_large_chunk(chunk_ &TreeChunk) {
 				rp = cp
 			}
 			r = *rp
-			*rp = &TreeChunk(voidptr(0))
+			*rp = &TreeChunk(unsafe { nil })
 		}
 	}
 
@@ -635,7 +635,7 @@ pub fn (mut dl Dlmalloc) free_(mem voidptr) {
 			dl.top = p
 			p.head = tsize | dlmalloc.pinuse
 			if voidptr(p) == voidptr(dl.dv) {
-				dl.dv = voidptr(0)
+				dl.dv = unsafe { nil }
 				dl.dvsize = 0
 			}
 
@@ -748,7 +748,7 @@ fn (mut dl Dlmalloc) release_unused_segments() usize {
 			if !p.inuse() && chunk_top >= top {
 				mut tp := &TreeChunk(p)
 				if voidptr(p) == voidptr(dl.dv) {
-					dl.dv = voidptr(0)
+					dl.dv = unsafe { nil }
 					dl.dvsize = 0
 				} else {
 					dl.unlink_large_chunk(tp)
@@ -843,8 +843,8 @@ fn (mut dl Dlmalloc) insert_large_chunk(chunk_ &TreeChunk, size usize) {
 		mut h := dl.treebin_at(idx)
 
 		chunk.index = idx
-		chunk.child[0] = voidptr(0)
-		chunk.child[1] = voidptr(0)
+		chunk.child[0] = unsafe { nil }
+		chunk.child[1] = unsafe { nil }
 
 		mut chunkc := chunk.chunk()
 		if !dl.treemap_is_marked(idx) {
@@ -879,7 +879,7 @@ fn (mut dl Dlmalloc) insert_large_chunk(chunk_ &TreeChunk, size usize) {
 					tc.prev = chunkc
 					chunkc.prev = f
 					chunkc.next = tc
-					chunk.parent = voidptr(0)
+					chunk.parent = unsafe { nil }
 					break
 				}
 			}
@@ -988,7 +988,7 @@ fn (mut dl Dlmalloc) malloc_real(size usize) voidptr {
 				}
 			}
 		} else if size >= dl.max_request {
-			return voidptr(0)
+			return unsafe { nil }
 		} else {
 			nb = pad_request(size)
 			if dl.treemap != 0 {
@@ -1013,7 +1013,7 @@ fn (mut dl Dlmalloc) malloc_real(size usize) voidptr {
 			} else {
 				dvs := dl.dvsize
 				dl.dvsize = 0
-				dl.dv = voidptr(0)
+				dl.dv = unsafe { nil }
 				p.set_inuse_and_pinuse(dvs)
 			}
 			ret := p.to_mem()
@@ -1140,7 +1140,7 @@ fn (mut dl Dlmalloc) sys_alloc(size usize) voidptr {
 			return ret
 		}
 	}
-	return voidptr(0)
+	return unsafe { nil }
 }
 
 [unsafe]
@@ -1183,13 +1183,13 @@ fn (mut dl Dlmalloc) tmalloc_small(size usize) voidptr {
 [unsafe]
 fn (mut dl Dlmalloc) tmalloc_large(size usize) voidptr {
 	unsafe {
-		mut v := &TreeChunk(voidptr(0))
+		mut v := &TreeChunk(unsafe { nil })
 		mut rsize := ~size + 1
 		idx := dl.compute_tree_index(size)
 		mut t := *dl.treebin_at(idx)
 		if !isnil(t) {
 			mut sizebits := size << leftshift_for_tree_index(idx)
-			mut rst := voidptr(0)
+			mut rst := unsafe { nil }
 			for {
 				csize := t.chunk().size()
 				if csize >= size && csize - size < rsize {
@@ -1232,7 +1232,7 @@ fn (mut dl Dlmalloc) tmalloc_large(size usize) voidptr {
 		}
 
 		if isnil(v) || (dl.dvsize >= size && !(rsize < dl.dvsize - size)) {
-			return voidptr(0)
+			return unsafe { nil }
 		}
 
 		mut vc := v.chunk()
@@ -1362,7 +1362,7 @@ fn (mut dl Dlmalloc) segment_holding(ptr voidptr) &Segment {
 [unsafe]
 pub fn (mut dl Dlmalloc) realloc(oldmem voidptr, bytes usize) voidptr {
 	if bytes >= dl.max_request {
-		return voidptr(0)
+		return unsafe { nil }
 	}
 	unsafe {
 		nb := request_2_size(bytes)
@@ -1393,7 +1393,7 @@ pub fn (mut dl Dlmalloc) memalign(alignment_ usize, bytes usize) voidptr {
 	}
 
 	if bytes >= max_request() - alignment {
-		return voidptr(0)
+		return unsafe { nil }
 	}
 	unsafe {
 		nb := request_2_size(bytes)
@@ -1412,7 +1412,7 @@ pub fn (mut dl Dlmalloc) memalign(alignment_ usize, bytes usize) voidptr {
 			// we've allocated enough total room so that this is always possible
 			br_ := (usize(mem) + alignment - 1) & (~alignment + 1)
 			br := chunk_from_mem(voidptr(br_))
-			mut pos := voidptr(0)
+			mut pos := unsafe { nil }
 			if usize(br) - usize(p) > min_chunk_size() {
 				pos = voidptr(br)
 			} else {
@@ -1469,7 +1469,7 @@ fn (mut dl Dlmalloc) try_realloc_chunk(p_ &Chunk, nb usize, can_move bool) &Chun
 			return p
 		} else if voidptr(next) == voidptr(dl.top) {
 			if oldsize + dl.topsize <= nb {
-				return voidptr(0)
+				return unsafe { nil }
 			}
 
 			newsize := oldsize + dl.topsize
@@ -1483,7 +1483,7 @@ fn (mut dl Dlmalloc) try_realloc_chunk(p_ &Chunk, nb usize, can_move bool) &Chun
 		} else if voidptr(next) == voidptr(dl.dv) {
 			dvs := dl.dvsize
 			if oldsize + dvs < nb {
-				return voidptr(0)
+				return unsafe { nil }
 			}
 
 			dsize := oldsize + dvs - nb
@@ -1499,13 +1499,13 @@ fn (mut dl Dlmalloc) try_realloc_chunk(p_ &Chunk, nb usize, can_move bool) &Chun
 				newsize := oldsize + dvs
 				p.set_inuse(newsize)
 				dl.dvsize = 0
-				dl.dv = voidptr(0)
+				dl.dv = unsafe { nil }
 			}
 			return p
 		} else if !next.cinuse() {
 			nextsize := next.size()
 			if oldsize + nextsize < nb {
-				return voidptr(0)
+				return unsafe { nil }
 			}
 			rsize := oldsize + nextsize - nb
 			dl.unlink_chunk(next, nextsize)
@@ -1520,7 +1520,7 @@ fn (mut dl Dlmalloc) try_realloc_chunk(p_ &Chunk, nb usize, can_move bool) &Chun
 			}
 			return p
 		} else {
-			return voidptr(0)
+			return unsafe { nil }
 		}
 	}
 }
@@ -1530,7 +1530,7 @@ fn (mut dl Dlmalloc) mmap_resize(oldp_ &Chunk, nb usize, can_move bool) &Chunk {
 	mut oldp := unsafe { oldp_ }
 	oldsize := oldp.size()
 	if is_small(nb) {
-		return voidptr(0)
+		return unsafe { nil }
 	}
 	// Keep the old chunk if it's big enough but not too big
 	if oldsize >= nb + sizeof(usize) && (oldsize - nb) <= (default_granularity() << 1) {
@@ -1544,7 +1544,7 @@ fn (mut dl Dlmalloc) mmap_resize(oldp_ &Chunk, nb usize, can_move bool) &Chunk {
 	ptr := dl.system_allocator.remap(dl.system_allocator.data, voidptr(usize(oldp) - offset),
 		oldmmsize, newmmsize, can_move)
 	if isnil(ptr) {
-		return voidptr(0)
+		return unsafe { nil }
 	}

 	mut newp := &Chunk(voidptr(usize(ptr) + offset))
@@ -1604,7 +1604,7 @@ fn (mut dl Dlmalloc) dispose_chunk(p_ &Chunk, psize_ usize) {
 			dl.top = p
 			p.head = tsize | dlmalloc.pinuse
 			if voidptr(p) == voidptr(dl.dv) {
-				dl.dv = voidptr(0)
+				dl.dv = unsafe { nil }
 				dl.dvsize = 0
 			}
 			return
@@ -61,26 +61,26 @@ pub:
 	borderless_window bool
 	always_on_top bool
 	bg_color gx.Color
-	init_fn FNCb = voidptr(0)
-	frame_fn FNCb = voidptr(0)
-	native_frame_fn FNCb = voidptr(0)
-	cleanup_fn FNCb = voidptr(0)
-	fail_fn FNFail = voidptr(0)
+	init_fn FNCb = unsafe { nil }
+	frame_fn FNCb = unsafe { nil }
+	native_frame_fn FNCb = unsafe { nil }
+	cleanup_fn FNCb = unsafe { nil }
+	fail_fn FNFail = unsafe { nil }
 	//
-	event_fn FNEvent = voidptr(0)
-	quit_fn FNEvent = voidptr(0)
+	event_fn FNEvent = unsafe { nil }
+	quit_fn FNEvent = unsafe { nil }
 	//
-	keydown_fn FNKeyDown = voidptr(0)
-	keyup_fn FNKeyUp = voidptr(0)
-	char_fn FNChar = voidptr(0)
+	keydown_fn FNKeyDown = unsafe { nil }
+	keyup_fn FNKeyUp = unsafe { nil }
+	char_fn FNChar = unsafe { nil }
 	//
-	move_fn FNMove = voidptr(0)
-	click_fn FNClick = voidptr(0)
-	unclick_fn FNUnClick = voidptr(0)
-	leave_fn FNEvent = voidptr(0)
-	enter_fn FNEvent = voidptr(0)
-	resized_fn FNEvent = voidptr(0)
-	scroll_fn FNEvent = voidptr(0)
+	move_fn FNMove = unsafe { nil }
+	click_fn FNClick = unsafe { nil }
+	unclick_fn FNUnClick = unsafe { nil }
+	leave_fn FNEvent = unsafe { nil }
+	enter_fn FNEvent = unsafe { nil }
+	resized_fn FNEvent = unsafe { nil }
+	scroll_fn FNEvent = unsafe { nil }
 	// wait_events bool // set this to true for UIs, to save power
 	fullscreen bool
 	scale f32 = 1.0
@@ -222,7 +222,7 @@ fn gg_init_sokol_window(user_data voidptr) {
 
 	ctx.timage_pip = sgl.make_pipeline(&pipdesc)
 	//
-	if ctx.config.init_fn != voidptr(0) {
+	if ctx.config.init_fn != unsafe { nil } {
 		$if android {
 			// NOTE on Android sokol can emit resize events *before* the init function is
 			// called (Android has to initialize a lot more through the Activity system to
@@ -233,7 +233,7 @@ fn gg_init_sokol_window(user_data voidptr) {
 		if ctx.width != win_size.width || ctx.height != win_size.height {
 			ctx.width = win_size.width
 			ctx.height = win_size.height
-			if ctx.config.resized_fn != voidptr(0) {
+			if ctx.config.resized_fn != unsafe { nil } {
 				e := Event{
 					typ: .resized
 					window_width: ctx.width
@@ -260,7 +260,7 @@ fn gg_init_sokol_window(user_data voidptr) {
 fn gg_frame_fn(user_data voidptr) {
 	mut ctx := unsafe { &Context(user_data) }
 	ctx.frame++
-	if ctx.config.frame_fn == voidptr(0) {
+	if ctx.config.frame_fn == unsafe { nil } {
 		return
 	}
 	if ctx.native_rendering {
@@ -323,64 +323,64 @@ fn gg_event_fn(ce voidptr, user_data voidptr) {
 		ctx.pressed_keys[key_idx] = next
 		ctx.pressed_keys_edge[key_idx] = prev != next
 	}
-	if ctx.config.event_fn != voidptr(0) {
+	if ctx.config.event_fn != unsafe { nil } {
 		ctx.config.event_fn(e, ctx.config.user_data)
 	}
 	match e.typ {
 		.mouse_move {
-			if ctx.config.move_fn != voidptr(0) {
+			if ctx.config.move_fn != unsafe { nil } {
 				ctx.config.move_fn(e.mouse_x / ctx.scale, e.mouse_y / ctx.scale, ctx.config.user_data)
 			}
 		}
 		.mouse_down {
-			if ctx.config.click_fn != voidptr(0) {
+			if ctx.config.click_fn != unsafe { nil } {
 				ctx.config.click_fn(e.mouse_x / ctx.scale, e.mouse_y / ctx.scale, e.mouse_button,
 					ctx.config.user_data)
 			}
 		}
 		.mouse_up {
-			if ctx.config.unclick_fn != voidptr(0) {
+			if ctx.config.unclick_fn != unsafe { nil } {
 				ctx.config.unclick_fn(e.mouse_x / ctx.scale, e.mouse_y / ctx.scale, e.mouse_button,
 					ctx.config.user_data)
 			}
 		}
 		.mouse_leave {
-			if ctx.config.leave_fn != voidptr(0) {
+			if ctx.config.leave_fn != unsafe { nil } {
 				ctx.config.leave_fn(e, ctx.config.user_data)
 			}
 		}
 		.mouse_enter {
-			if ctx.config.enter_fn != voidptr(0) {
+			if ctx.config.enter_fn != unsafe { nil } {
 				ctx.config.enter_fn(e, ctx.config.user_data)
 			}
 		}
 		.mouse_scroll {
-			if ctx.config.scroll_fn != voidptr(0) {
+			if ctx.config.scroll_fn != unsafe { nil } {
 				ctx.config.scroll_fn(e, ctx.config.user_data)
 			}
 		}
 		.key_down {
-			if ctx.config.keydown_fn != voidptr(0) {
+			if ctx.config.keydown_fn != unsafe { nil } {
 				ctx.config.keydown_fn(e.key_code, Modifier(e.modifiers), ctx.config.user_data)
 			}
 		}
 		.key_up {
-			if ctx.config.keyup_fn != voidptr(0) {
+			if ctx.config.keyup_fn != unsafe { nil } {
 				ctx.config.keyup_fn(e.key_code, Modifier(e.modifiers), ctx.config.user_data)
 			}
 		}
 		.char {
-			if ctx.config.char_fn != voidptr(0) {
+			if ctx.config.char_fn != unsafe { nil } {
 				ctx.config.char_fn(e.char_code, ctx.config.user_data)
 			}
 		}
 		.resized {
-			if ctx.config.resized_fn != voidptr(0) {
+			if ctx.config.resized_fn != unsafe { nil } {
 				ctx.config.resized_fn(e, ctx.config.user_data)
 			}
 		}
 		.quit_requested {
-			if ctx.config.quit_fn != voidptr(0) {
+			if ctx.config.quit_fn != unsafe { nil } {
 				ctx.config.quit_fn(e, ctx.config.user_data)
 			}
 		}
@@ -392,7 +392,7 @@ fn gg_event_fn(ce voidptr, user_data voidptr) {
 
 fn gg_cleanup_fn(user_data voidptr) {
 	mut ctx := unsafe { &Context(user_data) }
-	if ctx.config.cleanup_fn != voidptr(0) {
+	if ctx.config.cleanup_fn != unsafe { nil } {
 		ctx.config.cleanup_fn(ctx.config.user_data)
 	}
 	gfx.shutdown()
@@ -401,7 +401,7 @@ fn gg_cleanup_fn(user_data voidptr) {
 fn gg_fail_fn(msg &char, user_data voidptr) {
 	mut ctx := unsafe { &Context(user_data) }
 	vmsg := unsafe { tos3(msg) }
-	if ctx.config.fail_fn != voidptr(0) {
+	if ctx.config.fail_fn != unsafe { nil } {
 		ctx.config.fail_fn(vmsg, ctx.config.user_data)
 	} else {
 		eprintln('gg error: $vmsg')
@@ -124,10 +124,10 @@ pub fn (mut ch Channel) close() {
 	if !C.atomic_compare_exchange_strong_u16(&ch.closed, &open_val, 1) {
 		return
 	}
-	mut nulladr := voidptr(0)
+	mut nulladr := unsafe { voidptr(0) }
 	for !C.atomic_compare_exchange_weak_ptr(unsafe { &voidptr(&ch.adr_written) }, &nulladr,
 		voidptr(-1)) {
-		nulladr = voidptr(0)
+		nulladr = unsafe { voidptr(0) }
 	}
 	ch.readsem_im.post()
 	ch.readsem.post()
@@ -135,7 +135,7 @@ pub fn (mut ch Channel) close() {
 	for !C.atomic_compare_exchange_weak_u16(&ch.read_sub_mtx, &null16, u16(1)) {
 		null16 = u16(0)
 	}
-	if ch.read_subscriber != voidptr(0) {
+	if ch.read_subscriber != unsafe { nil } {
 		ch.read_subscriber.sem.post()
 	}
 	C.atomic_store_u16(&ch.read_sub_mtx, u16(0))
@@ -143,13 +143,13 @@ pub fn (mut ch Channel) close() {
 	for !C.atomic_compare_exchange_weak_u16(&ch.write_sub_mtx, &null16, u16(1)) {
 		null16 = u16(0)
 	}
-	if ch.write_subscriber != voidptr(0) {
+	if ch.write_subscriber != unsafe { nil } {
 		ch.write_subscriber.sem.post()
 	}
 	C.atomic_store_u16(&ch.write_sub_mtx, u16(0))
 	ch.writesem.post()
 	if ch.cap == 0 {
-		C.atomic_store_ptr(unsafe { &voidptr(&ch.read_adr) }, voidptr(0))
+		C.atomic_store_ptr(unsafe { &voidptr(&ch.read_adr) }, unsafe { nil })
 	}
 	ch.writesem_im.post()
 }
@@ -187,14 +187,14 @@ fn (mut ch Channel) try_push_priv(src voidptr, no_block bool) ChanState {
 		mut wradr := C.atomic_load_ptr(unsafe { &voidptr(&ch.write_adr) })
 		for wradr != C.NULL {
 			if C.atomic_compare_exchange_strong_ptr(unsafe { &voidptr(&ch.write_adr) },
-				&wradr, voidptr(0))
+				&wradr, unsafe { nil })
 			{
 				// there is a reader waiting for us
 				unsafe { C.memcpy(wradr, src, ch.objsize) }
-				mut nulladr := voidptr(0)
+				mut nulladr := unsafe { voidptr(0) }
 				for !C.atomic_compare_exchange_weak_ptr(unsafe { &voidptr(&ch.adr_written) },
 					&nulladr, wradr) {
-					nulladr = voidptr(0)
+					nulladr = unsafe { voidptr(0) }
 				}
 				ch.readsem_im.post()
 				return .success
@@ -228,7 +228,7 @@ fn (mut ch Channel) try_push_priv(src voidptr, no_block bool) ChanState {
 			if wradr != C.NULL {
 				mut src2 := src
 				if C.atomic_compare_exchange_strong_ptr(unsafe { &voidptr(&ch.read_adr) },
-					&src2, voidptr(0))
+					&src2, unsafe { nil })
 				{
 					ch.writesem.post()
 					continue
@@ -242,7 +242,7 @@ fn (mut ch Channel) try_push_priv(src voidptr, no_block bool) ChanState {
 				u16(1)) {
 				null16 = u16(0)
 			}
-			if ch.read_subscriber != voidptr(0) {
+			if ch.read_subscriber != unsafe { nil } {
 				ch.read_subscriber.sem.post()
 			}
 			C.atomic_store_u16(&ch.read_sub_mtx, u16(0))
@@ -250,7 +250,7 @@ fn (mut ch Channel) try_push_priv(src voidptr, no_block bool) ChanState {
 			mut src2 := src
 			for sp := u32(0); sp < spinloops_ || read_in_progress; sp++ {
 				if C.atomic_compare_exchange_strong_ptr(unsafe { &voidptr(&ch.adr_read) },
-					&src2, voidptr(0))
+					&src2, unsafe { nil })
 				{
 					have_swapped = true
 					read_in_progress = true
@@ -273,7 +273,7 @@ fn (mut ch Channel) try_push_priv(src voidptr, no_block bool) ChanState {
 			}
 			if C.atomic_load_u16(&ch.closed) != 0 {
 				if have_swapped
-					|| C.atomic_compare_exchange_strong_ptr(unsafe { &voidptr(&ch.adr_read) }, &src2, voidptr(0)) {
+					|| C.atomic_compare_exchange_strong_ptr(unsafe { &voidptr(&ch.adr_read) }, &src2, unsafe { nil }) {
 					ch.writesem.post()
 					return .success
 				} else {
@@ -281,7 +281,7 @@ fn (mut ch Channel) try_push_priv(src voidptr, no_block bool) ChanState {
 				}
 			}
 			if have_swapped
-				|| C.atomic_compare_exchange_strong_ptr(unsafe { &voidptr(&ch.adr_read) }, &src2, voidptr(0)) {
+				|| C.atomic_compare_exchange_strong_ptr(unsafe { &voidptr(&ch.adr_read) }, &src2, unsafe { nil }) {
 				ch.writesem.post()
 				break
 			} else {
@@ -340,7 +340,7 @@ fn (mut ch Channel) try_push_priv(src voidptr, no_block bool) ChanState {
 		for !C.atomic_compare_exchange_weak_u16(&ch.read_sub_mtx, &null16, u16(1)) {
 			null16 = u16(0)
 		}
-		if ch.read_subscriber != voidptr(0) {
+		if ch.read_subscriber != unsafe { nil } {
 			ch.read_subscriber.sem.post()
 		}
 		C.atomic_store_u16(&ch.read_sub_mtx, u16(0))
@@ -378,14 +378,14 @@ fn (mut ch Channel) try_pop_priv(dest voidptr, no_block bool) ChanState {
 		mut rdadr := C.atomic_load_ptr(unsafe { &voidptr(&ch.read_adr) })
 		for rdadr != C.NULL {
 			if C.atomic_compare_exchange_strong_ptr(unsafe { &voidptr(&ch.read_adr) },
-				&rdadr, voidptr(0))
+				&rdadr, unsafe { nil })
 			{
 				// there is a writer waiting for us
 				unsafe { C.memcpy(dest, rdadr, ch.objsize) }
-				mut nulladr := voidptr(0)
+				mut nulladr := unsafe { voidptr(0) }
 				for !C.atomic_compare_exchange_weak_ptr(unsafe { &voidptr(&ch.adr_read) },
 					&nulladr, rdadr) {
-					nulladr = voidptr(0)
+					nulladr = unsafe { nil }
 				}
 				ch.writesem_im.post()
 				return .success
@@ -461,7 +461,7 @@ fn (mut ch Channel) try_pop_priv(dest voidptr, no_block bool) ChanState {
 		for !C.atomic_compare_exchange_weak_u16(&ch.write_sub_mtx, &null16, u16(1)) {
 			null16 = u16(0)
 		}
-		if ch.write_subscriber != voidptr(0) {
+		if ch.write_subscriber != unsafe { nil } {
 			ch.write_subscriber.sem.post()
 		}
 		C.atomic_store_u16(&ch.write_sub_mtx, u16(0))
@@ -475,7 +475,7 @@ fn (mut ch Channel) try_pop_priv(dest voidptr, no_block bool) ChanState {
 			if rdadr != C.NULL {
 				mut dest2 := dest
 				if C.atomic_compare_exchange_strong_ptr(unsafe { &voidptr(&ch.write_adr) },
-					&dest2, voidptr(0))
+					&dest2, unsafe { nil })
 				{
 					ch.readsem.post()
 					continue
@@ -489,7 +489,7 @@ fn (mut ch Channel) try_pop_priv(dest voidptr, no_block bool) ChanState {
 			for !C.atomic_compare_exchange_weak_u16(&ch.write_sub_mtx, &null16, u16(1)) {
 				null16 = u16(0)
 			}
-			if ch.write_subscriber != voidptr(0) {
+			if ch.write_subscriber != unsafe { nil } {
 				ch.write_subscriber.sem.post()
 			}
 			C.atomic_store_u16(&ch.write_sub_mtx, u16(0))
@@ -497,7 +497,7 @@ fn (mut ch Channel) try_pop_priv(dest voidptr, no_block bool) ChanState {
 			mut dest2 := dest
 			for sp := u32(0); sp < spinloops_ || write_in_progress; sp++ {
 				if C.atomic_compare_exchange_strong_ptr(unsafe { &voidptr(&ch.adr_written) },
-					&dest2, voidptr(0))
+					&dest2, unsafe { nil })
 				{
 					have_swapped = true
 					break
@@ -521,7 +521,7 @@ fn (mut ch Channel) try_pop_priv(dest voidptr, no_block bool) ChanState {
 				ch.readsem_im.wait()
 			}
 			if have_swapped
-				|| C.atomic_compare_exchange_strong_ptr(unsafe { &voidptr(&ch.adr_written) }, &dest2, voidptr(0)) {
+				|| C.atomic_compare_exchange_strong_ptr(unsafe { &voidptr(&ch.adr_written) }, &dest2, unsafe { nil }) {
 				ch.readsem.post()
 				break
 			} else {
@@ -566,7 +566,7 @@ pub fn channel_select(mut channels []&Channel, dir []Direction, mut objrefs []vo
 				subscr[i].nxt = &Subscription(C.atomic_exchange_ptr(&voidptr(&ch.write_subscriber),
 					&subscr[i]))
 			}
-			if voidptr(subscr[i].nxt) != voidptr(0) {
+			if voidptr(subscr[i].nxt) != unsafe { nil } {
 				subscr[i].nxt.prev = unsafe { &subscr[i].nxt }
 			}
 			C.atomic_store_u16(&ch.write_sub_mtx, u16(0))
@@ -580,7 +580,7 @@ pub fn channel_select(mut channels []&Channel, dir []Direction, mut objrefs []vo
 				subscr[i].nxt = &Subscription(C.atomic_exchange_ptr(&voidptr(&ch.read_subscriber),
 					&subscr[i]))
 			}
-			if voidptr(subscr[i].nxt) != voidptr(0) {
+			if voidptr(subscr[i].nxt) != unsafe { nil } {
 				subscr[i].nxt.prev = unsafe { &subscr[i].nxt }
 			}
 			C.atomic_store_u16(&ch.read_sub_mtx, u16(0))
@@ -173,7 +173,7 @@ pub fn (mut c Checker) assign_stmt(mut node ast.AssignStmt) {
 		if mut right is ast.Ident {
 			if mut right.obj is ast.Var {
 				mut obj := unsafe { &right.obj }
-				if c.fn_scope != voidptr(0) {
+				if c.fn_scope != unsafe { nil } {
 					obj = c.fn_scope.find_var(right.obj.name) or { obj }
 				}
 				if obj.is_stack_obj && !c.inside_unsafe {
@@ -221,6 +221,7 @@ pub fn (mut c Checker) assign_stmt(mut node ast.AssignStmt) {
 				left.pos)
 		}
 		if right is ast.Nil {
+			// `x := unsafe { nil }` is allowed
 			c.error('use of untyped nil in assignment', right.pos())
 		}
 	}
@@ -457,7 +457,7 @@ pub fn (mut c Checker) struct_init(mut node ast.StructInit) ast.Type {
 			if mut field.expr is ast.Ident {
 				if mut field.expr.obj is ast.Var {
 					mut obj := unsafe { &field.expr.obj }
-					if c.fn_scope != voidptr(0) {
+					if c.fn_scope != unsafe { nil } {
 						obj = c.fn_scope.find_var(obj.name) or { obj }
 					}
 					if obj.is_stack_obj && !c.inside_unsafe {
@@ -46,8 +46,9 @@ pub mut:
 	it_name string // the name to replace `it` with
 	in_lambda_depth int
 	inside_const bool
+	inside_unsafe bool
 	is_mbranch_expr bool // match a { x...y { } }
-	fn_scope &ast.Scope = voidptr(0)
+	fn_scope &ast.Scope = unsafe { nil }
 	wsinfix_depth int
 }
 
@@ -438,7 +439,13 @@ pub fn (mut f Fmt) stmt(node ast.Stmt) {
 			f.assign_stmt(node)
 		}
 		ast.Block {
-			f.block(node)
+			if node.is_unsafe {
+				f.inside_unsafe = true
+				f.block(node)
+				f.inside_unsafe = false
+			} else {
+				f.block(node)
+			}
 		}
 		ast.BranchStmt {
 			f.branch_stmt(node)
@@ -675,7 +682,9 @@ pub fn (mut f Fmt) expr(node_ ast.Expr) {
 			f.type_of(node)
 		}
 		ast.UnsafeExpr {
+			f.inside_unsafe = true
 			f.unsafe_expr(node)
+			f.inside_unsafe = false
 		}
 		ast.ComptimeType {
 			match node.kind {
@@ -1755,7 +1764,21 @@ pub fn (mut f Fmt) call_args(args []ast.CallArg) {
 }
 
 pub fn (mut f Fmt) cast_expr(node ast.CastExpr) {
-	f.write(f.table.type_to_str_using_aliases(node.typ, f.mod2alias) + '(')
+	typ := f.table.type_to_str_using_aliases(node.typ, f.mod2alias)
+	if typ == 'voidptr' {
+		// `voidptr(0)` => `nil`
+		if node.expr is ast.IntegerLiteral {
+			if node.expr.val == '0' {
+				if f.inside_unsafe {
+					f.write('nil')
+				} else {
+					f.write('unsafe { nil }')
+				}
+				return
+			}
+		}
+	}
+	f.write('${typ}(')
 	f.mark_types_import_as_used(node.typ)
 	f.expr(node.expr)
 	if node.has_arg {
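The cast_expr hunk above is where the rewrite actually happens: when the cast target is `voidptr` and the operand is the integer literal `0`, the formatter emits `nil` if it is already inside an `unsafe` block (tracked by the new `f.inside_unsafe` flag) and `unsafe { nil }` otherwise, while casts to other pointer types such as `&voidptr(0)` are left untouched. A small hand-written illustration of both cases (the function name is invented, not part of the commit):

// source accepted by the old formatter
fn setup() {
	x := voidptr(0)
	unsafe {
		mut p := &voidptr(0)
		*p = voidptr(0)
	}
	_ = x
}

// what vfmt produces after this change
fn setup() {
	x := unsafe { nil }
	unsafe {
		mut p := &voidptr(0) // cast to `&voidptr`, not `voidptr`, so it is kept
		*p = nil // already inside `unsafe`, so plain `nil`
	}
	_ = x
}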
@@ -10,4 +10,5 @@ fn test_cast() {
 	_ := f32(0)
 	_ := Type(0)
 	_ := ast.Expr(sum_expr())
+	_ := nil
 }
@@ -10,5 +10,6 @@ fn test_cast() {
 	_ := f32(0)
 	_ := table.Type(0)
 	_ := ast.Expr(sum_expr())
+	_ := voidptr(0)
 }
 
@@ -25,13 +25,13 @@ pub mut:
 	reload_time_ms int // how much time the last reload took (compilation + loading)
 	last_mod_ts i64 // a timestamp for when the original was last changed
 	recheck_period_ms int = 100 // how often do you want to check for changes
-	cb_recheck FNLiveReloadCB = voidptr(0) // executed periodically
-	cb_compile_failed FNLiveReloadCB = voidptr(0) // executed when a reload compilation failed
-	cb_before FNLiveReloadCB = voidptr(0) // executed before a reload try happens
-	cb_after FNLiveReloadCB = voidptr(0) // executed after a reload try happened, even if failed
-	cb_locked_before FNLiveReloadCB = voidptr(0) // executed before lib reload, in the mutex section
-	cb_locked_after FNLiveReloadCB = voidptr(0) // executed after lib reload, in the mutex section
-	user_ptr voidptr = voidptr(0) // you can set it to anything, then retrieve it in the cb_ fns
+	cb_recheck FNLiveReloadCB = unsafe { nil } // executed periodically
+	cb_compile_failed FNLiveReloadCB = unsafe { nil } // executed when a reload compilation failed
+	cb_before FNLiveReloadCB = unsafe { nil } // executed before a reload try happens
+	cb_after FNLiveReloadCB = unsafe { nil } // executed after a reload try happened, even if failed
+	cb_locked_before FNLiveReloadCB = unsafe { nil } // executed before lib reload, in the mutex section
+	cb_locked_after FNLiveReloadCB = unsafe { nil } // executed after lib reload, in the mutex section
+	user_ptr voidptr = unsafe { nil } // you can set it to anything, then retrieve it in the cb_ fns
 }
 
 // LiveReloadInfo.live_linkfn should be called by the reloader
@@ -94,14 +94,14 @@ fn load_lib(mut r live.LiveReloadInfo, new_lib_path string) {
 	C.pthread_mutex_lock(r.live_fn_mutex)
 	elog(r, 'live mutex locked')
 	//
-	if r.cb_locked_before != voidptr(0) {
+	if r.cb_locked_before != unsafe { nil } {
 		r.cb_locked_before(r)
 	}
 	//
 	protected_load_lib(mut r, new_lib_path)
 	//
 	r.reloads_ok++
-	if r.cb_locked_after != voidptr(0) {
+	if r.cb_locked_after != unsafe { nil } {
 		r.cb_locked_after(r)
 	}
 	//
@@ -132,7 +132,7 @@ fn reloader(mut r live.LiveReloadInfo) {
 	// elog(r,'reloader, r: $r')
 	mut last_ts := os.file_last_mod_unix(r.original)
 	for {
-		if r.cb_recheck != voidptr(0) {
+		if r.cb_recheck != unsafe { nil } {
 			r.cb_recheck(r)
 		}
 		now_ts := os.file_last_mod_unix(r.original)
@@ -140,19 +140,19 @@ fn reloader(mut r live.LiveReloadInfo) {
 			r.reloads++
 			last_ts = now_ts
 			r.last_mod_ts = last_ts
-			if r.cb_before != voidptr(0) {
+			if r.cb_before != unsafe { nil } {
 				r.cb_before(r)
 			}
 			compile_and_reload_shared_lib(mut r) or {
-				if r.cb_compile_failed != voidptr(0) {
+				if r.cb_compile_failed != unsafe { nil } {
 					r.cb_compile_failed(r)
 				}
-				if r.cb_after != voidptr(0) {
+				if r.cb_after != unsafe { nil } {
 					r.cb_after(r)
 				}
 				continue
 			}
-			if r.cb_after != voidptr(0) {
+			if r.cb_after != unsafe { nil } {
 				r.cb_after(r)
 			}
 		}
@@ -55,8 +55,8 @@ fn test_fn_assignment_array() {
 fn test_fn_variables_can_be_assigned_pointers() {
 	mut fn_ptr := fn (_ voidptr, _ u64) {}
 	// println(voidptr(fn_ptr))
-	assert fn_ptr != voidptr(0)
-	fn_ptr = voidptr(0)
+	assert fn_ptr != unsafe { nil }
+	fn_ptr = unsafe { nil }
 	// aprintln(voidptr(fn_ptr))
-	assert fn_ptr == voidptr(0)
+	assert fn_ptr == unsafe { nil }
 }